text
stringlengths 7
1.01M
|
|---|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aliyuncs.ccc.model.v20170705;
import com.aliyuncs.RpcAcsRequest;
/**
* @author auto create
* @version
*/
public class AddNumberToSkillGroupRequest extends RpcAcsRequest<AddNumberToSkillGroupResponse> {

	private String number;
	private String instanceId;
	private String skillGroupId;

	/** Builds a request for the CCC product's "AddNumberToSkillGroup" API (version 2017-07-05). */
	public AddNumberToSkillGroupRequest() {
		super("CCC", "2017-07-05", "AddNumberToSkillGroup");
	}

	public String getNumber() {
		return this.number;
	}

	/** Stores the number and, when non-null, mirrors it into the "Number" query parameter. */
	public void setNumber(String number) {
		this.number = number;
		if (number == null) {
			return;
		}
		putQueryParameter("Number", number);
	}

	public String getInstanceId() {
		return this.instanceId;
	}

	/** Stores the instance id and, when non-null, mirrors it into the "InstanceId" query parameter. */
	public void setInstanceId(String instanceId) {
		this.instanceId = instanceId;
		if (instanceId == null) {
			return;
		}
		putQueryParameter("InstanceId", instanceId);
	}

	public String getSkillGroupId() {
		return this.skillGroupId;
	}

	/** Stores the skill group id and, when non-null, mirrors it into the "SkillGroupId" query parameter. */
	public void setSkillGroupId(String skillGroupId) {
		this.skillGroupId = skillGroupId;
		if (skillGroupId == null) {
			return;
		}
		putQueryParameter("SkillGroupId", skillGroupId);
	}

	@Override
	public Class<AddNumberToSkillGroupResponse> getResponseClass() {
		return AddNumberToSkillGroupResponse.class;
	}
}
|
package uk.gov.justice.hmpps.prison.test;
import org.apache.commons.lang3.StringUtils;
import org.springframework.test.context.ActiveProfilesResolver;
import java.util.Objects;
/**
* Sets active profiles for integration tests based on an environment property.
*/
public class DatasourceActiveProfilesResolver implements ActiveProfilesResolver {

    /**
     * Resolves the single active profile "&lt;target&gt;-&lt;dialect&gt;" from the
     * "api.db.target" (mandatory) and "api.db.dialect" (optional, defaults to
     * "hsqldb" when missing or blank) environment variables.
     */
    @Override
    public String[] resolve(final Class<?> testClass) {
        final String target = System.getenv("api.db.target");
        Objects.requireNonNull(target, "'api.db.target' environment variable must be specified.");
        final String dialect = StringUtils.defaultIfBlank(System.getenv("api.db.dialect"), "hsqldb");
        return new String[]{target + "-" + dialect};
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.superbiz.enricher.jpa;
import org.jboss.arquillian.container.test.spi.client.deployment.AuxiliaryArchiveProcessor;
import org.jboss.shrinkwrap.api.Archive;
import org.superbiz.enricher.maven.Enrichers;
public class OpenJPAEnricher implements AuxiliaryArchiveProcessor {
/**
 * Adds the OpenJPA dependencies (resolved from the Maven POM at
 * src/test/resources/openjpa-pom.xml) as libraries to every auxiliary
 * archive Arquillian is about to deploy.
 */
@Override
public void process(final Archive<?> auxiliaryArchive) {
Enrichers.wrap(auxiliaryArchive).addAsLibraries(Enrichers.resolve("src/test/resources/openjpa-pom.xml"));
}
}
|
package com.springmvc.ketao.service;
import java.util.List;
import com.springmvc.ketao.entity.Student;
public interface IStudentManager {
/**
 * Adds a new student record.
 * NOTE(review): declares a raw {@code throws Exception}; a narrower exception
 * type would be preferable, but changing it would break existing implementers.
 */
public void addStudent(Student student) throws Exception;
/** Deletes the student with the given id — presumably returns true on success; confirm with the implementation. */
public boolean delStudent(String id);
/** Deletes every student whose id appears in the array — presumably returns true on success; confirm with the implementation. */
public boolean delStudentsByIds(String []ids);
/** Updates an existing student record — presumably returns true on success; confirm with the implementation. */
public boolean updateStudent(Student student);
/** Returns the student with the given id. */
public Student getStudent(String id);
/** Returns the students matching the given search term. */
public List<Student> getStudentsBySearch(String search);
/** Returns all students. */
public List<Student> getAllStudents();
}
|
package org.cyclops.colossalchests.network.packet;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.entity.player.ServerPlayerEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.network.play.server.SSetSlotPacket;
import net.minecraft.world.World;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
import org.cyclops.cyclopscore.network.CodecField;
import org.cyclops.cyclopscore.network.PacketCodec;
/**
* Packet for setting slots with id's larger than max short size (65535).
* {@link SSetSlotPacket}.
* @author rubensworks
*
*/
public class SetSlotLarge extends PacketCodec {
// NOTE(review): the @CodecField declaration order may be significant for how
// PacketCodec serializes this packet — do not reorder these fields.
@CodecField
private int windowId;
@CodecField
private int slot;
@CodecField
private ItemStack itemStack;
/** No-arg constructor — presumably required by the codec for decoding; confirm against PacketCodec. */
public SetSlotLarge() {
}
/** Creates a packet that sets slot {@code slot} of container {@code windowId} to {@code itemStack}. */
public SetSlotLarge(int windowId, int slot, ItemStack itemStack) {
this.windowId = windowId;
this.slot = slot;
this.itemStack = itemStack;
}
/** Handled synchronously (not on a worker thread). */
@Override
public boolean isAsync() {
return false;
}
@Override
@OnlyIn(Dist.CLIENT)
public void actionClient(World world, PlayerEntity player) {
// Modified code from NetHandlerPlayClient#handleSetSlot:
// apply the slot update only if the packet targets the container the player currently has open.
if (windowId == player.openContainer.windowId) {
player.openContainer.putStackInSlot(slot, itemStack);
}
}
/** Server-side handling is intentionally a no-op for this packet. */
@Override
public void actionServer(World world, ServerPlayerEntity player) {
}
}
|
/**
* Copyright (c) 2010-2019 Mark Allen, Norbert Bartels.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.restfb.types.send.airline;
import com.restfb.Facebook;
import com.restfb.types.AbstractFacebookType;
import lombok.Getter;
public class ProductInfo extends AbstractFacebookType {

  /** Title of this product line. */
  @Getter
  @Facebook
  private String title;

  /** Value displayed for this product line. */
  @Getter
  @Facebook
  private String value;

  /**
   * Creates a product info entry with the given title and value.
   */
  public ProductInfo(String title, String value) {
    this.title = title;
    this.value = value;
  }
}
|
package act.controller;
/*-
* #%L
* ACT Framework
* %%
* Copyright (C) 2014 - 2017 ActFramework
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
 * Well-known controller parameter names.
 *
 * <p>This is a constants holder and is never instantiated.
 */
public final class ParamNames {

    /** Name of the implicit parameter carrying the request URL path. */
    public static final String PATH = "__path";

    private ParamNames() {
        // utility class; prevent instantiation
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import com.google.common.base.Preconditions;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.InetSocketAddress;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.retry.FailoverProxyProvider;
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.io.retry.RetryProxy;
import org.apache.hadoop.ipc.*;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
public class JobTrackerProxies {
  private static final Log LOG = LogFactory.getLog(JobTrackerProxies.class);

  /**
   * Wrapper for a client proxy as well as its associated delegation-token
   * service ID. This is simply used as a tuple-like return type for
   * {@link JobTrackerProxies#createProxy} and
   * {@link JobTrackerProxies#createNonHAProxy}.
   */
  public static class ProxyAndInfo<PROXYTYPE> {
    private final PROXYTYPE proxy;
    private final Text dtService;

    public ProxyAndInfo(PROXYTYPE proxy, Text dtService) {
      this.proxy = proxy;
      this.dtService = dtService;
    }

    /** @return the wrapped client proxy */
    public PROXYTYPE getProxy() {
      return proxy;
    }

    /** @return the token service name to use for delegation tokens obtained via this proxy */
    public Text getDelegationTokenService() {
      return dtService;
    }
  }

  /**
   * Creates a proxy to the JobTracker implementing {@code xface}. When a
   * failover proxy provider is configured for the (logical) address, the
   * returned proxy retries across JobTrackers on network failure (HA case);
   * otherwise a plain non-HA proxy is created.
   *
   * @param conf configuration holding failover/retry settings
   * @param jtAddress JobTracker address, physical or logical
   * @param xface protocol interface the proxy must implement
   * @return the proxy together with its delegation-token service name
   * @throws IOException if the proxy cannot be created
   */
  @SuppressWarnings("unchecked")
  public static <T> ProxyAndInfo<T> createProxy(Configuration conf,
      String jtAddress, Class<T> xface) throws IOException {
    Class<FailoverProxyProvider<T>> failoverProxyProviderClass =
        getFailoverProxyProviderClass(conf, jtAddress, xface);
    if (failoverProxyProviderClass == null) {
      // Non-HA case
      return createNonHAProxy(conf, NetUtils.createSocketAddr(jtAddress), xface,
          UserGroupInformation.getCurrentUser(), true);
    } else {
      // HA case: wrap the provider in a retry proxy that fails over on
      // network exceptions, using the configured attempt/sleep bounds.
      FailoverProxyProvider<T> failoverProxyProvider =
          createFailoverProxyProvider(conf, failoverProxyProviderClass, xface,
              jtAddress);
      int maxFailoverAttempts =
          conf.getInt(HAUtil.MR_CLIENT_FAILOVER_MAX_ATTEMPTS_KEY,
              HAUtil.MR_CLIENT_FAILOVER_MAX_ATTEMPTS_DEFAULT);
      long failoverSleepBaseMillis =
          conf.getInt(HAUtil.MR_CLIENT_FAILOVER_SLEEPTIME_BASE_KEY,
              HAUtil.MR_CLIENT_FAILOVER_SLEEPTIME_BASE_DEFAULT);
      long failoverSleepMaxMillis =
          conf.getInt(HAUtil.MR_CLIENT_FAILOVER_SLEEPTIME_MAX_KEY,
              HAUtil.MR_CLIENT_FAILOVER_SLEEPTIME_MAX_DEFAULT);
      T proxy = (T) RetryProxy.create(xface, failoverProxyProvider, RetryPolicies
          .failoverOnNetworkException(RetryPolicies.TRY_ONCE_THEN_FAIL,
              maxFailoverAttempts, failoverSleepBaseMillis,
              failoverSleepMaxMillis));
      Text dtService = HAUtil.buildTokenServiceForLogicalAddress(jtAddress);
      return new ProxyAndInfo<T>(proxy, dtService);
    }
  }

  /**
   * Looks up the configured {@link FailoverProxyProvider} class for the given
   * logical JobTracker address. Returns null when {@code jtAddress} is null or
   * no provider is configured (i.e. the non-HA case).
   */
  @SuppressWarnings("unchecked")
  private static <T> Class<FailoverProxyProvider<T>> getFailoverProxyProviderClass(
      Configuration conf, String jtAddress, Class<T> xface) throws IOException {
    if (jtAddress == null) {
      return null;
    }
    String configKey = DFSUtil.addKeySuffixes(
        HAUtil.MR_CLIENT_FAILOVER_PROXY_PROVIDER_KEY_PREFIX,
        HAUtil.getLogicalName(jtAddress));
    return (Class<FailoverProxyProvider<T>>)
        conf.getClass(configKey, null, FailoverProxyProvider.class);
  }

  /**
   * Creates a direct (non-HA) proxy to the JobTracker at {@code jtAddr}.
   * Only {@code JobSubmissionProtocol} and {@code InterTrackerProtocol} are
   * supported.
   *
   * @throws IOException for any other protocol, or on connection failure
   */
  @SuppressWarnings("unchecked")
  public static <T> ProxyAndInfo<T> createNonHAProxy(
      Configuration conf, InetSocketAddress jtAddr, Class<T> xface,
      UserGroupInformation ugi, boolean withRetries) throws IOException {
    Text dtService = SecurityUtil.buildTokenService(jtAddr);
    T proxy;
    if (xface == JobSubmissionProtocol.class) {
      proxy = (T) createJTProxyWithJobSubmissionProtocol(jtAddr, conf, ugi,
          withRetries);
    } else if (xface == InterTrackerProtocol.class) {
      proxy = (T) createJTProxyWithInterTrackerProtocol(jtAddr, conf, ugi,
          withRetries);
    } else {
      throw new IOException("wrong protocol");
    }
    return new ProxyAndInfo<T>(proxy, dtService);
  }

  /** RPC timeout derived from the IPC ping-interval setting. */
  private static int getRpcTimeout(Configuration conf) {
    return conf.getInt(CommonConfigurationKeys.IPC_PING_INTERVAL_KEY,
        CommonConfigurationKeys.IPC_PING_INTERVAL_DEFAULT);
  }

  /**
   * Creates a {@code JobSubmissionProtocol} proxy.
   * NOTE(review): {@code withRetries} is accepted but currently unused.
   */
  private static JobSubmissionProtocol createJTProxyWithJobSubmissionProtocol(
      InetSocketAddress address, Configuration conf, UserGroupInformation ugi,
      boolean withRetries) throws IOException {
    RPC.setProtocolEngine(conf, JobSubmissionProtocol.class, WritableRpcEngine.class);
    final long version = RPC.getProtocolVersion(JobSubmissionProtocol.class);
    // Bug fix: the original code additionally called RPC.getProxy(...) here
    // and discarded the result, leaking an RPC proxy that was never stopped.
    // Only the getProtocolProxy result below is actually used.
    return RPC.getProtocolProxy(
        JobSubmissionProtocol.class, version, address, ugi, conf,
        NetUtils.getDefaultSocketFactory(conf), getRpcTimeout(conf), null).getProxy();
  }

  /**
   * Creates an {@code InterTrackerProtocol} proxy, waiting (up to
   * {@code Long.MAX_VALUE} ms) for the JobTracker to become reachable.
   * NOTE(review): {@code withRetries} is accepted but currently unused.
   */
  private static InterTrackerProtocol createJTProxyWithInterTrackerProtocol(
      InetSocketAddress address, Configuration conf, UserGroupInformation ugi,
      boolean withRetries) throws IOException {
    RPC.setProtocolEngine(conf, InterTrackerProtocol.class, WritableRpcEngine.class);
    return RPC.waitForProtocolProxy(
        InterTrackerProtocol.class, InterTrackerProtocol.versionID, address,
        conf, getRpcTimeout(conf), null, Long.MAX_VALUE).getProxy();
  }

  /**
   * Instantiates the configured failover proxy provider via its
   * (Configuration, String, Class) constructor.
   *
   * @throws IOException if instantiation fails; a wrapped IOException cause
   *         is unwrapped and rethrown as-is
   */
  @SuppressWarnings("unchecked")
  private static <T> FailoverProxyProvider<T> createFailoverProxyProvider(
      Configuration conf, Class<FailoverProxyProvider<T>> failoverProxyProviderClass,
      Class<T> xface, String jtAddress) throws IOException {
    Preconditions.checkArgument(
        xface.isAssignableFrom(JTProtocols.class),
        "Interface %s is not a JobTracker protocol", xface);
    try {
      Constructor<FailoverProxyProvider<T>> ctor = failoverProxyProviderClass
          .getConstructor(Configuration.class, String.class, Class.class);
      FailoverProxyProvider<?> provider = ctor.newInstance(conf, jtAddress,
          xface);
      return (FailoverProxyProvider<T>) provider;
    } catch (Exception e) {
      String message = "Couldn't create proxy provider " + failoverProxyProviderClass;
      if (LOG.isDebugEnabled()) {
        LOG.debug(message, e);
      }
      if (e.getCause() instanceof IOException) {
        // Preserve the underlying I/O failure for callers.
        throw (IOException) e.getCause();
      } else {
        throw new IOException(message, e);
      }
    }
  }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.metamodel.query.parser;
import java.util.List;
import junit.framework.TestCase;
public class QueryPartParserTest extends TestCase {
// Unit tests for QueryPartParser: each test feeds a clause plus delimiter(s)
// into the parser and inspects the tokens (and delimiters) collected by a
// QueryPartCollectionProcessor.
// A blank clause yields no tokens.
public void testParseNone() throws Exception {
QueryPartCollectionProcessor itemParser = new QueryPartCollectionProcessor();
new QueryPartParser(itemParser, " ", ",").parse();
List<String> items = itemParser.getTokens();
assertEquals(0, items.size());
assertEquals("[]", items.toString());
}
// A single token is trimmed; its leading delimiter is recorded as null.
public void testParseSingle() throws Exception {
QueryPartCollectionProcessor itemParser = new QueryPartCollectionProcessor();
new QueryPartParser(itemParser, "foo ", ",").parse();
List<String> items = itemParser.getTokens();
assertEquals(1, items.size());
assertEquals("[foo]", items.toString());
assertEquals("[null]", itemParser.getDelims().toString());
}
// Comma-separated tokens are split and trimmed.
public void testParseMultiple() throws Exception {
QueryPartCollectionProcessor itemParser = new QueryPartCollectionProcessor();
new QueryPartParser(itemParser, "foo , bar", ",").parse();
List<String> items = itemParser.getTokens();
assertEquals(2, items.size());
assertEquals("[foo, bar]", items.toString());
}
// Delimiters inside parentheses must not split tokens.
public void testParseWithParenthesis() throws Exception {
QueryPartCollectionProcessor itemParser = new QueryPartCollectionProcessor();
new QueryPartParser(itemParser, "foo , bar (a,b,c),(doh)", ",").parse();
List<String> items = itemParser.getTokens();
assertEquals("[foo, bar (a,b,c), (doh)]", items.toString());
assertEquals(3, items.size());
}
// Several delimiters (" AND ", " OR ") can be active at once; the delimiter
// preceding each token is recorded, with null for the first token.
public void testMultipleDelims() throws Exception {
QueryPartCollectionProcessor itemParser = new QueryPartCollectionProcessor();
new QueryPartParser(itemParser, "foo AND bar OR baz AND (foo( AND bar) OR baz)", " AND ", " OR ").parse();
List<String> items = itemParser.getTokens();
assertEquals(4, items.size());
assertEquals("[foo, bar, baz, (foo( AND bar) OR baz)]", items.toString());
assertEquals("[null, AND , OR , AND ]", itemParser.getDelims().toString());
}
// An empty clause yields no tokens.
public void testEmptyClause() throws Exception {
QueryPartCollectionProcessor itemParser = new QueryPartCollectionProcessor();
new QueryPartParser(itemParser, "", ",").parse();
assertEquals(0, itemParser.getTokens().size());
}
// Empty parentheses yield no tokens either.
public void testEmptyParenthesis() throws Exception {
QueryPartCollectionProcessor itemParser = new QueryPartCollectionProcessor();
new QueryPartParser(itemParser, "()", ",").parse();
assertEquals(0, itemParser.getTokens().size());
}
// Nested redundant parentheses are stripped down to the inner token.
public void testMultiParenthesisLevels() throws Exception {
QueryPartCollectionProcessor itemParser = new QueryPartCollectionProcessor();
new QueryPartParser(itemParser, "(((Hello world)))", ",").parse();
assertEquals(1, itemParser.getTokens().size());
assertEquals("Hello world", itemParser.getTokens().get(0));
}
// A single outer parenthesis pair is stripped before splitting on the delimiter.
public void testOuterParenthesis() throws Exception {
QueryPartCollectionProcessor itemParser = new QueryPartCollectionProcessor();
new QueryPartParser(itemParser, "(foo,bar)", ",").parse();
List<String> items = itemParser.getTokens();
assertEquals(2, items.size());
assertEquals("[foo, bar]", items.toString());
}
}
|
/*
* Copyright 2002-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.foo;
import java.util.List;
import org.w3c.dom.Element;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.ManagedList;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.util.CollectionUtils;
import org.springframework.util.xml.DomUtils;
public class ComponentBeanDefinitionParser extends AbstractBeanDefinitionParser {

	/** Entry point: delegates to the recursive element parser. */
	@Override
	protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
		return parseComponentElement(element);
	}

	/**
	 * Builds a ComponentFactoryBean definition for {@code element}, recursing
	 * into any nested &lt;component&gt; children.
	 */
	private static AbstractBeanDefinition parseComponentElement(Element element) {
		final BeanDefinitionBuilder factoryBuilder =
				BeanDefinitionBuilder.rootBeanDefinition(ComponentFactoryBean.class);
		factoryBuilder.addPropertyValue("parent", parseComponent(element));
		final List<Element> childComponents = DomUtils.getChildElementsByTagName(element, "component");
		if (!CollectionUtils.isEmpty(childComponents)) {
			parseChildComponents(childComponents, factoryBuilder);
		}
		return factoryBuilder.getBeanDefinition();
	}

	/** Creates the plain Component definition carrying the element's "name" attribute. */
	private static BeanDefinition parseComponent(Element element) {
		final BeanDefinitionBuilder componentBuilder = BeanDefinitionBuilder.rootBeanDefinition(Component.class);
		componentBuilder.addPropertyValue("name", element.getAttribute("name"));
		return componentBuilder.getBeanDefinition();
	}

	/** Recursively parses each child element and wires the results as the "children" property. */
	private static void parseChildComponents(List<Element> childElements, BeanDefinitionBuilder factory) {
		final ManagedList<BeanDefinition> children = new ManagedList<BeanDefinition>(childElements.size());
		for (final Element childElement : childElements) {
			children.add(parseComponentElement(childElement));
		}
		factory.addPropertyValue("children", children);
	}
}
|
package pl.mp107.plugtext.db;
public class SyntaxSchema {

    // Database row id; stays 0 until the schema is persisted with an id.
    private int _id;
    // Highlighting regex patterns, one per token category.
    private String patternBuiltins;
    private String patternComments;
    private String patternFileExtensions;
    private String patternKeywords;
    private String patternLines;
    private String patternNumbers;
    private String patternPreprocessors;
    //private String[] authors;
    private String description;
    private String name;
    // Schema version; -1 means "unset".
    private int version = -1;

    /** Creates an empty schema (version defaults to -1). */
    public SyntaxSchema() {
    }

    /** Creates a schema without a database id (the id stays 0). */
    public SyntaxSchema(String patternBuiltins, String patternComments, String patternFileExtensions, String patternKeywords, String patternLines, String patternNumbers, String patternPreprocessors, String description, String name, int version) {
        this(0, patternBuiltins, patternComments, patternFileExtensions, patternKeywords, patternLines, patternNumbers, patternPreprocessors, description, name, version);
    }

    /** Creates a fully populated schema, including its database id. */
    public SyntaxSchema(int _id, String patternBuiltins, String patternComments, String patternFileExtensions, String patternKeywords, String patternLines, String patternNumbers, String patternPreprocessors, String description, String name, int version) {
        this._id = _id;
        this.patternBuiltins = patternBuiltins;
        this.patternComments = patternComments;
        this.patternFileExtensions = patternFileExtensions;
        this.patternKeywords = patternKeywords;
        this.patternLines = patternLines;
        this.patternNumbers = patternNumbers;
        this.patternPreprocessors = patternPreprocessors;
        this.description = description;
        this.name = name;
        this.version = version;
    }

    public int get_id() {
        return this._id;
    }

    public void set_id(int _id) {
        this._id = _id;
    }

    public String getPatternBuiltins() {
        return this.patternBuiltins;
    }

    public void setPatternBuiltins(String patternBuiltins) {
        this.patternBuiltins = patternBuiltins;
    }

    public String getPatternComments() {
        return this.patternComments;
    }

    public void setPatternComments(String patternComments) {
        this.patternComments = patternComments;
    }

    public String getPatternFileExtensions() {
        return this.patternFileExtensions;
    }

    public void setPatternFileExtensions(String patternFileExtensions) {
        this.patternFileExtensions = patternFileExtensions;
    }

    public String getPatternKeywords() {
        return this.patternKeywords;
    }

    public void setPatternKeywords(String patternKeywords) {
        this.patternKeywords = patternKeywords;
    }

    public String getPatternLines() {
        return this.patternLines;
    }

    public void setPatternLines(String patternLines) {
        this.patternLines = patternLines;
    }

    public String getPatternNumbers() {
        return this.patternNumbers;
    }

    public void setPatternNumbers(String patternNumbers) {
        this.patternNumbers = patternNumbers;
    }

    public String getPatternPreprocessors() {
        return this.patternPreprocessors;
    }

    public void setPatternPreprocessors(String patternPreprocessors) {
        this.patternPreprocessors = patternPreprocessors;
    }

    public String getDescription() {
        return this.description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getVersion() {
        return this.version;
    }

    public void setVersion(int version) {
        this.version = version;
    }

    @Override
    public String toString() {
        // "TODO" is a placeholder for authors; the authors field above is commented out.
        return "[" + get_id() + "] " + getName() + " - " + getDescription() + ", by " + "TODO" + "...";
    }
}
|
package com.reactnative_stripeterminal;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.ReadableMapKeySetIterator;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.modules.core.DeviceEventManagerModule;
import com.stripe.stripeterminal.callable.Callback;
import com.stripe.stripeterminal.callable.Cancelable;
import com.stripe.stripeterminal.callable.ConnectionTokenCallback;
import com.stripe.stripeterminal.callable.ConnectionTokenProvider;
import com.stripe.stripeterminal.callable.DiscoveryListener;
import com.stripe.stripeterminal.callable.PaymentIntentCallback;
import com.stripe.stripeterminal.callable.ReaderCallback;
import com.stripe.stripeterminal.callable.ReaderDisplayListener;
import com.stripe.stripeterminal.callable.ReaderSoftwareUpdateCallback;
import com.stripe.stripeterminal.callable.ReaderSoftwareUpdateListener;
import com.stripe.stripeterminal.callable.TerminalListener;
import com.stripe.stripeterminal.log.LogLevel;
import com.stripe.stripeterminal.model.external.ConnectionStatus;
import com.stripe.stripeterminal.model.external.ConnectionTokenException;
import com.stripe.stripeterminal.model.external.DeviceType;
import com.stripe.stripeterminal.model.external.DiscoveryConfiguration;
import com.stripe.stripeterminal.model.external.PaymentIntent;
import com.stripe.stripeterminal.model.external.PaymentIntentParameters;
import com.stripe.stripeterminal.model.external.PaymentStatus;
import com.stripe.stripeterminal.model.external.Reader;
import com.stripe.stripeterminal.model.external.ReaderDisplayMessage;
import com.stripe.stripeterminal.model.external.ReaderEvent;
import com.stripe.stripeterminal.model.external.ReaderInputOptions;
import com.stripe.stripeterminal.model.external.ReaderSoftwareUpdate;
import com.stripe.stripeterminal.model.external.TerminalException;
import com.stripe.stripeterminal.Terminal;
import java.sql.Wrapper;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import static com.reactnative_stripeterminal.Constants.*;
public class RNStripeTerminalModule extends ReactContextBaseJavaModule implements TerminalListener, ConnectionTokenProvider, ReaderDisplayListener, ReaderSoftwareUpdateListener, DiscoveryListener {
// React Native bridge module wrapping the Stripe Terminal Android SDK.
// The module itself acts as the Terminal's listener, token provider, and
// reader display/update/discovery listener.
final static String TAG = RNStripeTerminalModule.class.getSimpleName();
final static String moduleName = "RNStripeTerminal";
// In-flight cancelable operations (null when nothing is pending).
Cancelable pendingDiscoverReaders = null;
Cancelable pendingCreatePaymentIntent = null;
PaymentIntent lastPaymentIntent = null;
// Last reader insert/remove event; starts as CARD_REMOVED.
ReaderEvent lastReaderEvent=ReaderEvent.CARD_REMOVED;
// Callback awaiting a connection token from the JS side (see setConnectionToken).
ConnectionTokenCallback pendingConnectionTokenCallback = null;
String lastCurrency = null;
List<? extends Reader> discoveredReadersList = null;
ReaderSoftwareUpdate readerSoftwareUpdate;
Cancelable pendingInstallUpdate = null;
/** Creates the module; the Terminal SDK itself is initialized lazily via initialize(). */
public RNStripeTerminalModule(ReactApplicationContext reactContext) {
super(reactContext);
}
/** Convenience accessor for the React application context. */
ReactContext getContext(){
return getReactApplicationContext();
}
/** Module name exposed to JavaScript ("RNStripeTerminal"). */
@Override
public String getName() {
return moduleName;
}
/** Constants exported to JavaScript. */
@Nullable
@Override
public Map<String, Object> getConstants() {
// 'constants' is not declared in this class; presumably it comes from the
// static import of Constants.* — TODO confirm.
return constants;
}
/** Emits {@code eventData} to JavaScript under {@code eventName} via the device event emitter. */
public void sendEventWithName(String eventName, WritableMap eventData) {
    DeviceEventManagerModule.RCTDeviceEventEmitter emitter =
            getContext().getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class);
    emitter.emit(eventName, eventData);
}
/** Emits an arbitrary {@code eventData} payload to JavaScript under {@code eventName}. */
public void sendEventWithName(String eventName, Object eventData) {
    DeviceEventManagerModule.RCTDeviceEventEmitter emitter =
            getContext().getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class);
    emitter.emit(eventName, eventData);
}
/** Emits a {@code WritableArray} payload to JavaScript under {@code eventName}. */
public void sendEventWithName(String eventName, WritableArray eventData) {
    DeviceEventManagerModule.RCTDeviceEventEmitter emitter =
            getContext().getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class);
    emitter.emit(eventName, eventData);
}
/**
 * Serializes a ReaderSoftwareUpdate into a map nested under the UPDATE key,
 * carrying the estimated update time description and target software version.
 * Returns an empty map when the update is null.
 */
WritableMap serializeUpdate(ReaderSoftwareUpdate readerSoftwareUpdate){
WritableMap writableMap = Arguments.createMap();
WritableMap updateMap = Arguments.createMap();
if(readerSoftwareUpdate!=null){
// NOTE(review): assumes getTimeEstimate() is non-null whenever an update
// object exists — confirm against the Stripe Terminal SDK.
ReaderSoftwareUpdate.UpdateTimeEstimate updateTimeEstimate= readerSoftwareUpdate.getTimeEstimate();
updateMap.putString(ESTIMATED_UPDATE_TIME,updateTimeEstimate.getDescription());
updateMap.putString(DEVICE_SOFTWARE_VERSION,readerSoftwareUpdate.getVersion());
writableMap.putMap(UPDATE,updateMap);
}
return writableMap;
}
/**
 * Serializes a Reader into a map of battery level, device type ordinal,
 * serial number and software version, substituting 0 / "" for null values.
 * Returns an empty map when the reader is null.
 */
WritableMap serializeReader(Reader reader) {
    WritableMap readerMap = Arguments.createMap();
    if (reader == null) {
        return readerMap;
    }
    readerMap.putDouble(BATTERY_LEVEL,
            reader.getBatteryLevel() != null ? (double) reader.getBatteryLevel() : 0);
    readerMap.putInt(DEVICE_TYPE,
            reader.getDeviceType() != null ? reader.getDeviceType().ordinal() : 0);
    readerMap.putString(SERIAL_NUMBER,
            reader.getSerialNumber() != null ? reader.getSerialNumber() : "");
    readerMap.putString(DEVICE_SOFTWARE_VERSION,
            reader.getSoftwareVersion() != null ? reader.getSoftwareVersion() : "");
    return readerMap;
}
/**
 * Serializes a PaymentIntent (id, created timestamp as ISO-8601 string,
 * status ordinal, amount, the supplied currency, and stringified metadata)
 * into a map for the JS side.
 */
WritableMap serializePaymentIntent(PaymentIntent paymentIntent,String currency){
WritableMap paymentIntentMap = Arguments.createMap();
paymentIntentMap.putString(STRIPE_ID,paymentIntent.getId());
// NOTE(review): SimpleDateFormat is not thread-safe, hence a fresh instance
// per call; assumes getCreated() is epoch milliseconds — TODO confirm against
// the Stripe SDK (Stripe APIs often use epoch seconds).
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZZZZ");
paymentIntentMap.putString(CREATED,simpleDateFormat.format(new Date(paymentIntent.getCreated())));
paymentIntentMap.putInt(STATUS,paymentIntent.getStatus().ordinal());
paymentIntentMap.putInt(AMOUNT,paymentIntent.getAmount());
paymentIntentMap.putString(CURRENCY,currency);
WritableMap metaDataMap = Arguments.createMap();
if(paymentIntent.getMetadata()!=null){
for(String key:paymentIntent.getMetadata().keySet()){
metaDataMap.putString(key,String.valueOf(paymentIntent.getMetadata().get(key)));
}
}
paymentIntentMap.putMap(METADATA,metaDataMap);
return paymentIntentMap;
}
/**
 * Starts reader discovery, reporting completion or failure to JS via the
 * EVENT_READER_DISCOVERY_COMPLETION event. Any discovery already in flight
 * is aborted first.
 *
 * @param deviceType ordinal into {@link DeviceType#values()}
 * @param method     currently unused — NOTE(review): confirm whether a
 *                   discovery method selection was intended here
 * @param simulated  non-zero to discover simulated readers
 */
@ReactMethod
public void discoverReaders(int deviceType, int method, int simulated) {
    // Idiom fix: was "simulated == 0 ? false : true".
    boolean isSimulated = simulated != 0;
    try {
        DeviceType devType = DeviceType.values()[deviceType];
        DiscoveryConfiguration discoveryConfiguration = new DiscoveryConfiguration(0, devType, isSimulated);
        Callback statusCallback = new Callback() {
            @Override
            public void onSuccess() {
                pendingDiscoverReaders = null;
                WritableMap readerCompletionResponse = Arguments.createMap();
                sendEventWithName(EVENT_READER_DISCOVERY_COMPLETION, readerCompletionResponse);
            }

            @Override
            public void onFailure(@Nonnull TerminalException e) {
                pendingDiscoverReaders = null;
                WritableMap errorMap = Arguments.createMap();
                errorMap.putString(ERROR, e.getErrorMessage());
                sendEventWithName(EVENT_READER_DISCOVERY_COMPLETION, errorMap);
            }
        };
        // Cancel any discovery already running before starting a new one.
        abortDiscoverReaders();
        pendingDiscoverReaders = Terminal.getInstance().discoverReaders(discoveryConfiguration, this, statusCallback);
    } catch (Exception e) {
        e.printStackTrace();
        if (e.getMessage() != null) {
            WritableMap writableMap = Arguments.createMap();
            writableMap.putString(ERROR, e.getMessage());
            sendEventWithName(EVENT_READER_DISCOVERY_COMPLETION, writableMap);
        }
    }
}
/**
 * Initializes the Stripe Terminal SDK, using this module as both the token
 * provider and terminal listener. If the SDK is already initialized
 * (Terminal.getInstance() does not throw), reports success immediately.
 * Invokes {@code callback} with { isInitialized: boolean, error?: string }.
 */
@ReactMethod
public void initialize(com.facebook.react.bridge.Callback callback) {
try {
// Probe for prior initialization: getInstance() throws IllegalStateException
// when the Terminal has not been initialized yet.
Terminal.getInstance();
WritableMap writableMap = Arguments.createMap();
writableMap.putBoolean("isInitialized", true);
callback.invoke(writableMap);
return;
}catch (IllegalStateException e){ }
// Fresh initialization: clear any pending callbacks/operations first.
pendingConnectionTokenCallback = null;
abortDiscoverReaders();
abortCreatePayment();
abortInstallUpdate();
LogLevel logLevel = LogLevel.VERBOSE;
ConnectionTokenProvider tokenProvider = this;
TerminalListener terminalListener = this;
String err = "";
boolean isInit =false;
try {
Terminal.initTerminal(getContext().getApplicationContext(), logLevel, tokenProvider, terminalListener);
lastReaderEvent = ReaderEvent.CARD_REMOVED;
isInit = true;
} catch (TerminalException e) {
e.printStackTrace();
err = e.getErrorMessage();
isInit = false;
} catch (IllegalStateException ex){
ex.printStackTrace();
err= ex.getMessage();
// IllegalStateException here means the terminal was already initialized
// concurrently, so treat it as initialized.
isInit = true;
}
WritableMap writableMap = Arguments.createMap();
writableMap.putBoolean("isInitialized", isInit);
if(!isInit) {
writableMap.putString(ERROR, err);
}
callback.invoke(writableMap);
}
@ReactMethod
// Completes the token request parked by fetchConnectionToken(). A non-blank
// errorMsg fails the pending callback; otherwise the token is delivered.
public void setConnectionToken(String token, String errorMsg) {
    if (pendingConnectionTokenCallback != null) {
        boolean hasError = errorMsg != null && !errorMsg.trim().isEmpty();
        if (hasError) {
            pendingConnectionTokenCallback.onFailure(new ConnectionTokenException(errorMsg));
        } else {
            pendingConnectionTokenCallback.onSuccess(token);
        }
    }
    // The callback is single-use; drop it either way.
    pendingConnectionTokenCallback = null;
}
@ReactMethod
// One-shot payment flow: (1) retrieve an existing PaymentIntent or create a new
// one, (2) collect a payment method on the reader, (3) process the payment.
// A single EVENT_PAYMENT_CREATION event is emitted with either { intent } on
// success or { error, code, intent? } on failure at any step.
public void createPayment(final ReadableMap options) {
    PaymentIntentCallback paymentIntentCallback = new PaymentIntentCallback() {
        @Override
        public void onSuccess(@Nonnull final PaymentIntent paymentIntent) {
            // Step 2: collect a payment method; cancellable via abortCreatePayment().
            pendingCreatePaymentIntent = Terminal.getInstance().collectPaymentMethod(paymentIntent, RNStripeTerminalModule.this
                    , new PaymentIntentCallback() {
                @Override
                public void onSuccess(@Nonnull final PaymentIntent collectedIntent) {
                    pendingCreatePaymentIntent = null;
                    // Step 3: process the collected payment.
                    Terminal.getInstance().processPayment(collectedIntent, new PaymentIntentCallback() {
                        @Override
                        public void onSuccess(@Nonnull PaymentIntent confirmedIntent) {
                            WritableMap intentMap = Arguments.createMap();
                            String currency = "";
                            if (options != null && options.hasKey(CURRENCY)) {
                                currency = options.getString(CURRENCY);
                            }
                            intentMap.putMap(INTENT, serializePaymentIntent(confirmedIntent, currency));
                            sendEventWithName(EVENT_PAYMENT_CREATION, intentMap);
                        }
                        @Override
                        public void onFailure(@Nonnull TerminalException e) {
                            WritableMap errorMap = Arguments.createMap();
                            errorMap.putString(ERROR, e.getErrorMessage());
                            errorMap.putInt(CODE, e.getErrorCode().ordinal());
                            String currency = "";
                            if (options != null && options.hasKey(CURRENCY)) {
                                currency = options.getString(CURRENCY);
                            }
                            // On processing failure, report the intent as collected (pre-processing).
                            errorMap.putMap(INTENT, serializePaymentIntent(collectedIntent, currency));
                            sendEventWithName(EVENT_PAYMENT_CREATION, errorMap);
                        }
                    });
                }
                @Override
                public void onFailure(@Nonnull TerminalException e) {
                    pendingCreatePaymentIntent = null;
                    WritableMap collectionErrorMap = Arguments.createMap();
                    collectionErrorMap.putString(ERROR, e.getErrorMessage());
                    collectionErrorMap.putInt(CODE, e.getErrorCode().ordinal());
                    String currency = "";
                    if (options != null && options.hasKey(CURRENCY)) {
                        currency = options.getString(CURRENCY);
                    }
                    // On collection failure, report the intent as created/retrieved (pre-collection).
                    collectionErrorMap.putMap(INTENT, serializePaymentIntent(paymentIntent, currency));
                    sendEventWithName(EVENT_PAYMENT_CREATION, collectionErrorMap);
                }
            });
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            // Step 1 failed: no intent exists yet, so only error details are sent.
            WritableMap paymentCreationMap = Arguments.createMap();
            paymentCreationMap.putString(ERROR, e.getErrorMessage());
            paymentCreationMap.putInt(CODE, e.getErrorCode().ordinal());
            sendEventWithName(EVENT_PAYMENT_CREATION, paymentCreationMap);
        }
    };
    // Step 1: a non-blank PAYMENT_INTENT option is passed to retrievePaymentIntent
    // (used elsewhere with a client secret — see retrievePaymentIntent); otherwise
    // a new intent is created from the remaining options.
    String paymentIntent = null;
    if(options.hasKey(PAYMENT_INTENT))
        paymentIntent = options.getString(PAYMENT_INTENT);
    if (paymentIntent != null && !paymentIntent.trim().isEmpty()) {
        Terminal.getInstance().retrievePaymentIntent(paymentIntent, paymentIntentCallback);
    } else {
        PaymentIntentParameters.Builder paymentIntentParamBuilder = getPaymentParams(options);
        Terminal.getInstance().createPaymentIntent(paymentIntentParamBuilder.build(), paymentIntentCallback);
    }
}
/**
 * Translates the JS options map into Stripe {@code PaymentIntentParameters}.
 * Only keys present in the map are applied; everything else keeps SDK defaults.
 * A null map yields an empty builder.
 */
private PaymentIntentParameters.Builder getPaymentParams(ReadableMap options){
    PaymentIntentParameters.Builder paymentIntentParamBuilder = new PaymentIntentParameters.Builder();
    if(options!=null) {
        if (options.hasKey(AMOUNT)) {
            paymentIntentParamBuilder.setAmount(options.getInt(AMOUNT));
        }
        if (options.hasKey(CURRENCY)) {
            paymentIntentParamBuilder.setCurrency(options.getString(CURRENCY));
        }
        if (options.hasKey(APPLICATION_FEE_AMOUNT)) {
            paymentIntentParamBuilder.setApplicationFeeAmount(options.getInt(APPLICATION_FEE_AMOUNT));
        }
        if (options.hasKey(ON_BEHALF_OF)) {
            paymentIntentParamBuilder.setOnBehalfOf(options.getString(ON_BEHALF_OF));
        }
        if (options.hasKey(TRANSFER_DATA_DESTINATION)) {
            paymentIntentParamBuilder.setTransferDataDestination(options.getString(TRANSFER_DATA_DESTINATION));
        }
        if (options.hasKey(TRANSFER_GROUP)) {
            // Bug fix: previously the constant key TRANSFER_GROUP itself was passed
            // instead of the caller-supplied value from the options map.
            paymentIntentParamBuilder.setTransferGroup(options.getString(TRANSFER_GROUP));
        }
        if (options.hasKey(CUSTOMER)) {
            paymentIntentParamBuilder.setCustomer(options.getString(CUSTOMER));
        }
        if (options.hasKey(DESCRIPTION)) {
            paymentIntentParamBuilder.setDescription(options.getString(DESCRIPTION));
        }
        if (options.hasKey(STATEMENT_DESCRIPTOR)) {
            paymentIntentParamBuilder.setStatementDescriptor(options.getString(STATEMENT_DESCRIPTOR));
        }
        if (options.hasKey(RECEIPT_EMAIL)) {
            paymentIntentParamBuilder.setReceiptEmail(options.getString(RECEIPT_EMAIL));
        }
        if (options.hasKey(METADATA)) {
            // Copy the JS metadata map into a plain String->String HashMap for the SDK.
            ReadableMap map = options.getMap(METADATA);
            HashMap<String, String> metaDataMap = new HashMap<>();
            if (map != null) {
                ReadableMapKeySetIterator iterator = map.keySetIterator();
                while (iterator.hasNextKey()) {
                    String key = iterator.nextKey();
                    String val = map.getString(key);
                    metaDataMap.put(key, val);
                }
            }
            paymentIntentParamBuilder.setMetadata(metaDataMap);
        }
    }
    return paymentIntentParamBuilder;
}
@ReactMethod
// Creates a PaymentIntent from the JS options and reports the outcome via
// EVENT_PAYMENT_INTENT_CREATION. The intent is cached in lastPaymentIntent
// for the follow-up collect/process/cancel calls.
public void createPaymentIntent(ReadableMap options) {
    // Remember the requested currency so later events can echo it back.
    if (options != null && options.hasKey(CURRENCY)) {
        lastCurrency = options.getString(CURRENCY);
    }
    PaymentIntentParameters.Builder paramsBuilder = getPaymentParams(options);
    Terminal.getInstance().createPaymentIntent(paramsBuilder.build(), new PaymentIntentCallback() {
        @Override
        public void onSuccess(@Nonnull PaymentIntent paymentIntent) {
            lastPaymentIntent = paymentIntent;
            WritableMap response = Arguments.createMap();
            response.putMap(INTENT, serializePaymentIntent(paymentIntent, lastCurrency)); //No currency for android
            sendEventWithName(EVENT_PAYMENT_INTENT_CREATION, response);
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            lastPaymentIntent = null;
            WritableMap response = Arguments.createMap();
            response.putString(ERROR, e.getErrorMessage());
            sendEventWithName(EVENT_PAYMENT_INTENT_CREATION, response);
        }
    });
}
@ReactMethod
// Retrieves a PaymentIntent by client secret and caches it in lastPaymentIntent.
// Outcome is reported via EVENT_PAYMENT_INTENT_RETRIEVAL.
public void retrievePaymentIntent(String clientSecret) {
    if (clientSecret == null) {
        WritableMap response = Arguments.createMap();
        response.putString(ERROR, "Client secret cannot be null");
        sendEventWithName(EVENT_PAYMENT_INTENT_RETRIEVAL, response);
        return;
    }
    Terminal.getInstance().retrievePaymentIntent(clientSecret, new PaymentIntentCallback() {
        @Override
        public void onSuccess(@Nonnull PaymentIntent paymentIntent) {
            lastPaymentIntent = paymentIntent;
            WritableMap response = Arguments.createMap();
            response.putMap(INTENT, serializePaymentIntent(paymentIntent, "")); //No currency for android
            sendEventWithName(EVENT_PAYMENT_INTENT_RETRIEVAL, response);
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            lastPaymentIntent = null;
            WritableMap response = Arguments.createMap();
            response.putString(ERROR, e.getErrorMessage());
            sendEventWithName(EVENT_PAYMENT_INTENT_RETRIEVAL, response);
        }
    });
}
@ReactMethod
// Cancels the cached PaymentIntent and reports via EVENT_PAYMENT_INTENT_CANCEL.
public void cancelPaymentIntent(){
    // Bug fix: without an active intent the SDK call and the error-path
    // serializePaymentIntent(lastPaymentIntent, ...) would both NPE.
    if (lastPaymentIntent == null) {
        WritableMap errorMap = Arguments.createMap();
        errorMap.putString(ERROR, "No active payment intent to cancel");
        sendEventWithName(EVENT_PAYMENT_INTENT_CANCEL, errorMap);
        return;
    }
    Terminal.getInstance().cancelPaymentIntent(lastPaymentIntent, new PaymentIntentCallback() {
        @Override
        public void onSuccess(@Nonnull PaymentIntent paymentIntent) {
            WritableMap paymentIntentCancelMap = Arguments.createMap();
            paymentIntentCancelMap.putMap(INTENT, serializePaymentIntent(paymentIntent, lastCurrency));
            sendEventWithName(EVENT_PAYMENT_INTENT_CANCEL, paymentIntentCancelMap);
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            WritableMap errorMap = Arguments.createMap();
            errorMap.putString(ERROR, e.getErrorMessage());
            errorMap.putInt(CODE, e.getErrorCode().ordinal());
            errorMap.putMap(INTENT, serializePaymentIntent(lastPaymentIntent, lastCurrency));
            sendEventWithName(EVENT_PAYMENT_INTENT_CANCEL, errorMap);
        }
    });
}
@ReactMethod
// Processes the cached PaymentIntent and reports via EVENT_PROCESS_PAYMENT.
public void processPayment(){
    // Bug fix: guard against a missing intent instead of NPE-ing in the SDK call
    // and in the error-path serialization.
    if (lastPaymentIntent == null) {
        WritableMap errorMap = Arguments.createMap();
        errorMap.putString(ERROR, "No payment intent to process. Create or retrieve one first.");
        sendEventWithName(EVENT_PROCESS_PAYMENT, errorMap);
        return;
    }
    Terminal.getInstance().processPayment(lastPaymentIntent, new PaymentIntentCallback() {
        @Override
        public void onSuccess(@Nonnull PaymentIntent paymentIntent) {
            lastPaymentIntent = paymentIntent;
            WritableMap processPaymentMap = Arguments.createMap();
            processPaymentMap.putMap(INTENT, serializePaymentIntent(paymentIntent, lastCurrency));
            sendEventWithName(EVENT_PROCESS_PAYMENT, processPaymentMap);
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            WritableMap errorMap = Arguments.createMap();
            errorMap.putString(ERROR, e.getErrorMessage());
            errorMap.putInt(CODE, e.getErrorCode().ordinal());
            // Bug fix: getApiError() can be null for non-API failures (e.g. reader
            // errors), which previously caused an NPE while building the error event.
            if (e.getApiError() != null) {
                errorMap.putString(DECLINE_CODE, e.getApiError().getDeclineCode());
            }
            errorMap.putMap(INTENT, serializePaymentIntent(lastPaymentIntent, lastCurrency));
            sendEventWithName(EVENT_PROCESS_PAYMENT, errorMap);
        }
    });
}
@ReactMethod
// Collects a payment method for the cached PaymentIntent; the in-flight
// cancelable is stored so abortCreatePayment() can cancel it. Outcome is
// reported via EVENT_PAYMENT_METHOD_COLLECTION.
public void collectPaymentMethod() {
    pendingCreatePaymentIntent = Terminal.getInstance().collectPaymentMethod(lastPaymentIntent, this, new PaymentIntentCallback() {
        @Override
        public void onSuccess(@Nonnull PaymentIntent paymentIntent) {
            pendingCreatePaymentIntent = null;
            lastPaymentIntent = paymentIntent;
            WritableMap response = Arguments.createMap();
            response.putMap(INTENT, serializePaymentIntent(paymentIntent, lastCurrency));
            sendEventWithName(EVENT_PAYMENT_METHOD_COLLECTION, response);
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            pendingCreatePaymentIntent = null;
            WritableMap response = Arguments.createMap();
            response.putString(ERROR, e.getErrorMessage());
            response.putInt(CODE, e.getErrorCode().ordinal());
            response.putMap(INTENT, serializePaymentIntent(lastPaymentIntent, lastCurrency));
            sendEventWithName(EVENT_PAYMENT_METHOD_COLLECTION, response);
        }
    });
}
@ReactMethod
// Connects to a previously-discovered reader identified by serial number.
// Outcome is reported via EVENT_READER_CONNECTION.
public void connectReader(String serialNumber) {
    // Resolve the reader from the most recent discovery snapshot.
    Reader match = null;
    if (discoveredReadersList != null && discoveredReadersList.size() > 0) {
        for (Reader candidate : discoveredReadersList) {
            if (candidate != null && candidate.getSerialNumber().equals(serialNumber)) {
                match = candidate;
            }
        }
    }
    if (match == null) {
        WritableMap errorMap = Arguments.createMap();
        errorMap.putString(ERROR, "No reader found with provided serial number");
        sendEventWithName(EVENT_READER_CONNECTION, errorMap);
        return;
    }
    Terminal.getInstance().connectReader(match, new ReaderCallback() {
        @Override
        public void onSuccess(@Nonnull Reader reader) {
            sendEventWithName(EVENT_READER_CONNECTION, serializeReader(reader));
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            WritableMap errorMap = Arguments.createMap();
            errorMap.putString(ERROR, e.getErrorMessage());
            sendEventWithName(EVENT_READER_CONNECTION, errorMap);
        }
    });
}
@ReactMethod
// Disconnects the currently connected reader, if any. Always emits
// EVENT_READER_DISCONNECTION_COMPLETION (with an error field on failure).
public void disconnectReader() {
    // Nothing to disconnect: report completion immediately.
    if (Terminal.getInstance().getConnectedReader() == null) {
        sendEventWithName(EVENT_READER_DISCONNECTION_COMPLETION, Arguments.createMap());
        return;
    }
    Terminal.getInstance().disconnectReader(new Callback() {
        @Override
        public void onSuccess() {
            sendEventWithName(EVENT_READER_DISCONNECTION_COMPLETION, Arguments.createMap());
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            WritableMap errorMap = Arguments.createMap();
            errorMap.putString(ERROR, e.getErrorMessage());
            sendEventWithName(EVENT_READER_DISCONNECTION_COMPLETION, errorMap);
        }
    });
}
@ReactMethod
// Emits the ordinal of the last ReaderEvent seen by onReportReaderEvent().
public void getLastReaderEvent(){
    // Integer.valueOf replaces the deprecated `new Integer(...)` constructor.
    sendEventWithName(EVENT_LAST_READER_EVENT, Integer.valueOf(lastReaderEvent.ordinal()));
}
@ReactMethod
// Emits the currently connected reader via EVENT_CONNECTED_READER.
// NOTE(review): getConnectedReader() returns null when nothing is connected —
// confirm serializeReader handles a null argument.
public void getConnectedReader() {
    sendEventWithName(EVENT_CONNECTED_READER, serializeReader(Terminal.getInstance().getConnectedReader()));
}
@ReactMethod
// Cancels an in-flight reader discovery. Always emits
// EVENT_ABORT_DISCOVER_READER_COMPLETION (with an error field on failure).
public void abortDiscoverReaders() {
    // No discovery in flight (or it already finished): report completion right away.
    if (pendingDiscoverReaders == null || pendingDiscoverReaders.isCompleted()) {
        sendEventWithName(EVENT_ABORT_DISCOVER_READER_COMPLETION, Arguments.createMap());
        return;
    }
    pendingDiscoverReaders.cancel(new Callback() {
        @Override
        public void onSuccess() {
            pendingDiscoverReaders = null;
            sendEventWithName(EVENT_ABORT_DISCOVER_READER_COMPLETION, Arguments.createMap());
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            WritableMap errorMap = Arguments.createMap();
            errorMap.putString(ERROR, e.getErrorMessage());
            sendEventWithName(EVENT_ABORT_DISCOVER_READER_COMPLETION, errorMap);
        }
    });
}
@ReactMethod
// Cancels an in-flight payment-method collection. Always emits
// EVENT_ABORT_CREATE_PAYMENT_COMPLETION (with an error field on failure).
public void abortCreatePayment() {
    // Nothing pending (or already finished): report completion right away.
    if (pendingCreatePaymentIntent == null || pendingCreatePaymentIntent.isCompleted()) {
        sendEventWithName(EVENT_ABORT_CREATE_PAYMENT_COMPLETION, Arguments.createMap());
        return;
    }
    pendingCreatePaymentIntent.cancel(new Callback() {
        @Override
        public void onSuccess() {
            pendingCreatePaymentIntent = null;
            sendEventWithName(EVENT_ABORT_CREATE_PAYMENT_COMPLETION, Arguments.createMap());
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            WritableMap errorMap = Arguments.createMap();
            errorMap.putString(ERROR, e.getErrorMessage());
            sendEventWithName(EVENT_ABORT_CREATE_PAYMENT_COMPLETION, errorMap);
        }
    });
}
@ReactMethod
// Drops the SDK's cached connection credentials; a fresh token will be requested
// through fetchConnectionToken() on the next operation that needs one.
public void clearCachedCredentials(){
    Terminal.getInstance().clearCachedCredentials();
}
@ReactMethod
// Cancels an in-flight reader software update install. Always emits
// EVENT_ABORT_INSTALL_COMPLETION (with an error field on failure).
public void abortInstallUpdate() {
    // No install in flight (or already finished): report completion right away.
    if (pendingInstallUpdate == null || pendingInstallUpdate.isCompleted()) {
        sendEventWithName(EVENT_ABORT_INSTALL_COMPLETION, Arguments.createMap());
        return;
    }
    pendingInstallUpdate.cancel(new Callback() {
        @Override
        public void onSuccess() {
            pendingInstallUpdate = null;
            sendEventWithName(EVENT_ABORT_INSTALL_COMPLETION, Arguments.createMap());
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            WritableMap errorMap = Arguments.createMap();
            errorMap.putString(ERROR, e.getErrorMessage());
            sendEventWithName(EVENT_ABORT_INSTALL_COMPLETION, errorMap);
        }
    });
}
@ReactMethod
// Installs the reader software update previously fetched by checkForUpdate().
// NOTE(review): readerSoftwareUpdate may be null if checkForUpdate() was never
// called or found no update — confirm the SDK tolerates a null argument.
public void installUpdate(){
    pendingInstallUpdate = Terminal.getInstance().installUpdate(readerSoftwareUpdate,this, new Callback() {
        @Override
        public void onSuccess() {
            sendEventWithName(EVENT_UPDATE_INSTALL,Arguments.createMap());
            // Consumed: a new checkForUpdate() is required before the next install.
            readerSoftwareUpdate = null;
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            WritableMap errorMap = Arguments.createMap();
            errorMap.putString(ERROR,e.getErrorMessage());
            sendEventWithName(EVENT_UPDATE_INSTALL,errorMap);
        }
    });
}
@ReactMethod
// Queries for an available reader software update and emits EVENT_UPDATE_CHECK.
public void checkForUpdate() {
    Terminal.getInstance().checkForUpdate(new ReaderSoftwareUpdateCallback() {
        @Override
        public void onSuccess(@Nullable ReaderSoftwareUpdate readerSoftwareUpdate) {
            // Keep the handle so installUpdate() can apply it later.
            RNStripeTerminalModule.this.readerSoftwareUpdate = readerSoftwareUpdate;
            // NOTE(review): the update is @Nullable — confirm serializeUpdate handles null.
            sendEventWithName(EVENT_UPDATE_CHECK, serializeUpdate(readerSoftwareUpdate));
        }
        @Override
        public void onFailure(@Nonnull TerminalException e) {
            WritableMap errorMap = Arguments.createMap();
            errorMap.putString(ERROR, e.getErrorMessage());
            sendEventWithName(EVENT_UPDATE_CHECK, errorMap);
        }
    });
}
@ReactMethod
// Reports the terminal's current connection status to JS.
public void getConnectionStatus(){
    ConnectionStatus status = Terminal.getInstance().getConnectionStatus();
    WritableMap statusMap = Arguments.createMap();
    statusMap.putInt(EVENT_CONNECTION_STATUS, status.ordinal());
    // Bug fix: the map was built but never emitted, so JS never received the status.
    sendEventWithName(EVENT_CONNECTION_STATUS, statusMap);
}
@ReactMethod
// Reports the terminal's current payment status to JS.
public void getPaymentStatus(){
    PaymentStatus status = Terminal.getInstance().getPaymentStatus();
    WritableMap statusMap = Arguments.createMap();
    statusMap.putInt(EVENT_PAYMENT_STATUS, status.ordinal());
    // Bug fix: the map was built but never emitted, so JS never received the status.
    sendEventWithName(EVENT_PAYMENT_STATUS, statusMap);
}
@Override
// SDK discovery listener: caches the latest reader snapshot (used by
// connectReader() to resolve serial numbers) and forwards it to JS.
public void onUpdateDiscoveredReaders(@Nonnull List<? extends Reader> list) {
    discoveredReadersList = list;
    WritableArray serialized = Arguments.createArray();
    for (Reader reader : list) {
        if (reader == null) {
            continue;
        }
        serialized.pushMap(serializeReader(reader));
    }
    sendEventWithName(EVENT_READERS_DISCOVERED, serialized);
}
@Override
// SDK hook: asks JS for a fresh connection token. The callback is parked in
// pendingConnectionTokenCallback and completed later by setConnectionToken().
public void fetchConnectionToken(@Nonnull ConnectionTokenCallback connectionTokenCallback) {
    pendingConnectionTokenCallback = connectionTokenCallback;
    sendEventWithName(EVENT_REQUEST_CONNECTION_TOKEN,Arguments.createMap());
}
@Override
// SDK listener: forwards the reader's low-battery warning to JS (no payload).
public void onReportLowBatteryWarning() {
    sendEventWithName(EVENT_DID_REPORT_LOW_BATTERY_WARNING,Arguments.createMap());
}
@Override
// SDK listener: forwards connection-status changes to JS as { status: ordinal }.
public void onConnectionStatusChange(@Nonnull ConnectionStatus status) {
    WritableMap payload = Arguments.createMap();
    payload.putInt(STATUS, status.ordinal());
    sendEventWithName(EVENT_DID_CHANGE_CONNECTION_STATUS, payload);
}
@Override
// SDK listener: records the event (for getLastReaderEvent()) and forwards it
// to JS as { event: ordinal, info: {} }.
public void onReportReaderEvent(@Nonnull ReaderEvent event) {
    lastReaderEvent = event;
    WritableMap payload = Arguments.createMap();
    payload.putInt(EVENT, event.ordinal());
    payload.putMap(INFO, Arguments.createMap());
    sendEventWithName(EVENT_DID_REPORT_READER_EVENT, payload);
}
@Override
// SDK listener: forwards payment-status changes to JS as { status: ordinal }.
public void onPaymentStatusChange(@Nonnull PaymentStatus status) {
    WritableMap payload = Arguments.createMap();
    payload.putInt(STATUS, status.ordinal());
    sendEventWithName(EVENT_DID_CHANGE_PAYMENT_STATUS, payload);
}
@Override
// SDK listener: notifies JS that the reader dropped unexpectedly, passing the
// serialized reader so the app can attempt a reconnect.
public void onUnexpectedReaderDisconnect(@Nonnull Reader reader) {
    sendEventWithName(EVENT_DID_REPORT_UNEXPECTED_READER_DISCONNECT,serializeReader(reader));
}
@Override
// SDK listener: tells JS what input the reader expects, as { text: "..." }.
public void onRequestReaderInput(@Nonnull ReaderInputOptions readerInputOptions) {
    WritableMap payload = Arguments.createMap();
    payload.putString(TEXT, readerInputOptions.toString());
    sendEventWithName(EVENT_DID_REQUEST_READER_INPUT, payload);
}
@Override
// SDK listener: maps the reader display message to the string name the JS side
// expects. Previously a HashMap was rebuilt on every callback; a switch avoids
// the per-call allocations while keeping the same names. Unmapped messages
// still forward null, matching the old map-miss behavior.
public void onRequestReaderDisplayMessage(@Nonnull ReaderDisplayMessage readerDisplayMessage) {
    String messageName;
    switch (readerDisplayMessage) {
        case RETRY_CARD:
            messageName = "RetryCard";
            break;
        case INSERT_CARD:
            messageName = "InsertCard";
            break;
        case INSERT_OR_SWIPE_CARD:
            messageName = "InsertOrSwipeCard";
            break;
        case SWIPE_CARD:
            messageName = "SwipeCard";
            break;
        case REMOVE_CARD:
            messageName = "RemoveCard";
            break;
        case MULTIPLE_CONTACTLESS_CARDS_DETECTED:
            messageName = "MultipleContactlessCardsDetected";
            break;
        case TRY_ANOTHER_READ_METHOD:
            messageName = "TryAnotherReadMethod";
            break;
        case TRY_ANOTHER_CARD:
            messageName = "TryAnotherCard";
            break;
        default:
            messageName = null;
            break;
    }
    sendEventWithName(EVENT_DID_REQUEST_READER_DISPLAY_MESSAGE, messageName);
}
@Override
// SDK listener: forwards install progress (0.0–1.0 float from the SDK) to JS.
public void onReportReaderSoftwareUpdateProgress(float v) {
    // Float.valueOf replaces the deprecated `new Float(...)` constructor.
    sendEventWithName(EVENT_READER_SOFTWARE_UPDATE_PROGRESS, Float.valueOf(v));
}
}
|
/*******************************************************************************
* Copyright (c) 2013-2017 Contributors to the Eclipse Foundation
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License,
* Version 2.0 which accompanies this distribution and is available at
* http://www.apache.org/licenses/LICENSE-2.0.txt
******************************************************************************/
package mil.nga.giat.geowave.analytic.mapreduce.clustering;
import java.io.IOException;
import java.util.Iterator;
import java.util.UUID;
import mil.nga.giat.geowave.adapter.vector.FeatureDataAdapter;
import mil.nga.giat.geowave.analytic.AnalyticFeature;
import mil.nga.giat.geowave.analytic.ScopedJobConfiguration;
import mil.nga.giat.geowave.analytic.clustering.ClusteringUtils;
import mil.nga.giat.geowave.analytic.extract.DimensionExtractor;
import mil.nga.giat.geowave.analytic.extract.EmptyDimensionExtractor;
import mil.nga.giat.geowave.analytic.param.ExtractParameters;
import mil.nga.giat.geowave.analytic.param.GlobalParameters;
import mil.nga.giat.geowave.core.index.StringUtils;
import mil.nga.giat.geowave.mapreduce.GeoWaveConfiguratorBase;
import mil.nga.giat.geowave.mapreduce.GeoWaveReducer;
import mil.nga.giat.geowave.mapreduce.input.GeoWaveInputKey;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.mapreduce.ReduceContext;
import org.apache.hadoop.mapreduce.Reducer;
import org.geotools.feature.type.BasicFeatureTypes;
import org.opengis.feature.simple.SimpleFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.vividsolutions.jts.geom.Geometry;
/**
* Remove duplicate input objects and write out as a simple feature with
* geometry projected onto CRS EPSG:4326. The output feature contains the ID of
 * the originating object. The intent is to create a lightweight, uniform object
* that reuses GeoWave data formats to feed analytic processes.
*
* If the input object does not require adjustment after de-duplication, use
* {@link mil.nga.giat.geowave.accumulo.mapreduce.dedupe.GeoWaveDedupReducer}
*
* OutputFeature Attributes, see
* {@link mil.nga.giat.geowave.analytic.AnalyticFeature.ClusterFeatureAttribute}
*
* Context configuration parameters include:
*
* @formatter:off
*
*
* "SimpleFeatureOutputReducer.Extract.DimensionExtractClass" ->
* {@link DimensionExtractor} to extract non-geometric dimensions
*
* "SimpleFeatureOutputReducer.Extract.OutputDataTypeId" -> the
* name of the output SimpleFeature data type
*
* "SimpleFeatureOutputReducer.Global.BatchId" ->the id of the
* batch; defaults to current time in millis (for range
* comparisons)
*
*
* @formatter:on
*/
public class SimpleFeatureOutputReducer extends GeoWaveReducer {

    protected static final Logger LOGGER = LoggerFactory.getLogger(SimpleFeatureOutputReducer.class);

    // Populated from the job configuration in setup().
    protected DimensionExtractor<Object> dimExtractor;
    protected String outputDataTypeID;
    protected String batchID;
    protected String groupID;
    protected FeatureDataAdapter outputAdapter;

    /**
     * De-duplication step: each reduce group holds every copy of one input
     * object, so only the first value is converted and emitted.
     */
    @Override
    protected void reduceNativeValues(
            final GeoWaveInputKey key,
            final Iterable<Object> values,
            final ReduceContext<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, Object> context)
            throws IOException, InterruptedException {
        final Iterator<Object> valIt = values.iterator();
        if (!valIt.hasNext()) {
            return;
        }
        key.setAdapterId(outputAdapter.getAdapterId());
        final SimpleFeature feature = getSimpleFeature(key, valIt.next());
        context.write(key, feature);
    }

    /**
     * Builds the lightweight output feature: geometry plus any extra dimensions
     * pulled by the configured extractor, tagged with the batch/group IDs and
     * the originating object's data ID.
     */
    private SimpleFeature getSimpleFeature(final GeoWaveInputKey key, final Object entry) {
        final Geometry geometry = dimExtractor.getGeometry(entry);
        final double[] extraDims = dimExtractor.getDimensions(entry);
        final String inputID = StringUtils.stringFromBinary(key.getDataId().getBytes());
        return AnalyticFeature.createGeometryFeature(
                outputAdapter.getFeatureType(),
                batchID,
                inputID,
                inputID,
                groupID,
                0.0,
                geometry,
                dimExtractor.getDimensionNames(),
                extraDims,
                1,
                1,
                0);
    }

    /**
     * Reads the output type ID, batch/group IDs and dimension extractor from the
     * scoped job configuration, then builds the output feature adapter.
     *
     * @throws IOException if the configured extractor class cannot be instantiated
     */
    @SuppressWarnings("unchecked")
    @Override
    protected void setup(
            final Reducer<GeoWaveInputKey, ObjectWritable, GeoWaveInputKey, ObjectWritable>.Context context)
            throws IOException, InterruptedException {
        super.setup(context);
        final ScopedJobConfiguration config = new ScopedJobConfiguration(
                context.getConfiguration(),
                SimpleFeatureOutputReducer.class);
        outputDataTypeID = config.getString(
                ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID,
                "reduced_features");
        // Batch/group IDs default to random UUIDs when not configured.
        batchID = config.getString(
                GlobalParameters.Global.BATCH_ID,
                UUID.randomUUID().toString());
        groupID = config.getString(
                ExtractParameters.Extract.GROUP_ID,
                UUID.randomUUID().toString());
        try {
            dimExtractor = config.getInstance(
                    ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,
                    DimensionExtractor.class,
                    EmptyDimensionExtractor.class);
        }
        catch (final Exception e1) {
            final String confKey = GeoWaveConfiguratorBase.enumToConfKey(
                    SimpleFeatureOutputReducer.class,
                    ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS);
            LOGGER.warn("Failed to instantiate " + confKey, e1);
            throw new IOException("Invalid configuration for " + confKey);
        }
        outputAdapter = AnalyticFeature.createGeometryFeatureAdapter(
                outputDataTypeID,
                dimExtractor.getDimensionNames(),
                config.getString(
                        ExtractParameters.Extract.DATA_NAMESPACE_URI,
                        BasicFeatureTypes.DEFAULT_NAMESPACE),
                ClusteringUtils.CLUSTERING_CRS);
    }
}
|
package io.gonative.android;
import android.Manifest;
import android.annotation.TargetApi;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Message;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.GeolocationPermissions;
import android.webkit.JsResult;
import android.webkit.PermissionRequest;
import android.webkit.ValueCallback;
import android.webkit.WebChromeClient;
import android.webkit.WebView;
import android.widget.RelativeLayout;
import android.widget.Toast;
import java.util.ArrayList;
import io.gonative.android.library.AppConfig;
/**
* Created by weiyin on 2/2/15.
* Copyright 2014 GoNative.io LLC
*/
class GoNativeWebChromeClient extends WebChromeClient {
    private MainActivity mainActivity;
    private UrlNavigation urlNavigation;
    private View customView;              // view shown while in HTML5 fullscreen mode
    private CustomViewCallback callback;  // notified when fullscreen is dismissed
    private boolean isFullScreen = false;

    public GoNativeWebChromeClient(MainActivity mainActivity, UrlNavigation urlNavigation) {
        this.mainActivity = mainActivity;
        this.urlNavigation = urlNavigation;
    }

    /** Shows JS alert() as a toast instead of a dialog; always confirms the result. */
    @Override
    public boolean onJsAlert(WebView view, String url, String message, JsResult result){
        Toast.makeText(mainActivity, message, Toast.LENGTH_LONG).show();
        result.confirm();
        return true;
    }

    /** Bridges the web geolocation prompt to the app's runtime-permission flow. */
    @Override
    public void onGeolocationPermissionsShowPrompt(final String origin, final GeolocationPermissions.Callback callback) {
        // Deny outright when the app config does not allow geolocation.
        if (!AppConfig.getInstance(mainActivity).usesGeolocation) {
            callback.invoke(origin, false, false);
            return;
        }
        mainActivity.getRuntimeGeolocationPermission(new MainActivity.GeolocationPermissionCallback() {
            @Override
            public void onResult(boolean granted) {
                // only retain if granted
                callback.invoke(origin, granted, granted);
            }
        });
    }

    /** Enters HTML5 fullscreen: parks the custom view in the fullscreen layout. */
    @Override
    public void onShowCustomView(View view, CustomViewCallback callback) {
        RelativeLayout fullScreen = this.mainActivity.getFullScreenLayout();
        if (fullScreen == null) return;
        this.customView = view;
        this.callback = callback;
        this.isFullScreen = true;
        fullScreen.setVisibility(View.VISIBLE);
        fullScreen.addView(view, new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
                ViewGroup.LayoutParams.MATCH_PARENT));
        this.mainActivity.toggleFullscreen(this.isFullScreen);
    }

    /** Leaves HTML5 fullscreen and notifies the WebView's callback exactly once. */
    @Override
    public void onHideCustomView() {
        this.customView = null;
        this.isFullScreen = false;
        RelativeLayout fullScreen = this.mainActivity.getFullScreenLayout();
        if (fullScreen != null) {
            fullScreen.setVisibility(View.INVISIBLE);
            fullScreen.removeAllViews();
        }
        if (this.callback != null) {
            this.callback.onCustomViewHidden();
            // Bug fix: clear the reference so a repeated call cannot notify a
            // stale CustomViewCallback.
            this.callback = null;
        }
        this.mainActivity.toggleFullscreen(this.isFullScreen);
    }

    /** @return true if we were in fullscreen and have now exited it. */
    public boolean exitFullScreen() {
        if (this.isFullScreen) {
            onHideCustomView();
            return true;
        } else {
            return false;
        }
    }

    @Override
    public void onCloseWindow(WebView window) {
        if (mainActivity.isNotRoot()) mainActivity.finish();
    }

    @Override
    @TargetApi(21)
    // This method was added in Lollipop
    public boolean onShowFileChooser(WebView webView, ValueCallback<Uri[]> filePathCallback, FileChooserParams fileChooserParams) {
        // make sure there is no existing message
        mainActivity.cancelFileUpload();
        boolean multiple = false;
        switch (fileChooserParams.getMode()) {
            case FileChooserParams.MODE_OPEN:
                multiple = false;
                break;
            case FileChooserParams.MODE_OPEN_MULTIPLE:
                multiple = true;
                break;
            case FileChooserParams.MODE_SAVE:
            default:
                // MODE_SAVE is unimplemented
                filePathCallback.onReceiveValue(null);
                return false;
        }
        mainActivity.setUploadMessageLP(filePathCallback);
        return urlNavigation.chooseFileUpload(fileChooserParams.getAcceptTypes(), multiple);
    }

    // For Android > 4.1
    public void openFileChooser(ValueCallback<Uri> uploadMsg, String acceptType, String capture) {
        // make sure there is no existing message
        mainActivity.cancelFileUpload();
        mainActivity.setUploadMessage(uploadMsg);
        if (acceptType == null) acceptType = "*/*";
        urlNavigation.chooseFileUpload(new String[]{acceptType});
    }

    // Android 3.0 +
    public void openFileChooser(ValueCallback<Uri> uploadMsg, String acceptType) {
        openFileChooser(uploadMsg, acceptType, null);
    }

    //Android 3.0
    public void openFileChooser(ValueCallback<Uri> uploadMsg) {
        openFileChooser(uploadMsg, null, null);
    }

    @Override
    public void onReceivedTitle(WebView view, String title){
        mainActivity.updatePageTitle();
    }

    @Override
    public boolean onCreateWindow(WebView view, boolean isDialog, boolean isUserGesture, Message resultMsg) {
        return urlNavigation.createNewWindow(resultMsg);
    }

    /** Translates WebView media permission requests into Android runtime permissions. */
    @Override
    @TargetApi(21)
    public void onPermissionRequest(final PermissionRequest request) {
        String[] resources = request.getResources();
        ArrayList<String> permissions = new ArrayList<>();
        for (int i = 0; i < resources.length; i++) {
            if (resources[i].equals(PermissionRequest.RESOURCE_AUDIO_CAPTURE)) {
                permissions.add(Manifest.permission.RECORD_AUDIO);
                permissions.add(Manifest.permission.MODIFY_AUDIO_SETTINGS);
            } else if (resources[i].equals(PermissionRequest.RESOURCE_VIDEO_CAPTURE)) {
                permissions.add(Manifest.permission.CAMERA);
            }
        }
        String[] permissionsArray = new String[permissions.size()];
        permissionsArray = permissions.toArray(permissionsArray);
        mainActivity.getPermission(permissionsArray, new MainActivity.PermissionCallback() {
            @Override
            public void onPermissionResult(String[] permissions, int[] grantResults) {
                // Map each granted Android permission back to its WebView resource.
                ArrayList<String> grantedPermissions = new ArrayList<String>();
                for (int i = 0; i < grantResults.length; i++) {
                    if (grantResults[i] != PackageManager.PERMISSION_GRANTED) {
                        continue;
                    }
                    if (permissions[i].equals(Manifest.permission.RECORD_AUDIO)) {
                        grantedPermissions.add(PermissionRequest.RESOURCE_AUDIO_CAPTURE);
                    } else if (permissions[i].equals(Manifest.permission.CAMERA)) {
                        grantedPermissions.add(PermissionRequest.RESOURCE_VIDEO_CAPTURE);
                    }
                }
                if (grantedPermissions.isEmpty()) {
                    request.deny();
                } else {
                    String[] grantedPermissionsArray = new String[grantedPermissions.size()];
                    grantedPermissionsArray = grantedPermissions.toArray(grantedPermissionsArray);
                    request.grant(grantedPermissionsArray);
                }
            }
        });
    }

    @Override
    public void onPermissionRequestCanceled(PermissionRequest request) {
        super.onPermissionRequestCanceled(request);
    }
}
|
package action.test;
import java.util.Date;
import java.util.List;
import org.jessma.mvc.ActionSupport;
import org.jessma.mvc.FormBean;
public class CheckBean3 extends ActionSupport
{
	// Properties targeted by automatic form-bean assembly
	private String firstName;
	private String lastName;
	private Date birthday;
	private boolean gender;
	private int workingAge;
	private List<Integer> interest;
	private List<String> photos;
	// Getter / setter methods must be provided for the auto-assembled properties
	// getters & setters
	// (omitted) ......
	/* **************************************** */
	/* ** Auto-assembly completes before execute() is entered ** */
	// The @FormBean annotation is declared on CheckBean3's entry method 'execute()'.
	// Note: with no annotation parameters, the form is auto-assembled onto
	// CheckBean3's matching properties.
	@FormBean
	@Override
	public String execute()
	{
		return SUCCESS;
	}
	public String getFirstName()
	{
		return firstName;
	}
	public void setFirstName(String firstName)
	{
		this.firstName = firstName;
	}
	public String getLastName()
	{
		return lastName;
	}
	public void setLastName(String lastName)
	{
		this.lastName = lastName;
	}
	public Date getBirthday()
	{
		return birthday;
	}
	public void setBirthday(Date birthday)
	{
		this.birthday = birthday;
	}
	public boolean isGender()
	{
		return gender;
	}
	public void setGender(boolean gender)
	{
		this.gender = gender;
	}
	public int getWorkingAge()
	{
		return workingAge;
	}
	public void setWorkingAge(int workingAge)
	{
		this.workingAge = workingAge;
	}
	public List<Integer> getInterest()
	{
		return interest;
	}
	public void setInterest(List<Integer> interest)
	{
		this.interest = interest;
	}
	public List<String> getPhotos()
	{
		return photos;
	}
	public void setPhotos(List<String> photos)
	{
		this.photos = photos;
	}
}
|
package anon.playground;
import anon.playground.recipes.Recipe;
import anon.playground.recipes.SimpleMultiOutputRecipe;
import anon.playground.recipes.SimpleRandomRecipe;
import anon.playground.recipes.SimpleRecipe;
import org.bukkit.Material;
import org.bukkit.inventory.ItemStack;
/**
 * Manual smoke test: builds one recipe of each implementation and prints the
 * material of every item each recipe crafts.
 */
public class TestingSoftware {

    public static void main(String[] args) {
        Recipe simple = new SimpleRecipe(new ItemStack[]{new ItemStack(Material.COBBLESTONE)}, new ItemStack[]{new ItemStack(Material.COAL)}, 10, new ItemStack(Material.SMOOTH_STONE));
        Recipe random = new SimpleRandomRecipe(new ItemStack[]{new ItemStack(Material.COBBLESTONE)}, new ItemStack[]{new ItemStack(Material.COAL)}, 10, new ItemStack[]{new ItemStack(Material.SMOOTH_STONE), new ItemStack(Material.STONE)});
        Recipe multiOutput = new SimpleMultiOutputRecipe(new ItemStack[]{new ItemStack(Material.COBBLESTONE)}, new ItemStack[]{new ItemStack(Material.COAL)}, 10, new ItemStack[]{new ItemStack(Material.SMOOTH_STONE), new ItemStack(Material.STONE)});
        // print each recipe followed by the type of every crafted item
        for (Recipe current : new Recipe[]{simple, random, multiOutput}) {
            System.out.println("\n> " + current);
            for (ItemStack crafted : current.outputCraftedItem()) {
                System.out.println(crafted.getType());
            }
        }
    }
}
|
/*
* USE - UML based specification environment
* Copyright (C) 1999-2004 Mark Richters, University of Bremen
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
// $Id: ObjectType.java 2740 2011-11-14 16:19:34Z lhamann $
package org.tzi.use.uml.ocl.type;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.tzi.use.uml.mm.MClass;
/**
 * Type of objects. Object types are defined by the class of the object.
 *
 * @version $ProjectVersion: 0.393 $
 * @author Mark Richters
 */
public final class ObjectType extends Type {

    /** The model class defining this type; never reassigned after construction. */
    private final MClass fClass;

    /**
     * Should only be called by MClass.
     *
     * @param cls the class defining this object type
     */
    public ObjectType(MClass cls) {
        fClass = cls;
    }

    /** Returns the model class defining this object type. */
    public MClass cls() {
        return fClass;
    }

    public boolean isTrueObjectType() {
        return true;
    }

    public boolean isObjectType() {
        return true;
    }

    /**
     * Test subtype relation between this and <code>t</code>.
     * An object type is a subtype of another object type when its class is a
     * subclass of the other's class; every object type is a subtype of OclAny.
     */
    public boolean isSubtypeOf(Type t) {
        if (t.isTrueObjectType()) {
            MClass cls2 = ((ObjectType) t).cls();
            return fClass.isSubClassOf(cls2);
        }
        return t.isTrueOclAny();
    }

    /**
     * Returns the set of all supertypes (including this type).
     */
    public Set<Type> allSupertypes() {
        Set<Type> res = new HashSet<Type>();
        res.add(this);
        res.add(TypeFactory.mkOclAny());
        // every parent class contributes its own object type
        for (MClass parent : fClass.allParents()) {
            res.add(TypeFactory.mkObjectType(parent));
        }
        return res;
    }

    /**
     * Return complete printable type name, e.g. 'Set(Bag(Integer))'.
     */
    @Override
    public StringBuilder toString(StringBuilder sb) {
        return sb.append(fClass.name());
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        // instanceof is null-safe and exact here because this class is final
        if (obj instanceof ObjectType) {
            return fClass.equals(((ObjectType) obj).fClass);
        }
        return false;
    }

    @Override
    public int hashCode() {
        return fClass.hashCode();
    }
}
|
/*
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.netflix.eureka.server;
import java.net.URI;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import com.netflix.eureka.EurekaServerContext;
import com.netflix.eureka.EurekaServerContextHolder;
import com.netflix.eureka.registry.PeerAwareInstanceRegistry;
import com.netflix.eureka.registry.PeerAwareInstanceRegistryImpl;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import com.netflix.appinfo.AmazonInfo;
import com.netflix.appinfo.ApplicationInfoManager;
import com.netflix.appinfo.DataCenterInfo;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.config.ConfigurationManager;
import com.netflix.discovery.shared.Application;
import com.netflix.discovery.shared.Pair;
import com.netflix.eureka.cluster.PeerEurekaNode;
import com.netflix.eureka.resources.StatusResource;
import com.netflix.eureka.util.StatusInfo;
/**
 * MVC controller backing the Eureka dashboard: renders the status page and
 * the "last N registered/canceled instances" page.
 *
 * @author Spencer Gibb
 */
@Controller
@RequestMapping("${eureka.dashboard.path:/}")
public class EurekaController {

    @Value("${eureka.dashboard.path:/}")
    private String dashboardPath = "";

    private final ApplicationInfoManager applicationInfoManager;

    public EurekaController(ApplicationInfoManager applicationInfoManager) {
        this.applicationInfoManager = applicationInfoManager;
    }

    /**
     * Renders the main status page with base info, the registered apps and
     * the local instance's status.
     */
    @RequestMapping(method = RequestMethod.GET)
    public String status(HttpServletRequest request, Map<String, Object> model) {
        populateBase(request, model);
        populateApps(model);
        StatusInfo statusInfo;
        try {
            statusInfo = new StatusResource().getStatusInfo();
        }
        catch (Exception e) {
            // fall back to an "unhealthy" placeholder rather than failing the page
            statusInfo = StatusInfo.Builder.newBuilder().isHealthy(false).build();
        }
        model.put("statusInfo", statusInfo);
        populateInstanceInfo(model, statusInfo);
        return "eureka/status";
    }

    /**
     * Renders the page listing the last N canceled and registered instances.
     */
    @RequestMapping(value = "/lastn", method = RequestMethod.GET)
    public String lastn(HttpServletRequest request, Map<String, Object> model) {
        populateBase(request, model);
        PeerAwareInstanceRegistryImpl registry = (PeerAwareInstanceRegistryImpl) getRegistry();
        ArrayList<Map<String, Object>> lastNCanceled = new ArrayList<>();
        List<Pair<Long, String>> list = registry.getLastNCanceledInstances();
        for (Pair<Long, String> entry : list) {
            lastNCanceled.add(registeredInstance(entry.second(), entry.first()));
        }
        model.put("lastNCanceled", lastNCanceled);
        list = registry.getLastNRegisteredInstances();
        ArrayList<Map<String, Object>> lastNRegistered = new ArrayList<>();
        for (Pair<Long, String> entry : list) {
            lastNRegistered.add(registeredInstance(entry.second(), entry.first()));
        }
        model.put("lastNRegistered", lastNRegistered);
        return "eureka/lastn";
    }

    /** Builds a small map describing one instance event (id + timestamp). */
    private Map<String, Object> registeredInstance(String id, long date) {
        HashMap<String, Object> map = new HashMap<>();
        map.put("id", id);
        map.put("date", new Date(date));
        return map;
    }

    /** Populates the model attributes shared by every dashboard page. */
    protected void populateBase(HttpServletRequest request, Map<String, Object> model) {
        model.put("time", new Date());
        model.put("basePath", "/");
        model.put("dashboardPath", this.dashboardPath.equals("/") ? ""
                : this.dashboardPath);
        populateHeader(model);
        populateNavbar(request, model);
    }

    private void populateHeader(Map<String, Object> model) {
        model.put("currentTime", StatusResource.getCurrentTimeAsString());
        model.put("upTime", StatusInfo.getUpTime());
        model.put("environment", ConfigurationManager.getDeploymentContext()
                .getDeploymentEnvironment());
        model.put("datacenter", ConfigurationManager.getDeploymentContext()
                .getDeploymentDatacenter());
        PeerAwareInstanceRegistry registry = getRegistry();
        model.put("registry", registry);
        // the "Thresold" typo is part of the Eureka public API; do not rename
        model.put("isBelowRenewThresold", registry.isBelowRenewThresold() == 1);
        DataCenterInfo info = applicationInfoManager.getInfo().getDataCenterInfo();
        if (info.getName() == DataCenterInfo.Name.Amazon) {
            AmazonInfo amazonInfo = (AmazonInfo) info;
            model.put("amazonInfo", amazonInfo);
            model.put("amiId", amazonInfo.get(AmazonInfo.MetaDataKey.amiId));
            model.put("availabilityZone",
                    amazonInfo.get(AmazonInfo.MetaDataKey.availabilityZone));
            model.put("instanceId", amazonInfo.get(AmazonInfo.MetaDataKey.instanceId));
        }
    }

    private PeerAwareInstanceRegistry getRegistry() {
        return getServerContext().getRegistry();
    }

    private EurekaServerContext getServerContext() {
        return EurekaServerContextHolder.getInstance().getServerContext();
    }

    /** Collects the peer replica hosts and URLs shown in the navbar. */
    private void populateNavbar(HttpServletRequest request, Map<String, Object> model) {
        Map<String, String> replicas = new LinkedHashMap<>();
        List<PeerEurekaNode> list = getServerContext().getPeerEurekaNodes().getPeerNodesView();
        for (PeerEurekaNode node : list) {
            try {
                URI uri = new URI(node.getServiceUrl());
                replicas.put(uri.getHost(), node.getServiceUrl());
            }
            catch (Exception ignored) {
                // a malformed peer URL must not break the dashboard navbar
            }
        }
        model.put("replicas", replicas.entrySet());
    }

    /** Aggregates per-application instance data: AMI/zone counts and status groups. */
    private void populateApps(Map<String, Object> model) {
        List<Application> sortedApplications = getRegistry().getSortedApplications();
        ArrayList<Map<String, Object>> apps = new ArrayList<>();
        for (Application app : sortedApplications) {
            LinkedHashMap<String, Object> appData = new LinkedHashMap<>();
            apps.add(appData);
            appData.put("name", app.getName());
            Map<String, Integer> amiCounts = new HashMap<>();
            Map<InstanceInfo.InstanceStatus, List<Pair<String, String>>> instancesByStatus = new HashMap<>();
            Map<String, Integer> zoneCounts = new HashMap<>();
            for (InstanceInfo info : app.getInstances()) {
                String id = info.getId();
                String url = info.getStatusPageUrl();
                InstanceInfo.InstanceStatus status = info.getStatus();
                String ami = "n/a";
                String zone = "";
                if (info.getDataCenterInfo().getName() == DataCenterInfo.Name.Amazon) {
                    AmazonInfo dcInfo = (AmazonInfo) info.getDataCenterInfo();
                    ami = dcInfo.get(AmazonInfo.MetaDataKey.amiId);
                    zone = dcInfo.get(AmazonInfo.MetaDataKey.availabilityZone);
                }
                // merge() replaces the manual get/null-check/put counting idiom
                amiCounts.merge(ami, 1, Integer::sum);
                zoneCounts.merge(zone, 1, Integer::sum);
                instancesByStatus
                        .computeIfAbsent(status, s -> new ArrayList<>())
                        .add(new Pair<>(id, url));
            }
            appData.put("amiCounts", amiCounts.entrySet());
            appData.put("zoneCounts", zoneCounts.entrySet());
            ArrayList<Map<String, Object>> instanceInfos = new ArrayList<>();
            appData.put("instanceInfos", instanceInfos);
            for (Map.Entry<InstanceInfo.InstanceStatus, List<Pair<String, String>>> entry
                    : instancesByStatus.entrySet()) {
                InstanceInfo.InstanceStatus status = entry.getKey();
                LinkedHashMap<String, Object> instanceData = new LinkedHashMap<>();
                instanceInfos.add(instanceData);
                instanceData.put("status", status);
                ArrayList<Map<String, Object>> instances = new ArrayList<>();
                instanceData.put("instances", instances);
                // the template highlights any status group that is not UP
                instanceData.put("isNotUp", status != InstanceInfo.InstanceStatus.UP);
                for (Pair<String, String> p : entry.getValue()) {
                    LinkedHashMap<String, Object> instance = new LinkedHashMap<>();
                    instances.add(instance);
                    instance.put("id", p.first());
                    instance.put("url", p.second());
                    instance.put("isHref", p.second().startsWith("http"));
                }
            }
        }
        model.put("apps", apps);
    }

    /** Exposes the local instance's key attributes (IP, status, EC2 metadata). */
    private void populateInstanceInfo(Map<String, Object> model, StatusInfo statusInfo) {
        InstanceInfo instanceInfo = statusInfo.getInstanceInfo();
        Map<String, String> instanceMap = new HashMap<>();
        instanceMap.put("ipAddr", instanceInfo.getIPAddr());
        instanceMap.put("status", instanceInfo.getStatus().toString());
        if (instanceInfo.getDataCenterInfo().getName() == DataCenterInfo.Name.Amazon) {
            AmazonInfo info = (AmazonInfo) instanceInfo.getDataCenterInfo();
            instanceMap.put("availability-zone",
                    info.get(AmazonInfo.MetaDataKey.availabilityZone));
            instanceMap.put("public-ipv4", info.get(AmazonInfo.MetaDataKey.publicIpv4));
            instanceMap.put("instance-id", info.get(AmazonInfo.MetaDataKey.instanceId));
            instanceMap.put("public-hostname",
                    info.get(AmazonInfo.MetaDataKey.publicHostname));
            instanceMap.put("ami-id", info.get(AmazonInfo.MetaDataKey.amiId));
            instanceMap.put("instance-type",
                    info.get(AmazonInfo.MetaDataKey.instanceType));
        }
        model.put("instanceInfo", instanceMap);
    }
}
|
package growdy;
import growdy.exceptions.ParseException;
import java.io.IOException;
import org.junit.After;
import org.junit.Test;
import org.junit.Before;
import static org.junit.Assert.*;
/**
 * Unit tests for RowdyLexer: lexes a small arithmetic line in setUp and then
 * verifies the token count, token availability and the id of each token.
 *
 * @author Richard
 */
public class RowdyLexerTest {

    private RowdyLexer lexer;
    private final String[] reserved = {"add", "+", "-"};
    private final String operators = "+ -";

    public RowdyLexerTest() {
    }

    @Before
    public void setUp() throws IOException {
        lexer = new RowdyLexer(reserved, operators);
        try {
            lexer.parseLine("add a + 25 - 1");
        } catch (ParseException ex) {
            fail("Lexer failed to lex");
        }
    }

    @After
    public void tearDown() {
    }

    @Test
    public void tokenCountTest() {
        // "add a + 25 - 1" should produce 7 tokens (including the terminator)
        Integer actualCount = lexer.tokenCount();
        Integer expected = 7;
        assertEquals("The number of Tokens is incorrect", expected, actualCount);
    }

    /**
     * Test of hasToken method, of class RowdyLexer.
     */
    @Test
    public void testHasToken() {
        assertTrue("The number of Tokens is incorrect", lexer.hasToken());
    }

    /**
     * Test of getToken method, of class RowdyLexer.
     */
    @Test
    public void testGetToken() {
        // ids produced for: add, a, +, 25, -, 1, end-of-line
        Integer[] expectedIds = {0, 0, 1, 1, 2, 1, 200};
        for (int i = 0; i < expectedIds.length; i++) {
            Token token = lexer.getToken();
            Integer actualId = token.getID();
            assertEquals("Token id mismatch", expectedIds[i], actualId);
        }
    }

    /**
     * Test of parse method, of class RowdyLexer.
     * @throws java.lang.Exception
     */
    public void testParse() throws Exception {
    }

    /**
     * Test of parseLine method, of class RowdyLexer.
     */
    public void testParseLine() {
    }

    /**
     * Test of parseCode method, of class RowdyLexer.
     */
    public void testParseCode() {
    }

    /**
     * Test of tokenCount method, of class RowdyLexer.
     */
    public void testTokenCount() {
    }
}
|
package com.github.obsidianarch.gvengine.core;
/**
 * The three supported position systems by the VertexBufferObject.
 *
 * @author Austin
 * @version 14.03.30
 * @since 14.03.30
 */
public enum PositionSystem
{
    /** Two-dimensional positions: only x and y are rendered. */
    XY( 2 ),

    /** Three-dimensional positions: x, y and z are used. */
    XYZ( 3 ),

    /** Homogeneous positions: all four coordinates (x, y, z, w) are used. */
    XYZW( 4 );

    /** The number of coordinates in the system. */
    public final int coordinates;

    /**
     * @param coordinateCount
     *            The number of coordinates in the system.
     *
     * @since 14.03.30
     */
    PositionSystem( int coordinateCount )
    {
        coordinates = coordinateCount;
    }
}
|
package works.lionel.saber.controller;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.reactive.function.server.RouterFunctions;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import works.lionel.saber.KnolClient;
import works.lionel.saber.model.Knol;
import works.lionel.saber.repository.KnolRepository;
import java.time.Duration;
import java.util.Random;
/**
 * REST controller placeholder.
 *
 * NOTE(review): every handler in this controller is commented out, so the
 * class currently exposes no endpoints at all. Either restore the handlers
 * or delete this dead code before release — keeping it around as comments
 * invites drift from the real API.
 */
@RestController
@CrossOrigin
public class MainController {
//    KnolRepository knolRepository;
//
//    public MainController(KnolRepository knolRepository) {
//        this.knolRepository = knolRepository;
//    }
//
//    @GetMapping("/teapot")
//    @ResponseStatus(HttpStatus.I_AM_A_TEAPOT)
//    public String getHello() {
//        return "Hell world!";
//    }
//
//    @PostMapping("/new")
//    @ResponseStatus(HttpStatus.CREATED)
//    public Mono<Knol> getKnol(@RequestBody Knol knol) {
//        return knolRepository
//                .save(knol);
//    }
//
//    @GetMapping(value = "test", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
//    public Flux<String> getMsg(){
//        return Flux.fromStream(new Random()
//                .ints(10)
//                .mapToObj(value -> "this is data " + value))
//                .delayElements(Duration.ofSeconds(1))
//                .repeat();
//    }
//
//    @GetMapping(value = "/all")
//    public Flux<Knol> getAll() {
//        return knolRepository
//                .findAll();
//    }
}
|
package service;
import entity.User;
import org.apache.shiro.crypto.RandomNumberGenerator;
import org.apache.shiro.crypto.SecureRandomNumberGenerator;
import org.apache.shiro.crypto.hash.SimpleHash;
import org.apache.shiro.util.ByteSource;
/**
 * Hashes user passwords with a random per-user salt before persistence,
 * using Shiro's SimpleHash.
 *
 * NOTE(review): SECURITY — "md5" with only 2 iterations is far too weak for
 * password storage; prefer a dedicated password hash (bcrypt/scrypt/argon2)
 * or at minimum SHA-256 with a high iteration count. Flagged rather than
 * changed here because stored hashes and matcher configuration elsewhere
 * depend on this exact scheme.
 */
public class PasswordHelper {
// source of the random salt
private RandomNumberGenerator randomNumberGenerator = new SecureRandomNumberGenerator();
// hash algorithm name handed to Shiro's SimpleHash (see security note above)
private String algorithmName = "md5";
// number of hash iterations (see security note above)
private int hashIterations = 2;
public void setRandomNumberGenerator(RandomNumberGenerator randomNumberGenerator) {
this.randomNumberGenerator = randomNumberGenerator;
}
public void setAlgorithmName(String algorithmName) {
this.algorithmName = algorithmName;
}
public void setHashIterations(int hashIterations) {
this.hashIterations = hashIterations;
}
// Generates a fresh salt, hashes the user's plain-text password with it and
// writes the hash back onto the user object.
public void encryptPassword(User user) {
user.setSalt(randomNumberGenerator.nextBytes().toHex());
// NOTE(review): the hash uses user.getCredentialsSalt(), not the raw salt
// set just above — presumably User derives the credentials salt from it
// (possibly combined with other fields); confirm the two stay consistent.
String newPassword = new SimpleHash(
algorithmName,
user.getPassword(),
ByteSource.Util.bytes(user.getCredentialsSalt()),
hashIterations).toHex();
user.setPassword(newPassword);
}
}
|
package eyja.lab.tools.control.centre.management;
import java.util.Objects;
/**
 * A resource identifier: the pairing of an {@link Origin} with an ID that is
 * unique within that origin.
 *
 * @author Planters
 *
 */
public final class ResourceID {

    private final Origin origin;
    private final long id;

    /**
     * Create a new resource ID from an origin and an origin-unique ID.
     *
     * @param origin - the origin this ID belongs to
     * @param id - the origin unique identifier
     */
    public ResourceID(Origin origin, long id) {
        this.origin = origin;
        this.id = id;
    }

    /**
     * Get the origin the ID belongs to.
     *
     * @return the origin containing the ID
     */
    public Origin getOrigin() {
        return this.origin;
    }

    /**
     * Get the origin unique ID of this resource.
     *
     * @return the resource's ID
     */
    public long getID() {
        return this.id;
    }

    @Override
    public String toString() {
        return String.format("[%s:%s]", this.origin, this.id);
    }

    @Override
    public int hashCode() {
        final int idPart = Long.hashCode(this.id);
        return 31 * idPart + Objects.hashCode(this.origin);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ResourceID)) {
            return false;
        }
        final ResourceID other = (ResourceID) obj;
        return this.id == other.id && Objects.equals(this.origin, other.origin);
    }
}
|
/*
* (C) Copyright 2021 Radix DLT Ltd
*
* Radix DLT Ltd licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the
* License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package com.radixdlt.utils.functional;
import com.google.common.collect.ImmutableMap;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public interface FunctionalUtils {
    /**
     * Reduction helper that always keeps the later of two elements. Pass it
     * to {@code Stream.reduce} to obtain the last element of a stream:
     * <pre>
     * var lastElement = ...
     * 	.stream()
     * 	.reduce(FunctionalUtils::findLast);
     * </pre>
     */
    static <T> T findLast(T first, T second) {
        return second;
    }

    /**
     * Collects the elements of the iterable which appear after the first
     * element matching the predicate. Elements matching the predicate are
     * never included themselves.
     *
     * @param input Source iterable
     * @param predicate Predicate to test
     *
     * @return List of the elements found after the predicate match; an empty
     * list when no element matches.
     */
    static <T> List<T> skipUntil(Iterable<T> input, Predicate<T> predicate) {
        var collected = new ArrayList<T>();
        var triggered = false;
        for (var element : input) {
            if (predicate.test(element)) {
                // matching elements only flip the switch; they are not kept
                triggered = true;
            } else if (triggered) {
                collected.add(element);
            }
        }
        return collected;
    }

    /**
     * Returns a new immutable map equal to the input map except that any
     * entry sharing the new entry's key is replaced by the new entry; when no
     * such entry exists the new entry is simply added. The input map is left
     * untouched.
     *
     * @param newEntry the entry which will be put into new map
     * @param existingMap input map
     *
     * @return new map with the old entry replaced by the new entry
     */
    static <K, V> Map<K, V> replaceEntry(Map.Entry<K, V> newEntry, Map<K, V> existingMap) {
        var keptEntries = existingMap.entrySet().stream()
            .filter(entry -> !newEntry.getKey().equals(entry.getKey()));
        return Stream.concat(Stream.of(newEntry), keptEntries)
            .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    /**
     * Returns a new immutable map equal to the input map with the entry under
     * the given key removed. The input map is left untouched.
     *
     * @param keyToRemove the key to remove
     * @param existingMap input map
     *
     * @return new map with the specified key removed
     */
    static <K, V> Map<K, V> removeKey(K keyToRemove, Map<K, V> existingMap) {
        return existingMap.entrySet().stream()
            .filter(entry -> !keyToRemove.equals(entry.getKey()))
            .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    /**
     * Return a copy of the input set with the specified element removed.
     *
     * @param element element to remove
     * @param input input set
     *
     * @return new set without the specified element
     */
    static <T> Set<T> removeElement(T element, Set<T> input) {
        return input.stream()
            .filter(candidate -> !candidate.equals(element))
            .collect(Collectors.toSet());
    }

    /**
     * Return a copy of the input set with the provided element added.
     *
     * @param element element to add
     * @param input input set
     *
     * @return new set including the provided element
     */
    static <T> Set<T> addElement(T element, Set<T> input) {
        return Stream.concat(input.stream(), Stream.of(element))
            .collect(Collectors.toSet());
    }

    /**
     * Merge several sets into a single immutable set.
     *
     * @param inputs sets to merge
     *
     * @return merged set
     */
    @SafeVarargs
    static <T> Set<T> mergeAll(Set<T>... inputs) {
        var combined = new HashSet<T>();
        for (var set : inputs) {
            combined.addAll(set);
        }
        return Set.copyOf(combined);
    }

    /**
     * Create a new immutable map entry.
     *
     * @param key entry key
     * @param value entry value
     *
     * @return created entry
     */
    static <K, V> Map.Entry<K, V> newEntry(K key, V value) {
        return new SimpleImmutableEntry<>(key, value);
    }
}
|
package me.hypocrite30.rpc.core.remote.transport.netty.client;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import io.netty.handler.timeout.IdleStateHandler;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import me.hypocrite30.rpc.common.enums.CompressTypeEnum;
import me.hypocrite30.rpc.common.enums.SerializationTypeEnum;
import me.hypocrite30.rpc.common.extension.ExtensionLoader;
import me.hypocrite30.rpc.common.factory.SingletonFactory;
import me.hypocrite30.rpc.core.registry.ServiceDiscovery;
import me.hypocrite30.rpc.core.remote.dto.RpcEntity;
import me.hypocrite30.rpc.core.remote.dto.RpcRequest;
import me.hypocrite30.rpc.core.remote.dto.RpcResponse;
import me.hypocrite30.rpc.core.remote.transport.RequestTransporter;
import me.hypocrite30.rpc.core.remote.transport.netty.codec.RpcCodecConstants;
import me.hypocrite30.rpc.core.remote.transport.netty.codec.RpcMessageDecoder;
import me.hypocrite30.rpc.core.remote.transport.netty.codec.RpcMessageEncoder;
import java.net.InetSocketAddress;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
/**
 * Manages the client Bootstrap: connects to discovered service addresses,
 * caches channels and sends RPC requests.
 *
 * @Author: Hypocrite30
 * @Date: 2022/1/4 12:32
 */
@Slf4j
public class NettyRpcClient implements RequestTransporter {

    private final ServiceDiscovery serviceDiscovery;
    private final UnprocessedRequests unprocessedRequests;
    private final ChannelProvider channelProvider;
    private final Bootstrap bootstrap;
    private final EventLoopGroup eventLoopGroup;

    public NettyRpcClient() {
        // initialize resources
        eventLoopGroup = new NioEventLoopGroup();
        bootstrap = new Bootstrap();
        bootstrap.group(eventLoopGroup)
                .channel(NioSocketChannel.class)
                // timeout period of the connection is 5s
                .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, 5000)
                // BUGFIX: Bootstrap.handler() keeps only the LAST handler set.
                // The previous code called handler() twice, so the
                // LoggingHandler was silently discarded; it now lives in the
                // pipeline where it actually runs.
                .handler(new ChannelInitializer<SocketChannel>() {
                    @Override
                    protected void initChannel(SocketChannel socketChannel) {
                        ChannelPipeline pipeline = socketChannel.pipeline();
                        pipeline.addLast(new LoggingHandler(LogLevel.INFO));
                        // If no data is written for 5 seconds, a write-idle
                        // event fires and a heartbeat request is sent
                        pipeline.addLast(new IdleStateHandler(0, 5, 0, TimeUnit.SECONDS));
                        pipeline.addLast(new RpcMessageEncoder());
                        pipeline.addLast(new RpcMessageDecoder());
                        pipeline.addLast(new NettyRpcClientHandler());
                    }
                });
        this.serviceDiscovery = ExtensionLoader.getExtensionLoader(ServiceDiscovery.class).getExtension("etcd");
        this.unprocessedRequests = SingletonFactory.getInstance(UnprocessedRequests.class);
        this.channelProvider = SingletonFactory.getInstance(ChannelProvider.class);
    }

    /**
     * Sends an RPC request over the channel associated with the discovered
     * service address.
     *
     * @param rpcRequest request to transmit
     * @return a CompletableFuture completed with the RpcResponse by the
     *         response handler, or completed exceptionally on send failure
     * @throws IllegalStateException if the channel is not active
     */
    @Override
    public Object sendRpcRequest(RpcRequest rpcRequest) {
        CompletableFuture<RpcResponse<Object>> resultFuture = new CompletableFuture<>();
        InetSocketAddress inetSocketAddress = serviceDiscovery.findService(rpcRequest);
        // get server address related channel
        Channel channel = getChannel(inetSocketAddress);
        if (!channel.isActive()) {
            throw new IllegalStateException("channel to " + inetSocketAddress + " is not active");
        }
        // register the pending request so the response handler can complete the future
        unprocessedRequests.put(rpcRequest.getRequestId(), resultFuture);
        RpcEntity rpcEntity = RpcEntity.builder()
                .data(rpcRequest)
                .codec(SerializationTypeEnum.PROTOSTUFF.getCode())
                .compressType(CompressTypeEnum.GZIP.getCode())
                .messageType(RpcCodecConstants.REQUEST_TYPE).build();
        channel.writeAndFlush(rpcEntity).addListener((ChannelFutureListener) future -> {
            if (future.isSuccess()) {
                log.info("client send message: [{}]", rpcEntity);
            } else {
                future.channel().close();
                resultFuture.completeExceptionally(future.cause());
                log.error("Send failed: ", future.cause());
            }
        });
        return resultFuture;
    }

    /**
     * get channel according to socket address and do connect
     *
     * @param inetSocketAddress socket address
     * @return connected channel
     */
    public Channel getChannel(InetSocketAddress inetSocketAddress) {
        Channel channel = channelProvider.get(inetSocketAddress);
        if (channel == null) {
            channel = doConnect(inetSocketAddress);
            channelProvider.set(inetSocketAddress, channel);
        }
        return channel;
    }

    /**
     * bootstrap connect socket address
     *
     * @param inetSocketAddress socket address
     * @return connected channel
     */
    @SneakyThrows
    public Channel doConnect(InetSocketAddress inetSocketAddress) {
        CompletableFuture<Channel> completableFuture = new CompletableFuture<>();
        bootstrap.connect(inetSocketAddress).addListener((ChannelFutureListener) future -> {
            if (future.isSuccess()) {
                log.info("The client has connected [{}] successful!", inetSocketAddress.toString());
                completableFuture.complete(future.channel());
            } else {
                // BUGFIX: completing exceptionally (instead of throwing inside
                // the listener, where Netty swallows the exception) prevents
                // completableFuture.get() from blocking forever on a failed connect.
                completableFuture.completeExceptionally(
                        new IllegalStateException("failed to connect " + inetSocketAddress, future.cause()));
            }
        });
        return completableFuture.get();
    }

    /** Releases the event loop; call on client shutdown. */
    public void close() {
        eventLoopGroup.shutdownGracefully();
    }
}
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.skyframe;
import java.io.Serializable;
/**
 * A {@link SkyKey} is effectively a pair (type, name) that identifies a Skyframe value.
 *
 * <p>SkyKey implementations are heavily used as map keys. Thus, they should have fast {@link
 * #hashCode} implementations (cached if necessary). The same SkyKey may be created multiple times
 * by different {@code SkyFunction}s requesting it, and so it should have effective interning. There
 * will likely be more SkyKeys on the JVM heap than any other non-native type, so be mindful of
 * memory usage (in particular object wrapper size and memory alignment)! Typically the
 * implementation should have a fixed {@link #functionName} implementation and return itself as the
 * {@link #argument} in order to reduce the cost of wrapper objects.
 */
public interface SkyKey extends Serializable {
SkyFunctionName functionName();
// By default the key is its own argument, avoiding a per-key wrapper object
// (see the memory-usage note in the class javadoc).
default Object argument() {
return this;
}
// Shareability defaults to whatever this key's function declares.
default ShareabilityOfValue getShareabilityOfValue() {
return functionName().getShareabilityOfValue();
}
}
|
package org.batfish.representation.juniper;
import org.batfish.datamodel.Configuration;
import org.batfish.datamodel.IpAccessListLine;
import org.batfish.datamodel.SubRange;
import org.batfish.common.Warnings;
public class FwFromIcmpType extends FwFrom {

    /** Serialization version for this vendor-representation class. */
    private static final long serialVersionUID = 1L;

    /** Inclusive range of ICMP types this match condition accepts; immutable after construction. */
    private final SubRange _icmpTypeRange;

    /**
     * @param icmpTypeRange inclusive range of ICMP types to match
     */
    public FwFromIcmpType(SubRange icmpTypeRange) {
        _icmpTypeRange = icmpTypeRange;
    }

    /**
     * Adds this term's ICMP type range to the given access-list line.
     */
    @Override
    public void applyTo(IpAccessListLine line, JuniperConfiguration jc,
            Warnings w, Configuration c) {
        line.getIcmpTypes().add(_icmpTypeRange);
    }
}
|
package com.today.step.lib;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* 用来记录当天步数列表,传感器回调30次记录一条数据
* Created by jiahongfei on 2017/10/9.
*/
/**
 * SQLite helper that stores per-day step records; the step-sensor callback
 * persists one row roughly every 30 sensor events.
 * Created by jiahongfei on 2017/10/9.
 */
class TodayStepDBHelper extends SQLiteOpenHelper implements ITodayStepDBHelper {

    private static final String TAG = "TodayStepDBHelper";

    private static final String DATE_PATTERN_YYYY_MM_DD = "yyyy-MM-dd";

    private static final int VERSION = 1;
    private static final String DATABASE_NAME = "TodayStepDB.db";
    private static final String TABLE_NAME = "TodayStepData";
    private static final String PRIMARY_KEY = "_id";

    // Column names (public so callers can reference them).
    public static final String TODAY = "today";
    public static final String DATE = "date";
    public static final String STEP = "step";

    private static final String SQL_CREATE_TABLE = "CREATE TABLE IF NOT EXISTS " + TABLE_NAME + " ("
            + PRIMARY_KEY + " INTEGER PRIMARY KEY AUTOINCREMENT, "
            + TODAY + " TEXT, "
            + DATE + " long, "
            + STEP + " long);";
    private static final String SQL_DELETE_TABLE = "DROP TABLE IF EXISTS " + TABLE_NAME;
    private static final String SQL_QUERY_ALL = "SELECT * FROM " + TABLE_NAME;
    private static final String SQL_QUERY_STEP = "SELECT * FROM " + TABLE_NAME + " WHERE " + TODAY + " = ? AND " + STEP + " = ?";
    private static final String SQL_QUERY_STEP_BY_DATE = "SELECT * FROM " + TABLE_NAME + " WHERE " + TODAY + " = ?";
    private static final String SQL_DELETE_TODAY = "DELETE FROM " + TABLE_NAME + " WHERE " + TODAY + " = ?";
    private static final String SQL_QUERY_STEP_ORDER_BY = "SELECT * FROM " + TABLE_NAME + " WHERE " + TODAY + " = ? ORDER BY " + STEP + " DESC";

    // Number of days of data to keep; set by clearCapacity(), -1 disables trimming.
    private int mLimit = -1;

    /** Factory entry point; the constructor is private. */
    public static ITodayStepDBHelper factory(Context context) {
        return new TodayStepDBHelper(context);
    }

    private TodayStepDBHelper(Context context) {
        super(context, DATABASE_NAME, null, VERSION);
    }

    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL(SQL_CREATE_TABLE);
    }

    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        // No migration support: drop the table and recreate it empty.
        deleteTable();
        onCreate(db);
    }

    /**
     * Returns true if a row with the same day string and step count already exists.
     */
    @Override
    public synchronized boolean isExist(TodayStepData todayStepData) {
        Cursor cursor = getReadableDatabase().rawQuery(SQL_QUERY_STEP,
                new String[]{todayStepData.getToday(), String.valueOf(todayStepData.getStep())});
        try {
            return cursor.getCount() > 0;
        } finally {
            cursor.close();
        }
    }

    @Override
    public synchronized void createTable() {
        getWritableDatabase().execSQL(SQL_CREATE_TABLE);
    }

    /** Inserts one step record (today string, timestamp, step count). */
    @Override
    public synchronized void insert(TodayStepData todayStepData) {
        ContentValues contentValues = new ContentValues();
        contentValues.put(TODAY, todayStepData.getToday());
        contentValues.put(DATE, todayStepData.getDate());
        contentValues.put(STEP, todayStepData.getStep());
        getWritableDatabase().insert(TABLE_NAME, null, contentValues);
    }

    /** Returns every stored record. */
    @Override
    public synchronized List<TodayStepData> getQueryAll() {
        Cursor cursor = getReadableDatabase().rawQuery(SQL_QUERY_ALL, new String[]{});
        try {
            return getTodayStepDataList(cursor);
        } finally {
            cursor.close();
        }
    }

    /**
     * Returns the record with the highest step count for the day containing
     * {@code millis}, or {@code null} if that day has no data.
     */
    @Override
    public synchronized TodayStepData getMaxStepByDate(long millis) {
        Cursor cursor = getReadableDatabase().rawQuery(SQL_QUERY_STEP_ORDER_BY,
                new String[]{DateUtils.dateFormat(millis, DATE_PATTERN_YYYY_MM_DD)});
        try {
            if (cursor.moveToNext()) {
                // Query is ordered by step DESC, so the first row is the maximum.
                return getTodayStepData(cursor);
            }
            return null;
        } finally {
            cursor.close();
        }
    }

    /**
     * Returns all records for the given day.
     *
     * @param dateString day in yyyy-MM-dd format
     */
    @Override
    public synchronized List<TodayStepData> getStepListByDate(String dateString) {
        Cursor cursor = getReadableDatabase().rawQuery(SQL_QUERY_STEP_BY_DATE, new String[]{dateString});
        try {
            return getTodayStepDataList(cursor);
        } finally {
            cursor.close();
        }
    }

    /**
     * Returns all records for {@code days} consecutive days starting at
     * {@code startDate}. For example startDate = 2018-01-15 and days = 3 yields
     * the records for 2018-01-15, 2018-01-16 and 2018-01-17.
     *
     * @param startDate first day, yyyy-MM-dd format
     * @param days      number of consecutive days to fetch
     */
    @Override
    public synchronized List<TodayStepData> getStepListByStartDateAndDays(String startDate, int days) {
        List<TodayStepData> todayStepDatas = new ArrayList<>();
        // Parse the start date once; it is loop-invariant.
        long startMillis = DateUtils.getDateMillis(startDate, DATE_PATTERN_YYYY_MM_DD);
        Calendar calendar = Calendar.getInstance();
        for (int i = 0; i < days; i++) {
            calendar.setTimeInMillis(startMillis);
            calendar.add(Calendar.DAY_OF_YEAR, i);
            Cursor cursor = getReadableDatabase().rawQuery(SQL_QUERY_STEP_BY_DATE,
                    new String[]{DateUtils.dateFormat(calendar.getTimeInMillis(), DATE_PATTERN_YYYY_MM_DD)});
            try {
                todayStepDatas.addAll(getTodayStepDataList(cursor));
            } finally {
                cursor.close();
            }
        }
        return todayStepDatas;
    }

    /** Drains the cursor into a list; does not close the cursor. */
    private List<TodayStepData> getTodayStepDataList(Cursor cursor) {
        List<TodayStepData> todayStepDatas = new ArrayList<>();
        while (cursor.moveToNext()) {
            TodayStepData todayStepData = getTodayStepData(cursor);
            todayStepDatas.add(todayStepData);
        }
        return todayStepDatas;
    }

    /** Materializes the cursor's current row into a TodayStepData. */
    private TodayStepData getTodayStepData(Cursor cursor) {
        String today = cursor.getString(cursor.getColumnIndex(TODAY));
        long date = cursor.getLong(cursor.getColumnIndex(DATE));
        long step = cursor.getLong(cursor.getColumnIndex(STEP));
        TodayStepData todayStepData = new TodayStepData();
        todayStepData.setToday(today);
        todayStepData.setDate(date);
        todayStepData.setStep(step);
        return todayStepData;
    }

    /**
     * Trims the database, deleting every day that is {@code limit} or more days
     * before {@code curDate}. limit = -1 disables trimming.
     * <p>
     * NOTE(review): the original comments describe limit = 0 as "keep only
     * curDate", but the {@code mLimit <= 0} guard below makes limit = 0 a no-op.
     * Behavior kept as-is — confirm which is intended before changing the guard.
     *
     * @param curDate current day, yyyy-MM-dd format
     * @param limit   number of days to retain; values &lt;= 0 are ignored
     */
    @Override
    public synchronized void clearCapacity(String curDate, int limit) {
        mLimit = limit;
        if (mLimit <= 0) {
            return;
        }
        try {
            Calendar calendar = Calendar.getInstance();
            calendar.setTimeInMillis(DateUtils.getDateMillis(curDate, DATE_PATTERN_YYYY_MM_DD));
            calendar.add(Calendar.DAY_OF_YEAR, -(mLimit));
            String date = DateUtils.dateFormat(calendar.getTimeInMillis(), DATE_PATTERN_YYYY_MM_DD);
            Log.e(TAG, date);
            // Collect the day strings at or before the cutoff, then delete per day.
            List<TodayStepData> todayStepDataList = getQueryAll();
            Set<String> delDateSet = new HashSet<>();
            for (TodayStepData tmpTodayStepData : todayStepDataList) {
                long dbTodayDate = DateUtils.getDateMillis(tmpTodayStepData.getToday(), DATE_PATTERN_YYYY_MM_DD);
                if (calendar.getTimeInMillis() >= dbTodayDate) {
                    delDateSet.add(tmpTodayStepData.getToday());
                }
            }
            for (String delDate : delDateSet) {
                getWritableDatabase().execSQL(SQL_DELETE_TODAY, new String[]{delDate});
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public synchronized void deleteTable() {
        getWritableDatabase().execSQL(SQL_DELETE_TABLE);
    }
}
|
/*
* Licensed to DuraSpace under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* DuraSpace licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.kernel.modeshape.utils.impl;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
import javax.jcr.Property;
import javax.jcr.RepositoryException;
import javax.jcr.Value;
import org.fcrepo.kernel.api.utils.CacheEntry;
import org.fcrepo.kernel.modeshape.utils.ExternalResourceCacheEntry;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
/**
* <p>CacheEntryFactoryTest class.</p>
*
* @author lsitu
*/
@RunWith(MockitoJUnitRunner.class)
public class CacheEntryFactoryTest {

    @Mock
    private Property mockProperty;

    @Mock
    private Value mockValue;

    /** External resource URL returned by the mocked property value. */
    public static final String RESOURCE_URL = "http://www.example.com/file";

    /** Wires the mocks: a fedora:proxyFor property whose value is RESOURCE_URL. */
    @Before
    public void setUp() throws RepositoryException {
        when(mockProperty.getValue()).thenReturn(mockValue);
        when(mockProperty.getName()).thenReturn("fedora:proxyFor");
        when(mockValue.getString()).thenReturn(RESOURCE_URL);
    }

    /** A proxyFor property must yield an ExternalResourceCacheEntry. */
    @Test
    public void testForProperty() throws RepositoryException {
        final CacheEntry instance = CacheEntryFactory.forProperty(mockProperty);
        assertTrue("CacheEntry class isn't correct", instance instanceof ExternalResourceCacheEntry);
    }
}
|
package com.smartdevicelink.proxy;
import android.annotation.TargetApi;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.support.annotation.NonNull;
import android.telephony.TelephonyManager;
import android.util.DisplayMetrics;
import android.util.Log;
import android.util.SparseArray;
import android.util.SparseIntArray;
import android.view.Display;
import android.view.InputDevice;
import android.view.MotionEvent;
import android.view.Surface;
import com.smartdevicelink.Dispatcher.IDispatchingStrategy;
import com.smartdevicelink.Dispatcher.ProxyMessageDispatcher;
import com.smartdevicelink.SdlConnection.ISdlConnectionListener;
import com.smartdevicelink.SdlConnection.SdlConnection;
import com.smartdevicelink.SdlConnection.SdlSession;
import com.smartdevicelink.SdlConnection.SdlSession2;
import com.smartdevicelink.encoder.VirtualDisplayEncoder;
import com.smartdevicelink.exception.SdlException;
import com.smartdevicelink.exception.SdlExceptionCause;
import com.smartdevicelink.haptic.HapticInterfaceManager;
import com.smartdevicelink.marshal.JsonRPCMarshaller;
import com.smartdevicelink.protocol.ProtocolMessage;
import com.smartdevicelink.protocol.enums.FunctionID;
import com.smartdevicelink.protocol.enums.MessageType;
import com.smartdevicelink.protocol.enums.SessionType;
import com.smartdevicelink.protocol.heartbeat.HeartbeatMonitor;
import com.smartdevicelink.proxy.LockScreenManager.OnLockScreenIconDownloadedListener;
import com.smartdevicelink.proxy.callbacks.InternalProxyMessage;
import com.smartdevicelink.proxy.callbacks.OnError;
import com.smartdevicelink.proxy.callbacks.OnProxyClosed;
import com.smartdevicelink.proxy.callbacks.OnServiceEnded;
import com.smartdevicelink.proxy.callbacks.OnServiceNACKed;
import com.smartdevicelink.proxy.interfaces.IAudioStreamListener;
import com.smartdevicelink.proxy.interfaces.IProxyListenerBase;
import com.smartdevicelink.proxy.interfaces.IPutFileResponseListener;
import com.smartdevicelink.proxy.interfaces.ISdl;
import com.smartdevicelink.proxy.interfaces.ISdlServiceListener;
import com.smartdevicelink.proxy.interfaces.IVideoStreamListener;
import com.smartdevicelink.proxy.interfaces.OnSystemCapabilityListener;
import com.smartdevicelink.proxy.rpc.*;
import com.smartdevicelink.proxy.rpc.enums.AppHMIType;
import com.smartdevicelink.proxy.rpc.enums.AudioStreamingState;
import com.smartdevicelink.proxy.rpc.enums.AudioType;
import com.smartdevicelink.proxy.rpc.enums.BitsPerSample;
import com.smartdevicelink.proxy.rpc.enums.ButtonName;
import com.smartdevicelink.proxy.rpc.enums.DriverDistractionState;
import com.smartdevicelink.proxy.rpc.enums.FileType;
import com.smartdevicelink.proxy.rpc.enums.GlobalProperty;
import com.smartdevicelink.proxy.rpc.enums.HMILevel;
import com.smartdevicelink.proxy.rpc.enums.ImageType;
import com.smartdevicelink.proxy.rpc.enums.InteractionMode;
import com.smartdevicelink.proxy.rpc.enums.Language;
import com.smartdevicelink.proxy.rpc.enums.PrerecordedSpeech;
import com.smartdevicelink.proxy.rpc.enums.RequestType;
import com.smartdevicelink.proxy.rpc.enums.Result;
import com.smartdevicelink.proxy.rpc.enums.SamplingRate;
import com.smartdevicelink.proxy.rpc.enums.SdlConnectionState;
import com.smartdevicelink.proxy.rpc.enums.SdlDisconnectedReason;
import com.smartdevicelink.proxy.rpc.enums.SdlInterfaceAvailability;
import com.smartdevicelink.proxy.rpc.enums.SystemCapabilityType;
import com.smartdevicelink.proxy.rpc.enums.TextAlignment;
import com.smartdevicelink.proxy.rpc.enums.TouchType;
import com.smartdevicelink.proxy.rpc.enums.UpdateMode;
import com.smartdevicelink.proxy.rpc.listeners.OnMultipleRequestListener;
import com.smartdevicelink.proxy.rpc.listeners.OnPutFileUpdateListener;
import com.smartdevicelink.proxy.rpc.listeners.OnRPCListener;
import com.smartdevicelink.proxy.rpc.listeners.OnRPCNotificationListener;
import com.smartdevicelink.proxy.rpc.listeners.OnRPCResponseListener;
import com.smartdevicelink.security.SdlSecurityBase;
import com.smartdevicelink.streaming.StreamRPCPacketizer;
import com.smartdevicelink.streaming.audio.AudioStreamingCodec;
import com.smartdevicelink.streaming.audio.AudioStreamingParams;
import com.smartdevicelink.streaming.video.SdlRemoteDisplay;
import com.smartdevicelink.streaming.video.VideoStreamingParameters;
import com.smartdevicelink.trace.SdlTrace;
import com.smartdevicelink.trace.TraceDeviceInfo;
import com.smartdevicelink.trace.enums.InterfaceActivityDirection;
import com.smartdevicelink.transport.BaseTransportConfig;
import com.smartdevicelink.transport.MultiplexTransportConfig;
import com.smartdevicelink.transport.SiphonServer;
import com.smartdevicelink.transport.USBTransportConfig;
import com.smartdevicelink.transport.enums.TransportType;
import com.smartdevicelink.util.CorrelationIdGenerator;
import com.smartdevicelink.util.DebugTool;
import com.smartdevicelink.util.Version;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Hashtable;
import java.util.List;
import java.util.Vector;
import java.util.concurrent.Callable;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import java.util.concurrent.ScheduledExecutorService;
@SuppressWarnings({"WeakerAccess", "Convert2Diamond"})
public abstract class SdlProxyBase<proxyListenerType extends IProxyListenerBase> {
// Used for calls to Android Log class.
public static final String TAG = "SdlProxy";
private static final String SDL_LIB_TRACE_KEY = "42baba60-eb57-11df-98cf-0800200c9a66";
private static final int PROX_PROT_VER_ONE = 1;
private static final int RESPONSE_WAIT_TIME = 2000;
public static final com.smartdevicelink.util.Version MAX_SUPPORTED_RPC_VERSION = new com.smartdevicelink.util.Version("5.0.0");
private SdlSession sdlSession = null;
private proxyListenerType _proxyListener = null;
protected Service _appService = null;
private Context _appContext;
private String sPoliciesURL = ""; //for testing only
// Protected Correlation IDs
private final int REGISTER_APP_INTERFACE_CORRELATION_ID = 65529,
UNREGISTER_APP_INTERFACE_CORRELATION_ID = 65530,
POLICIES_CORRELATION_ID = 65535;
// Sdl Synchronization Objects
private static final Object CONNECTION_REFERENCE_LOCK = new Object(),
INCOMING_MESSAGE_QUEUE_THREAD_LOCK = new Object(),
OUTGOING_MESSAGE_QUEUE_THREAD_LOCK = new Object(),
INTERNAL_MESSAGE_QUEUE_THREAD_LOCK = new Object(),
ON_UPDATE_LISTENER_LOCK = new Object(),
ON_NOTIFICATION_LISTENER_LOCK = new Object();
private final Object APP_INTERFACE_REGISTERED_LOCK = new Object();
private int iFileCount = 0;
private boolean navServiceStartResponseReceived = false;
private boolean navServiceStartResponse = false;
private List<String> navServiceStartRejectedParams = null;
private boolean pcmServiceStartResponseReceived = false;
private boolean pcmServiceStartResponse = false;
@SuppressWarnings("FieldCanBeLocal")
private List<String> pcmServiceStartRejectedParams = null;
private boolean navServiceEndResponseReceived = false;
private boolean navServiceEndResponse = false;
private boolean pcmServiceEndResponseReceived = false;
private boolean pcmServiceEndResponse = false;
private boolean rpcProtectedResponseReceived = false;
private boolean rpcProtectedStartResponse = false;
// Device Info for logging
private TraceDeviceInfo _traceDeviceInterrogator = null;
// Declare Queuing Threads
private ProxyMessageDispatcher<ProtocolMessage> _incomingProxyMessageDispatcher;
private ProxyMessageDispatcher<ProtocolMessage> _outgoingProxyMessageDispatcher;
private ProxyMessageDispatcher<InternalProxyMessage> _internalProxyMessageDispatcher;
// Flag indicating if callbacks should be called from UIThread
private Boolean _callbackToUIThread = false;
// UI Handler
private Handler _mainUIHandler = null;
final int HEARTBEAT_CORRELATION_ID = 65531;
// SdlProxy Advanced Lifecycle Management
protected Boolean _advancedLifecycleManagementEnabled = false;
// Parameters passed to the constructor from the app to register an app interface
private String _applicationName = null;
private final long instanceDateTime = System.currentTimeMillis();
private String sConnectionDetails = "N/A";
private Vector<TTSChunk> _ttsName = null;
private String _ngnMediaScreenAppName = null;
private Boolean _isMediaApp = null;
private Language _sdlLanguageDesired = null;
private Language _hmiDisplayLanguageDesired = null;
private Vector<AppHMIType> _appType = null;
private String _appID = null;
private TemplateColorScheme _dayColorScheme = null;
private TemplateColorScheme _nightColorScheme = null;
@SuppressWarnings({"FieldCanBeLocal", "unused"}) //Need to understand what this is used for
private String _autoActivateIdDesired = null;
private String _lastHashID = null;
private SdlMsgVersion _sdlMsgVersionRequest = null;
private Vector<String> _vrSynonyms = null;
private boolean _bAppResumeEnabled = false;
private OnSystemRequest lockScreenIconRequest = null;
private TelephonyManager telephonyManager = null;
private DeviceInfo deviceInfo = null;
/**
* Contains current configuration for the transport that was selected during
* construction of this object
*/
private BaseTransportConfig _transportConfig = null;
// Proxy State Variables
protected Boolean _appInterfaceRegisterd = false;
protected Boolean _preRegisterd = false;
@SuppressWarnings({"unused", "FieldCanBeLocal"})
private Boolean _haveReceivedFirstNonNoneHMILevel = false;
protected Boolean _haveReceivedFirstFocusLevel = false;
protected Boolean _haveReceivedFirstFocusLevelFull = false;
protected Boolean _proxyDisposed = false;
protected SdlConnectionState _sdlConnectionState = null;
protected SdlInterfaceAvailability _sdlIntefaceAvailablity = null;
protected HMILevel _hmiLevel = null;
protected AudioStreamingState _audioStreamingState = null;
// Variables set by RegisterAppInterfaceResponse
protected SdlMsgVersion _sdlMsgVersion = null;
protected String _autoActivateIdReturned = null;
protected Language _sdlLanguage = null;
protected Language _hmiDisplayLanguage = null;
protected List<PrerecordedSpeech> _prerecordedSpeech = null;
protected VehicleType _vehicleType = null;
protected String _systemSoftwareVersion = null;
protected List<Integer> _diagModes = null;
protected Boolean firstTimeFull = true;
protected String _proxyVersionInfo = null;
protected Boolean _bResumeSuccess = false;
protected List<Class<? extends SdlSecurityBase>> _secList = null;
protected SystemCapabilityManager _systemCapabilityManager;
protected Boolean _iconResumed = false;
private final CopyOnWriteArrayList<IPutFileResponseListener> _putFileListenerList = new CopyOnWriteArrayList<IPutFileResponseListener>();
protected com.smartdevicelink.util.Version protocolVersion = new com.smartdevicelink.util.Version(1,0,0);
protected com.smartdevicelink.util.Version rpcSpecVersion;
protected SparseArray<OnRPCResponseListener> rpcResponseListeners = null;
protected SparseArray<CopyOnWriteArrayList<OnRPCNotificationListener>> rpcNotificationListeners = null;
protected VideoStreamingManager manager; //Will move to SdlSession once the class becomes public
// Interface broker
private SdlInterfaceBroker _interfaceBroker = null;
//We create an easily passable anonymous class of the interface so that we don't expose the internal interface to developers
private ISdl _internalInterface = new ISdl() {
    @Override
    public void start() {
        try {
            initializeProxy();
        } catch (SdlException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void stop() {
        try {
            dispose();
        } catch (SdlException e) {
            e.printStackTrace();
        }
    }

    @Override
    public boolean isConnected() {
        return getIsConnected();
    }

    @Override
    public void addServiceListener(SessionType serviceType, ISdlServiceListener sdlServiceListener) {
        SdlProxyBase.this.addServiceListener(serviceType, sdlServiceListener);
    }

    @Override
    public void removeServiceListener(SessionType serviceType, ISdlServiceListener sdlServiceListener) {
        SdlProxyBase.this.removeServiceListener(serviceType, sdlServiceListener);
    }

    @Override
    public void startVideoService(VideoStreamingParameters parameters, boolean encrypted) {
        if (isConnected()) {
            sdlSession.setDesiredVideoParams(parameters);
            sdlSession.startService(SessionType.NAV, sdlSession.getSessionId(), encrypted);
        }
    }

    @Override
    public void stopVideoService() {
        if (isConnected()) {
            sdlSession.endService(SessionType.NAV, sdlSession.getSessionId());
        }
    }

    @Override
    public void stopAudioService() {
        if (isConnected()) {
            sdlSession.endService(SessionType.PCM, sdlSession.getSessionId());
        }
    }

    @Override
    public void sendRPCRequest(RPCRequest message) {
        try {
            SdlProxyBase.this.sendRPCRequest(message);
        } catch (SdlException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void sendRequests(List<? extends RPCRequest> rpcs, OnMultipleRequestListener listener) {
        try {
            SdlProxyBase.this.sendRequests(rpcs, listener);
        } catch (SdlException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void addOnRPCNotificationListener(FunctionID notificationId, OnRPCNotificationListener listener) {
        SdlProxyBase.this.addOnRPCNotificationListener(notificationId, listener);
    }

    @Override
    public boolean removeOnRPCNotificationListener(FunctionID notificationId, OnRPCNotificationListener listener) {
        return SdlProxyBase.this.removeOnRPCNotificationListener(notificationId, listener);
    }

    @Override
    public void addOnRPCListener(FunctionID responseId, OnRPCListener listener) {
        DebugTool.logError("Proxy.addOnRPCResponseListener() is not implemented yet");
    }

    @Override
    public boolean removeOnRPCListener(FunctionID responseId, OnRPCListener listener) {
        DebugTool.logError("Proxy.removeOnRPCResponseListener() is not implemented yet");
        return false;
    }

    @Override
    public Object getCapability(SystemCapabilityType systemCapabilityType) {
        return SdlProxyBase.this.getCapability(systemCapabilityType);
    }

    @Override
    public void getCapability(SystemCapabilityType systemCapabilityType, OnSystemCapabilityListener scListener) {
        SdlProxyBase.this.getCapability(systemCapabilityType, scListener);
    }

    @Override
    public SdlMsgVersion getSdlMsgVersion() {
        try {
            return SdlProxyBase.this.getSdlMsgVersion();
        } catch (SdlException e) {
            e.printStackTrace();
        }
        return null;
    }

    @Override
    public com.smartdevicelink.util.Version getProtocolVersion() {
        return SdlProxyBase.this.protocolVersion;
    }

    @Override
    public boolean isCapabilitySupported(SystemCapabilityType systemCapabilityType) {
        return SdlProxyBase.this.isCapabilitySupported(systemCapabilityType);
    }

    @Override
    public void addOnSystemCapabilityListener(SystemCapabilityType systemCapabilityType, OnSystemCapabilityListener listener) {
        // FIX: this previously delegated to removeOnSystemCapabilityListener, so
        // registering a capability listener actually removed it. Delegate to the
        // matching add method instead (assumes SdlProxyBase declares
        // addOnSystemCapabilityListener, mirroring the remove method below — verify).
        SdlProxyBase.this.addOnSystemCapabilityListener(systemCapabilityType, listener);
    }

    @Override
    public boolean removeOnSystemCapabilityListener(SystemCapabilityType systemCapabilityType, OnSystemCapabilityListener listener) {
        return SdlProxyBase.this.removeOnSystemCapabilityListener(systemCapabilityType, listener);
    }

    @Override
    public boolean isTransportForServiceAvailable(SessionType serviceType) {
        return SdlProxyBase.this.sdlSession != null
                && SdlProxyBase.this.sdlSession.isTransportForServiceAvailable(serviceType);
    }

    @Override
    public void startAudioService(boolean isEncrypted, AudioStreamingCodec codec,
                                  AudioStreamingParams params) {
        if (getIsConnected()) {
            SdlProxyBase.this.startAudioStream(isEncrypted, codec, params);
        }
    }

    @Override
    public void startAudioService(boolean encrypted) {
        if (isConnected()) {
            sdlSession.startService(SessionType.PCM, sdlSession.getSessionId(), encrypted);
        }
    }

    @Override
    public IVideoStreamListener startVideoStream(boolean isEncrypted, VideoStreamingParameters parameters) {
        return SdlProxyBase.this.startVideoStream(isEncrypted, parameters);
    }

    @Override
    public IAudioStreamListener startAudioStream(boolean isEncrypted, AudioStreamingCodec codec,
                                                 AudioStreamingParams params) {
        return SdlProxyBase.this.startAudioStream(isEncrypted, codec, params);
    }
};
/** Forwards a PutFile streaming failure to every registered listener. */
private void notifyPutFileStreamError(Exception e, String info) {
    for (IPutFileResponseListener listener : _putFileListenerList) {
        listener.onPutFileStreamError(e, info);
    }
}
/** Forwards a PutFile response to every registered listener. */
private void notifyPutFileStreamResponse(PutFileResponse msg) {
    for (IPutFileResponseListener listener : _putFileListenerList) {
        listener.onPutFileResponse(msg);
    }
}
/** Registers a listener for PutFile responses; duplicates are ignored (addIfAbsent). */
public void addPutFileResponseListener(IPutFileResponseListener _putFileListener)
{
_putFileListenerList.addIfAbsent(_putFileListener);
}
/** Unregisters a previously added PutFile response listener; no-op if absent. */
public void remPutFileResponseListener(IPutFileResponseListener _putFileListener)
{
_putFileListenerList.remove(_putFileListener);
}
// Private Class to Interface with SdlConnection
private class SdlInterfaceBroker implements ISdlConnectionListener {
@Override
public void onTransportDisconnected(String info) {
// proxyOnTransportDisconnect is called to alert the proxy that a requested
// disconnect has completed
notifyPutFileStreamError(null, info);
//if (!_advancedLifecycleManagementEnabled) {
// If original model, notify app the proxy is closed so it will delete and reinstanciate
Log.d(TAG, "notifying proxy of closed");
notifyProxyClosed(info, new SdlException("Transport disconnected.", SdlExceptionCause.SDL_UNAVAILABLE), SdlDisconnectedReason.TRANSPORT_DISCONNECT);
//}// else If ALM, nothing is required to be done here
}
@Override
public void onTransportDisconnected(String info, boolean altTransportAvailable, MultiplexTransportConfig transportConfig) {
notifyPutFileStreamError(null, info);
if( altTransportAvailable){
SdlProxyBase.this._transportConfig = transportConfig;
Log.d(TAG, "notifying RPC session ended, but potential primary transport available");
cycleProxy(SdlDisconnectedReason.PRIMARY_TRANSPORT_CYCLE_REQUEST);
}else{
notifyProxyClosed(info, new SdlException("Transport disconnected.", SdlExceptionCause.SDL_UNAVAILABLE), SdlDisconnectedReason.TRANSPORT_DISCONNECT);
}
}
@Override
public void onTransportError(String info, Exception e) {
DebugTool.logError("Transport failure: " + info, e);
notifyPutFileStreamError(e, info);
if (_advancedLifecycleManagementEnabled) {
// Cycle the proxy
if(SdlConnection.isLegacyModeEnabled()){ //FIXME
cycleProxy(SdlDisconnectedReason.LEGACY_BLUETOOTH_MODE_ENABLED);
}else{
cycleProxy(SdlDisconnectedReason.TRANSPORT_ERROR);
}
} else {
notifyProxyClosed(info, e, SdlDisconnectedReason.TRANSPORT_ERROR);
}
}
@Override
public void onProtocolMessageReceived(ProtocolMessage msg) {
// AudioPathThrough is coming WITH BulkData but WITHOUT JSON Data
// Policy Snapshot is coming WITH BulkData and WITH JSON Data
if ((msg.getData() != null && msg.getData().length > 0) ||
(msg.getBulkData() != null && msg.getBulkData().length > 0)) {
queueIncomingMessage(msg);
}
}
@Override
public void onProtocolSessionStarted(SessionType sessionType,
byte sessionID, byte version, String correlationID, int hashID, boolean isEncrypted) {
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "onProtocolSessionStarted");
updateBroadcastIntent(sendIntent, "COMMENT1", "SessionID: " + sessionID);
updateBroadcastIntent(sendIntent, "COMMENT2", " ServiceType: " + sessionType.getName());
updateBroadcastIntent(sendIntent, "COMMENT3", " Encrypted: " + isEncrypted);
sendBroadcastIntent(sendIntent);
if(sdlSession!= null){
setProtocolVersion(sdlSession.getProtocolVersion());
}else{
setProtocolVersion(new com.smartdevicelink.util.Version(version,0,0));
}
if (sessionType.eq(SessionType.RPC)) {
if (!isEncrypted)
{
if ( (_transportConfig.getHeartBeatTimeout() != Integer.MAX_VALUE) && (version > 2))
{
HeartbeatMonitor outgoingHeartbeatMonitor = new HeartbeatMonitor();
outgoingHeartbeatMonitor.setInterval(_transportConfig.getHeartBeatTimeout());
sdlSession.setOutgoingHeartbeatMonitor(outgoingHeartbeatMonitor);
HeartbeatMonitor incomingHeartbeatMonitor = new HeartbeatMonitor();
incomingHeartbeatMonitor.setInterval(_transportConfig.getHeartBeatTimeout());
sdlSession.setIncomingHeartbeatMonitor(incomingHeartbeatMonitor);
}
startRPCProtocolSession();
}
else
{
RPCProtectedServiceStarted();
}
} else if (sessionType.eq(SessionType.NAV)) {
NavServiceStarted();
} else if (sessionType.eq(SessionType.PCM)) {
AudioServiceStarted();
} else if (sessionType.eq(SessionType.RPC)){
cycleProxy(SdlDisconnectedReason.RPC_SESSION_ENDED);
}
else if (protocolVersion!= null && protocolVersion.getMajor() > 1) {
//If version is 2 or above then don't need to specify a Session Type
startRPCProtocolSession();
} //else{} Handle other protocol session types here
}
@Override
public void onProtocolSessionStartedNACKed(SessionType sessionType,
byte sessionID, byte version, String correlationID, List<String> rejectedParams) {
OnServiceNACKed message = new OnServiceNACKed(sessionType);
queueInternalMessage(message);
if (sessionType.eq(SessionType.NAV)) {
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "onProtocolSessionStartedNACKed");
updateBroadcastIntent(sendIntent, "COMMENT1", "SessionID: " + sessionID);
updateBroadcastIntent(sendIntent, "COMMENT2", " NACK ServiceType: " + sessionType.getName());
sendBroadcastIntent(sendIntent);
NavServiceStartedNACK(rejectedParams);
}
else if (sessionType.eq(SessionType.PCM)) {
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "onProtocolSessionStartedNACKed");
updateBroadcastIntent(sendIntent, "COMMENT1", "SessionID: " + sessionID);
updateBroadcastIntent(sendIntent, "COMMENT2", " NACK ServiceType: " + sessionType.getName());
sendBroadcastIntent(sendIntent);
AudioServiceStartedNACK(rejectedParams);
}
}
@Override
public void onProtocolSessionEnded(SessionType sessionType,
byte sessionID, String correlationID) {
OnServiceEnded message = new OnServiceEnded(sessionType);
queueInternalMessage(message);
if (sessionType.eq(SessionType.NAV)) {
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "onProtocolSessionEnded");
updateBroadcastIntent(sendIntent, "COMMENT1", "SessionID: " + sessionID);
updateBroadcastIntent(sendIntent, "COMMENT2", " End ServiceType: " + sessionType.getName());
sendBroadcastIntent(sendIntent);
NavServiceEnded();
}
else if (sessionType.eq(SessionType.PCM)) {
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "onProtocolSessionEnded");
updateBroadcastIntent(sendIntent, "COMMENT1", "SessionID: " + sessionID);
updateBroadcastIntent(sendIntent, "COMMENT2", " End ServiceType: " + sessionType.getName());
sendBroadcastIntent(sendIntent);
AudioServiceEnded();
}
}
@Override
public void onProtocolError(String info, Exception e) {
notifyPutFileStreamError(e, info);
passErrorToProxyListener(info, e);
}
@Override
public void onHeartbeatTimedOut(byte sessionID) {
final String msg = "Heartbeat timeout";
DebugTool.logInfo(msg);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "onHeartbeatTimedOut");
updateBroadcastIntent(sendIntent, "COMMENT1", "Heartbeat timeout for SessionID: " + sessionID);
sendBroadcastIntent(sendIntent);
notifyProxyClosed(msg, new SdlException(msg, SdlExceptionCause.HEARTBEAT_PAST_DUE), SdlDisconnectedReason.HB_TIMEOUT);
}
@Override
public void onProtocolSessionEndedNACKed(SessionType sessionType,
byte sessionID, String correlationID) {
if (sessionType.eq(SessionType.NAV)) {
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "onProtocolSessionEndedNACKed");
updateBroadcastIntent(sendIntent, "COMMENT1", "SessionID: " + sessionID);
updateBroadcastIntent(sendIntent, "COMMENT2", " End NACK ServiceType: " + sessionType.getName());
sendBroadcastIntent(sendIntent);
NavServiceEndedNACK();
}
else if (sessionType.eq(SessionType.PCM)) {
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "onProtocolSessionEndedNACKed");
updateBroadcastIntent(sendIntent, "COMMENT1", "SessionID: " + sessionID);
updateBroadcastIntent(sendIntent, "COMMENT2", " End NACK ServiceType: " + sessionType.getName());
sendBroadcastIntent(sendIntent);
AudioServiceEndedNACK();
}
}
/**
 * Protocol callback: the module acknowledged receipt of service data of
 * the given size. Forwards the ACK to the proxy listener, hopping to the
 * main thread first when the proxy was configured for UI-thread callbacks.
 */
public void onProtocolServiceDataACK(SessionType sessionType, final int dataSize,
                byte sessionID) {
    if (!_callbackToUIThread) {
        _proxyListener.onServiceDataACK(dataSize);
        return;
    }
    // Deliver on the main looper, consistent with the other proxy callbacks.
    _mainUIHandler.post(new Runnable() {
        @Override
        public void run() {
            _proxyListener.onServiceDataACK(dataSize);
        }
    });
}
}
/**
 * Constructor used by the SdlManager. Delegates to {@code performBaseCommon}
 * with ALM enabled and no pre-registration/resume state, then stores the
 * supplied application context for broadcast logging.
 *
 * @param listener Type of listener for this proxy base.
 * @param context Application context.
 * @param appName Client application name.
 * @param shortAppName Client short application name.
 * @param isMediaApp Flag that indicates that client application if media application or not.
 * @param languageDesired Desired language.
 * @param hmiDisplayLanguageDesired Desired language for HMI.
 * @param appType Type of application.
 * @param appID Application identifier.
 * @param dayColorScheme TemplateColorScheme for the day
 * @param nightColorScheme TemplateColorScheme for the night
 * @param transportConfig Configuration of transport to be used by underlying connection.
 * @param vrSynonyms List of synonyms.
 * @param ttsName TTS name.
 * @throws SdlException if an unrecoverable error occurs during proxy initialization.
 */
public SdlProxyBase(proxyListenerType listener, Context context, String appName,String shortAppName, Boolean isMediaApp, Language languageDesired, Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType, String appID,
                    BaseTransportConfig transportConfig, Vector<String> vrSynonyms, Vector<TTSChunk> ttsName, TemplateColorScheme dayColorScheme, TemplateColorScheme nightColorScheme) throws SdlException {
    performBaseCommon(listener, null, true, appName, ttsName, shortAppName, vrSynonyms, isMediaApp,
            null, languageDesired, hmiDisplayLanguageDesired, appType, appID, null, dayColorScheme,nightColorScheme, false, false, null, null, transportConfig);
    _appContext = context;
}
/**
 * Constructor. Delegates to {@code performBaseCommon} with no color schemes,
 * no pre-registration, and no resume hash.
 *
 * @param listener Type of listener for this proxy base.
 * @param sdlProxyConfigurationResources Configuration resources for this proxy.
 * @param enableAdvancedLifecycleManagement Flag that ALM should be enabled or not.
 * @param appName Client application name.
 * @param ttsName TTS name.
 * @param ngnMediaScreenAppName Media Screen Application name.
 * @param vrSynonyms List of synonyms.
 * @param isMediaApp Flag that indicates that client application if media application or not.
 * @param sdlMsgVersion Version of Sdl Message.
 * @param languageDesired Desired language.
 * @param hmiDisplayLanguageDesired Desired language for HMI.
 * @param appType Type of application.
 * @param appID Application identifier.
 * @param autoActivateID Auto activation identifier.
 * @param callbackToUIThread Flag that indicates that this proxy should send callback to UI thread or not.
 * @param transportConfig Configuration of transport to be used by underlying connection.
 * @throws SdlException if there is an unrecoverable error class might throw an exception.
 */
protected SdlProxyBase(proxyListenerType listener, SdlProxyConfigurationResources sdlProxyConfigurationResources,
        boolean enableAdvancedLifecycleManagement, String appName, Vector<TTSChunk> ttsName,
        String ngnMediaScreenAppName, Vector<String> vrSynonyms, Boolean isMediaApp, SdlMsgVersion sdlMsgVersion,
        Language languageDesired, Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType, String appID,
        String autoActivateID, boolean callbackToUIThread, BaseTransportConfig transportConfig)
        throws SdlException {
    performBaseCommon(listener, sdlProxyConfigurationResources, enableAdvancedLifecycleManagement, appName, ttsName, ngnMediaScreenAppName, vrSynonyms, isMediaApp,
            sdlMsgVersion, languageDesired, hmiDisplayLanguageDesired, appType, appID, autoActivateID, null, null, callbackToUIThread, null, null, null, transportConfig);
}
/**
 * Common initialization shared by every SdlProxyBase constructor.
 * <p>
 * Stores the constructor arguments in the proxy's fields, validates them,
 * creates the three message dispatchers (internal, incoming, outgoing), and
 * finally opens the SDL connection via {@code initializeProxy()}. If
 * initialization fails, the dispatchers created here are disposed before the
 * exception is rethrown, so the proxy never leaks dispatcher threads.
 * <p>
 * NOTE(review): Boolean wrapper parameters ({@code preRegister},
 * {@code bAppResumeEnab}) are nullable; {@code null} is treated as false.
 *
 * @throws SdlException rethrown from {@code initializeProxy()} when the
 *         underlying connection cannot be established.
 */
@SuppressWarnings("ConstantConditions")
private void performBaseCommon(proxyListenerType listener, SdlProxyConfigurationResources sdlProxyConfigurationResources,
        boolean enableAdvancedLifecycleManagement, String appName, Vector<TTSChunk> ttsName,
        String ngnMediaScreenAppName, Vector<String> vrSynonyms, Boolean isMediaApp, SdlMsgVersion sdlMsgVersion,
        Language languageDesired, Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType, String appID,
        String autoActivateID, TemplateColorScheme dayColorScheme, TemplateColorScheme nightColorScheme,
        boolean callbackToUIThread, Boolean preRegister, String sHashID, Boolean bAppResumeEnab,
        BaseTransportConfig transportConfig) throws SdlException
{
    Log.i(TAG, "SDL_LIB_VERSION: " + com.smartdevicelink.proxy.Version.VERSION);
    // Start at protocol v1; the negotiated version is raised later during session setup.
    setProtocolVersion(new Version(PROX_PROT_VER_ONE,0,0));
    // Pre-registration: treat the app interface as already registered.
    if (preRegister != null && preRegister)
    {
        _appInterfaceRegisterd = preRegister;
        _preRegisterd = preRegister;
    }
    // App resumption: remember the hash id to resume with on reconnect.
    if (bAppResumeEnab != null && bAppResumeEnab)
    {
        _bAppResumeEnabled = true;
        _lastHashID = sHashID;
    }
    _interfaceBroker = new SdlInterfaceBroker();
    _callbackToUIThread = callbackToUIThread;
    if (_callbackToUIThread) {
        // All listener callbacks will be posted to the main looper.
        _mainUIHandler = new Handler(Looper.getMainLooper());
    }
    // Set variables for Advanced Lifecycle Management
    _advancedLifecycleManagementEnabled = enableAdvancedLifecycleManagement;
    _applicationName = appName;
    _ttsName = ttsName;
    _ngnMediaScreenAppName = ngnMediaScreenAppName;
    _isMediaApp = isMediaApp;
    _sdlMsgVersionRequest = sdlMsgVersion;
    _vrSynonyms = vrSynonyms;
    _sdlLanguageDesired = languageDesired;
    _hmiDisplayLanguageDesired = hmiDisplayLanguageDesired;
    _appType = appType;
    _appID = appID;
    _autoActivateIdDesired = autoActivateID;
    _dayColorScheme = dayColorScheme;
    _nightColorScheme = nightColorScheme;
    _transportConfig = transportConfig;
    // Test conditions to invalidate the proxy
    if (listener == null) {
        throw new IllegalArgumentException("IProxyListener listener must be provided to instantiate SdlProxy object.");
    }
    if (_advancedLifecycleManagementEnabled) {
    /*		if (_applicationName == null ) {
            throw new IllegalArgumentException("To use SdlProxyALM, an application name, appName, must be provided");
        }
        if (_applicationName.length() < 1 || _applicationName.length() > 100) {
            throw new IllegalArgumentException("A provided application name, appName, must be between 1 and 100 characters in length.");
        }*/
        if (_isMediaApp == null) {
            throw new IllegalArgumentException("isMediaApp must not be null when using SdlProxyALM.");
        }
    }
    _proxyListener = listener;
    // Get information from sdlProxyConfigurationResources
    if (sdlProxyConfigurationResources != null) {
        telephonyManager = sdlProxyConfigurationResources.getTelephonyManager();
    }
    // Use the telephonyManager to get and log phone info
    if (telephonyManager != null) {
        // Following is not quite thread-safe (because m_traceLogger could test null twice),
        // so we need to fix this, but vulnerability (i.e. two instances of listener) is
        // likely harmless.
        if (_traceDeviceInterrogator == null) {
            _traceDeviceInterrogator = new TraceDeviceInfo(telephonyManager);
        } // end-if
    } // end-if
    // Setup Internal ProxyMessage Dispatcher
    synchronized(INTERNAL_MESSAGE_QUEUE_THREAD_LOCK) {
        // Ensure internalProxyMessageDispatcher is null
        if (_internalProxyMessageDispatcher != null) {
            _internalProxyMessageDispatcher.dispose();
            _internalProxyMessageDispatcher = null;
        }
        _internalProxyMessageDispatcher = new ProxyMessageDispatcher<InternalProxyMessage>("INTERNAL_MESSAGE_DISPATCHER", new IDispatchingStrategy<InternalProxyMessage>() {
            @Override
            public void dispatch(InternalProxyMessage message) {
                dispatchInternalMessage(message);
            }
            @Override
            public void handleDispatchingError(String info, Exception ex) {
                handleErrorsFromInternalMessageDispatcher(info, ex);
            }
            @Override
            public void handleQueueingError(String info, Exception ex) {
                handleErrorsFromInternalMessageDispatcher(info, ex);
            }
        });
    }
    // Setup Incoming ProxyMessage Dispatcher
    synchronized(INCOMING_MESSAGE_QUEUE_THREAD_LOCK) {
        // Ensure incomingProxyMessageDispatcher is null
        if (_incomingProxyMessageDispatcher != null) {
            _incomingProxyMessageDispatcher.dispose();
            _incomingProxyMessageDispatcher = null;
        }
        _incomingProxyMessageDispatcher = new ProxyMessageDispatcher<ProtocolMessage>("INCOMING_MESSAGE_DISPATCHER",new IDispatchingStrategy<ProtocolMessage>() {
            @Override
            public void dispatch(ProtocolMessage message) {
                dispatchIncomingMessage(message);
            }
            @Override
            public void handleDispatchingError(String info, Exception ex) {
                handleErrorsFromIncomingMessageDispatcher(info, ex);
            }
            @Override
            public void handleQueueingError(String info, Exception ex) {
                handleErrorsFromIncomingMessageDispatcher(info, ex);
            }
        });
    }
    // Setup Outgoing ProxyMessage Dispatcher
    synchronized(OUTGOING_MESSAGE_QUEUE_THREAD_LOCK) {
        // Ensure outgoingProxyMessageDispatcher is null
        if (_outgoingProxyMessageDispatcher != null) {
            _outgoingProxyMessageDispatcher.dispose();
            _outgoingProxyMessageDispatcher = null;
        }
        _outgoingProxyMessageDispatcher = new ProxyMessageDispatcher<ProtocolMessage>("OUTGOING_MESSAGE_DISPATCHER",new IDispatchingStrategy<ProtocolMessage>() {
            @Override
            public void dispatch(ProtocolMessage message) {
                dispatchOutgoingMessage(message);
            }
            @Override
            public void handleDispatchingError(String info, Exception ex) {
                handleErrorsFromOutgoingMessageDispatcher(info, ex);
            }
            @Override
            public void handleQueueingError(String info, Exception ex) {
                handleErrorsFromOutgoingMessageDispatcher(info, ex);
            }
        });
    }
    rpcResponseListeners = new SparseArray<OnRPCResponseListener>();
    rpcNotificationListeners = new SparseArray<CopyOnWriteArrayList<OnRPCNotificationListener>>();
    // Initialize the proxy
    try {
        initializeProxy();
    } catch (SdlException e) {
        // Couldn't initialize the proxy
        // Dispose threads and then rethrow exception
        if (_internalProxyMessageDispatcher != null) {
            _internalProxyMessageDispatcher.dispose();
            _internalProxyMessageDispatcher = null;
        }
        if (_incomingProxyMessageDispatcher != null) {
            _incomingProxyMessageDispatcher.dispose();
            _incomingProxyMessageDispatcher = null;
        }
        if (_outgoingProxyMessageDispatcher != null) {
            _outgoingProxyMessageDispatcher.dispose();
            _outgoingProxyMessageDispatcher = null;
        }
        throw e;
    }
    // Trace that ctor has fired
    SdlTrace.logProxyEvent("SdlProxy Created, instanceID=" + this.toString(), SDL_LIB_TRACE_KEY);
}
/**
 * Constructor. The most complete constructor variant: forwards every argument,
 * including the color schemes, pre-registration flag, and resume hash, to
 * {@code performBaseCommon}.
 *
 * @param listener Type of listener for this proxy base.
 * @param sdlProxyConfigurationResources Configuration resources for this proxy.
 * @param enableAdvancedLifecycleManagement Flag that ALM should be enabled or not.
 * @param appName Client application name.
 * @param ttsName TTS name.
 * @param ngnMediaScreenAppName Media Screen Application name.
 * @param vrSynonyms List of synonyms.
 * @param isMediaApp Flag that indicates that client application if media application or not.
 * @param sdlMsgVersion Version of Sdl Message.
 * @param languageDesired Desired language.
 * @param hmiDisplayLanguageDesired Desired language for HMI.
 * @param appType Type of application.
 * @param appID Application identifier.
 * @param autoActivateID Auto activation identifier.
 * @param dayColorScheme Day color scheme.
 * @param nightColorScheme Night color scheme.
 * @param callbackToUIThread Flag that indicates that this proxy should send callback to UI thread or not.
 * @param preRegister Flag that indicates that this proxy should be pre-registered or not.
 * @param sHashID Hash identifier used for app resumption.
 * @param bEnableResume Flag that indicates whether app resumption is enabled.
 * @param transportConfig Configuration of transport to be used by underlying connection.
 * @throws SdlException if there is an unrecoverable error class might throw an exception.
 */
protected SdlProxyBase(proxyListenerType listener, SdlProxyConfigurationResources sdlProxyConfigurationResources,
        boolean enableAdvancedLifecycleManagement, String appName, Vector<TTSChunk> ttsName,
        String ngnMediaScreenAppName, Vector<String> vrSynonyms, Boolean isMediaApp, SdlMsgVersion sdlMsgVersion,
        Language languageDesired, Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType, String appID,
        String autoActivateID, TemplateColorScheme dayColorScheme, TemplateColorScheme nightColorScheme,
        boolean callbackToUIThread, boolean preRegister, String sHashID, Boolean bEnableResume, BaseTransportConfig transportConfig)
        throws SdlException
{
    performBaseCommon(listener, sdlProxyConfigurationResources, enableAdvancedLifecycleManagement, appName, ttsName, ngnMediaScreenAppName, vrSynonyms, isMediaApp,
            sdlMsgVersion, languageDesired, hmiDisplayLanguageDesired, appType, appID, autoActivateID, dayColorScheme, nightColorScheme, callbackToUIThread, preRegister, sHashID, bEnableResume, transportConfig);
}
/**
 * Constructor. Variant with a pre-registration flag but without color schemes
 * or resume state; delegates to {@code performBaseCommon}.
 *
 * @param listener Type of listener for this proxy base.
 * @param sdlProxyConfigurationResources Configuration resources for this proxy.
 * @param enableAdvancedLifecycleManagement Flag that ALM should be enabled or not.
 * @param appName Client application name.
 * @param ttsName TTS name.
 * @param ngnMediaScreenAppName Media Screen Application name.
 * @param vrSynonyms List of synonyms.
 * @param isMediaApp Flag that indicates that client application if media application or not.
 * @param sdlMsgVersion Version of Sdl Message.
 * @param languageDesired Desired language.
 * @param hmiDisplayLanguageDesired Desired language for HMI.
 * @param appType Type of application.
 * @param appID Application identifier.
 * @param autoActivateID Auto activation identifier.
 * @param callbackToUIThread Flag that indicates that this proxy should send callback to UI thread or not.
 * @param preRegister Flag that indicates that this proxy should be pre-registered or not.
 * @param transportConfig Configuration of transport to be used by underlying connection.
 * @throws SdlException if there is an unrecoverable error class might throw an exception.
 */
protected SdlProxyBase(proxyListenerType listener, SdlProxyConfigurationResources sdlProxyConfigurationResources,
        boolean enableAdvancedLifecycleManagement, String appName, Vector<TTSChunk> ttsName,
        String ngnMediaScreenAppName, Vector<String> vrSynonyms, Boolean isMediaApp, SdlMsgVersion sdlMsgVersion,
        Language languageDesired, Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType, String appID,
        String autoActivateID, boolean callbackToUIThread, boolean preRegister, BaseTransportConfig transportConfig)
        throws SdlException
{
    performBaseCommon(listener, sdlProxyConfigurationResources, enableAdvancedLifecycleManagement, appName, ttsName, ngnMediaScreenAppName, vrSynonyms, isMediaApp,
            sdlMsgVersion, languageDesired, hmiDisplayLanguageDesired, appType, appID, autoActivateID, null, null, callbackToUIThread, preRegister, null, null, transportConfig);
}
/**
 * Constructor. Variant with color schemes and a pre-registration flag, but no
 * resume state; delegates to {@code performBaseCommon}.
 *
 * @param listener Type of listener for this proxy base.
 * @param sdlProxyConfigurationResources Configuration resources for this proxy.
 * @param enableAdvancedLifecycleManagement Flag that ALM should be enabled or not.
 * @param appName Client application name.
 * @param ttsName TTS name.
 * @param ngnMediaScreenAppName Media Screen Application name.
 * @param vrSynonyms List of synonyms.
 * @param isMediaApp Flag that indicates that client application if media application or not.
 * @param sdlMsgVersion Version of Sdl Message.
 * @param languageDesired Desired language.
 * @param hmiDisplayLanguageDesired Desired language for HMI.
 * @param appType Type of application.
 * @param appID Application identifier.
 * @param autoActivateID Auto activation identifier.
 * @param dayColorScheme Day color scheme.
 * @param nightColorScheme Night color scheme.
 * @param callbackToUIThread Flag that indicates that this proxy should send callback to UI thread or not.
 * @param preRegister Flag that indicates that this proxy should be pre-registered or not.
 * @param transportConfig Configuration of transport to be used by underlying connection.
 * @throws SdlException if there is an unrecoverable error class might throw an exception.
 */
protected SdlProxyBase(proxyListenerType listener, SdlProxyConfigurationResources sdlProxyConfigurationResources,
        boolean enableAdvancedLifecycleManagement, String appName, Vector<TTSChunk> ttsName,
        String ngnMediaScreenAppName, Vector<String> vrSynonyms, Boolean isMediaApp, SdlMsgVersion sdlMsgVersion,
        Language languageDesired, Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType, String appID,
        String autoActivateID, TemplateColorScheme dayColorScheme, TemplateColorScheme nightColorScheme,
        boolean callbackToUIThread, boolean preRegister, BaseTransportConfig transportConfig)
        throws SdlException
{
    performBaseCommon(listener, sdlProxyConfigurationResources, enableAdvancedLifecycleManagement, appName, ttsName, ngnMediaScreenAppName, vrSynonyms, isMediaApp,
            sdlMsgVersion, languageDesired, hmiDisplayLanguageDesired, appType, appID, autoActivateID, dayColorScheme, nightColorScheme, callbackToUIThread, preRegister, null, null, transportConfig);
}
/**
 * Builds the skeleton debug-broadcast intent used for SDL trace logging.
 * Every known extra is pre-populated with an empty/default value so that
 * receivers can read any key without a null check; callers then overwrite
 * the extras relevant to their event via {@code updateBroadcastIntent}.
 */
private Intent createBroadcastIntent()
{
    Intent broadcast = new Intent();
    broadcast.setAction("com.smartdevicelink.broadcast");
    broadcast.putExtra("APP_NAME", this._applicationName);
    broadcast.putExtra("APP_ID", this._appID);
    broadcast.putExtra("RPC_NAME", "");
    broadcast.putExtra("TYPE", "");
    broadcast.putExtra("SUCCESS", true);
    broadcast.putExtra("CORRID", 0);
    broadcast.putExtra("FUNCTION_NAME", "");
    // COMMENT1 .. COMMENT10 are all initialized empty.
    for (int commentIndex = 1; commentIndex <= 10; commentIndex++) {
        broadcast.putExtra("COMMENT" + commentIndex, "");
    }
    broadcast.putExtra("DATA", "");
    broadcast.putExtra("SHOW_ON_UI", true);
    return broadcast;
}
/**
 * Stores a String extra on the given debug-broadcast intent, substituting
 * the empty string for a null value.
 */
private void updateBroadcastIntent(Intent sendIntent, String sKey, String sValue)
{
    sendIntent.putExtra(sKey, sValue == null ? "" : sValue);
}
/**
 * Stores a boolean extra on the given debug-broadcast intent.
 */
private void updateBroadcastIntent(Intent sendIntent, String sKey, boolean bValue)
{
    sendIntent.putExtra(sKey, bValue);
}
/**
 * Stores an int extra on the given debug-broadcast intent.
 */
private void updateBroadcastIntent(Intent sendIntent, String sKey, int iValue)
{
    sendIntent.putExtra(sKey, iValue);
}
/**
 * Resolves the Android Service associated with this proxy, checking in
 * priority order: the proxy listener (if it is itself a Service), the
 * explicitly registered app service, then the stored app context.
 * Returns null when none of them is a usable Service or on any failure.
 */
private Service getService()
{
    try {
        if (_proxyListener instanceof Service) {
            return (Service) _proxyListener;
        }
        if (_appService != null) {
            return _appService;
        }
        if (_appContext instanceof Service) {
            return (Service) _appContext;
        }
        return null;
    } catch (Exception ex) {
        // Best effort only: callers treat null as "no service available".
        return null;
    }
}
/**
 * Best-effort delivery of a debug-broadcast intent. Resolves a Context in
 * priority order (listener-as-Service, registered app service, stored app
 * context) and broadcasts through it. Silently does nothing when no context
 * is available or the broadcast throws.
 */
private void sendBroadcastIntent(Intent sendIntent)
{
    Service hostService = null;
    if (_proxyListener instanceof Service)
    {
        hostService = (Service) _proxyListener;
    }
    else if (_appService != null)
    {
        hostService = _appService;
    }

    Context broadcastContext;
    if (hostService != null) {
        broadcastContext = hostService.getApplicationContext();
    } else if (_appContext != null) {
        broadcastContext = _appContext;
    } else {
        // Nowhere to broadcast from; drop the debug event.
        return;
    }

    try
    {
        if (broadcastContext != null) broadcastContext.sendBroadcast(sendIntent);
    }
    catch (Exception ex)
    {
        //If the service or context has become unavailable unexpectedly, catch the exception and move on -- no broadcast log will occur.
    }
}
/**
 * Builds an {@link HttpURLConnection} configured for posting a system request
 * to the given URL.
 * <p>
 * Hardcoded defaults (JSON POST, no caching, no redirects, UTF-8) are used
 * unless a {@code Headers} object accompanies the request, in which case every
 * setting is taken from the header instead. A debug broadcast describing the
 * content lengths is always emitted, even on failure.
 *
 * @param myHeader optional header settings from the module; may be null
 * @param sURLString destination URL
 * @param Timeout connect/read timeout in seconds (used when myHeader is null)
 * @param iContentLen actual body length in bytes (used when myHeader is null)
 * @return a configured, unopened connection, or null on any failure
 *         (malformed URL, unsupported method, etc.)
 */
private HttpURLConnection getURLConnection(Headers myHeader, String sURLString, int Timeout, int iContentLen)
{
    // Defaults applied when no Headers object accompanies the request.
    String sContentType = "application/json";
    int connectionTimeoutMillis = Timeout * 1000;
    int readTimeoutMillis = Timeout * 1000;
    boolean bDoOutput = true;
    boolean bDoInput = true;
    boolean bUsesCaches = false;
    String sRequestMeth = "POST";
    boolean bInstFolRed = false;
    String sCharSet = "utf-8";
    int iContentLength = iContentLen;

    Intent sendIntent = createBroadcastIntent();
    updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "getURLConnection");
    updateBroadcastIntent(sendIntent, "COMMENT1", "Actual Content Length: " + iContentLen);

    if (myHeader != null)
    {
        // The header supplied by the module overrides every hardcoded default.
        sContentType = myHeader.getContentType();
        bDoOutput = myHeader.getDoOutput();
        bDoInput = myHeader.getDoInput();
        bUsesCaches = myHeader.getUseCaches();
        sRequestMeth = myHeader.getRequestMethod();
        bInstFolRed = myHeader.getInstanceFollowRedirects();
        sCharSet = myHeader.getCharset();
        iContentLength = myHeader.getContentLength();
        connectionTimeoutMillis = myHeader.getConnectTimeout() * 1000;
        readTimeoutMillis = myHeader.getReadTimeout() * 1000;
        updateBroadcastIntent(sendIntent, "COMMENT2", "\nHeader Defined Content Length: " + iContentLength);
    }

    try
    {
        URL url = new URL(sURLString);
        HttpURLConnection urlConnection = (HttpURLConnection) url.openConnection();
        urlConnection.setConnectTimeout(connectionTimeoutMillis);
        urlConnection.setDoOutput(bDoOutput);
        urlConnection.setDoInput(bDoInput);
        urlConnection.setRequestMethod(sRequestMeth);
        urlConnection.setReadTimeout(readTimeoutMillis);
        urlConnection.setInstanceFollowRedirects(bInstFolRed);
        urlConnection.setRequestProperty("Content-Type", sContentType);
        urlConnection.setRequestProperty("charset", sCharSet);
        urlConnection.setRequestProperty("Content-Length", Integer.toString(iContentLength));
        urlConnection.setUseCaches(bUsesCaches);
        return urlConnection;
    }
    catch (Exception e)
    {
        // Callers treat null as a failed request; the broadcast below is the only log.
        return null;
    }
    finally
    {
        sendBroadcastIntent(sendIntent);
    }
}
/**
 * Handles an OnSystemRequest by POSTing its payload to the backend (policy
 * server or the URL carried in the message) and forwarding the HTTP response
 * back to the module as either a PutFile (HTTP request type) or a
 * SystemRequest (proprietary/legacy types).
 * <p>
 * Flow: build the request body (raw bulk data for HTTP; JSON-wrapped legacy
 * data or the message body otherwise), open the connection via
 * {@code getURLConnection}, write the body, read the response, then send the
 * resulting RPC back to SDL. All failures are logged via debug broadcasts and
 * swallowed; the method never throws. The two debug intents are always
 * broadcast and the connection always disconnected in the finally block.
 *
 * @param msg the OnSystemRequest received from the module
 */
private void sendOnSystemRequestToUrl(OnSystemRequest msg)
{
    Intent sendIntent = createBroadcastIntent();
    Intent sendIntent2 = createBroadcastIntent();

    HttpURLConnection urlConnection = null;
    boolean bLegacy = false;

    // A locally configured policy URL (test override) takes priority over the
    // URL carried in the message.
    String sURLString;
    if (!getPoliciesURL().equals(""))
        sURLString = sPoliciesURL;
    else
        sURLString = msg.getUrl();

    // Default to a 2-second timeout when the module did not specify one.
    Integer iTimeout = msg.getTimeout();

    if (iTimeout == null)
        iTimeout = 2000;

    Headers myHeader = msg.getHeader();

    updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "sendOnSystemRequestToUrl");
    updateBroadcastIntent(sendIntent, "COMMENT5", "\r\nCloud URL: " + sURLString);

    try
    {
        if (myHeader == null)
            updateBroadcastIntent(sendIntent, "COMMENT7", "\r\nHTTPRequest Header is null");

        String sBodyString = msg.getBody();

        JSONObject jsonObjectToSendToServer;
        String valid_json = "";
        int length;
        if (sBodyString == null)
        {
            // No body: HTTP requests post the raw bulk data; everything else
            // wraps the legacy data list in a {"data": [...]} JSON object.
            if(RequestType.HTTP.equals(msg.getRequestType())){
                length = msg.getBulkData().length;
                Intent sendIntent3 = createBroadcastIntent();
                updateBroadcastIntent(sendIntent3, "FUNCTION_NAME", "replace");
                updateBroadcastIntent(sendIntent3, "COMMENT1", "Valid Json length before replace: " + length);
                sendBroadcastIntent(sendIntent3);
            }else{
                List<String> legacyData = msg.getLegacyData();
                JSONArray jsonArrayOfSdlPPackets = new JSONArray(legacyData);
                jsonObjectToSendToServer = new JSONObject();
                jsonObjectToSendToServer.put("data", jsonArrayOfSdlPPackets);
                bLegacy = true;
                updateBroadcastIntent(sendIntent, "COMMENT6", "\r\nLegacy SystemRequest: true");
                // NOTE(review): stripping every backslash from the serialized
                // JSON looks intended to undo escaping; confirm against server
                // expectations.
                valid_json = jsonObjectToSendToServer.toString().replace("\\", "");
                length = valid_json.getBytes("UTF-8").length;
            }
        }
        else
        {
            Intent sendIntent3 = createBroadcastIntent();
            updateBroadcastIntent(sendIntent3, "FUNCTION_NAME", "replace");
            updateBroadcastIntent(sendIntent3, "COMMENT1", "Valid Json length before replace: " + sBodyString.getBytes("UTF-8").length);
            sendBroadcastIntent(sendIntent3);
            valid_json = sBodyString.replace("\\", "");
            length = valid_json.getBytes("UTF-8").length;
        }

        urlConnection = getURLConnection(myHeader, sURLString, iTimeout, length);

        if (urlConnection == null)
        {
            Log.i(TAG, "urlConnection is null, check RPC input parameters");
            updateBroadcastIntent(sendIntent, "COMMENT2", "urlConnection is null, check RPC input parameters");
            return;
        }

        // Write the request body (raw bytes for HTTP, text otherwise).
        DataOutputStream wr = new DataOutputStream(urlConnection.getOutputStream());
        if(RequestType.HTTP.equals(msg.getRequestType())){
            wr.write(msg.getBulkData());
        }else{
            wr.writeBytes(valid_json);
        }

        wr.flush();
        wr.close();

        // NOTE(review): BeforeTime and AfterTime are captured back-to-back with
        // no work in between, so the reported round trip time is effectively
        // always 0 — confirm whether this was meant to bracket the request.
        long BeforeTime = System.currentTimeMillis();
        long AfterTime = System.currentTimeMillis();
        final long roundtriptime = AfterTime - BeforeTime;

        updateBroadcastIntent(sendIntent, "COMMENT4", " Round trip time: " + roundtriptime);
        updateBroadcastIntent(sendIntent, "COMMENT1", "Received response from cloud, response code=" + urlConnection.getResponseCode() + " ");

        int iResponseCode = urlConnection.getResponseCode();

        if (iResponseCode != HttpURLConnection.HTTP_OK)
        {
            Log.i(TAG, "Response code not HTTP_OK, returning from sendOnSystemRequestToUrl.");
            updateBroadcastIntent(sendIntent, "COMMENT2", "Response code not HTTP_OK, aborting request. ");
            return;
        }

        // Read the whole response body, joining lines with '\r'.
        InputStream is = urlConnection.getInputStream();
        BufferedReader rd = new BufferedReader(new InputStreamReader(is));
        String line;
        StringBuilder response = new StringBuilder();
        while((line = rd.readLine()) != null)
        {
            response.append(line);
            response.append('\r');
        }
        rd.close();
        //We've read the body
        if(RequestType.HTTP.equals(msg.getRequestType())){
            // Create the SystemRequest RPC to send to module.
            PutFile putFile = new PutFile();
            putFile.setFileType(FileType.JSON);
            putFile.setCorrelationID(POLICIES_CORRELATION_ID);
            putFile.setSdlFileName("response_data");
            putFile.setFileData(response.toString().getBytes("UTF-8"));
            // NOTE(review): setCRC is handed the raw response bytes (platform
            // default charset), not a computed checksum — confirm intent.
            putFile.setCRC(response.toString().getBytes());
            updateBroadcastIntent(sendIntent, "DATA", "Data from cloud response: " + response.toString());

            sendRPCRequestPrivate(putFile);
            Log.i("sendSystemRequestToUrl", "sent to sdl");

            updateBroadcastIntent(sendIntent2, "RPC_NAME", FunctionID.PUT_FILE.toString());
            updateBroadcastIntent(sendIntent2, "TYPE", RPCMessage.KEY_REQUEST);
            updateBroadcastIntent(sendIntent2, "CORRID", putFile.getCorrelationID());
        }else{
            Vector<String> cloudDataReceived = new Vector<String>();
            final String dataKey = "data";
            // Convert the response to JSON
            JSONObject jsonResponse = new JSONObject(response.toString());
            // "data" may be an array of strings or a single string.
            if(jsonResponse.has(dataKey)){
                if (jsonResponse.get(dataKey) instanceof JSONArray)
                {
                    JSONArray jsonArray = jsonResponse.getJSONArray(dataKey);
                    for (int i=0; i<jsonArray.length(); i++)
                    {
                        if (jsonArray.get(i) instanceof String)
                        {
                            cloudDataReceived.add(jsonArray.getString(i));
                            //Log.i("sendSystemRequestToUrl", "jsonArray.getString(i): " + jsonArray.getString(i));
                        }
                    }
                }
                else if (jsonResponse.get(dataKey) instanceof String)
                {
                    cloudDataReceived.add(jsonResponse.getString(dataKey));
                    //Log.i("sendSystemRequestToUrl", "jsonResponse.getString(data): " + jsonResponse.getString("data"));
                }
            }
            else
            {
                // NOTE(review): this branch actually fires when the "data" key
                // is absent, not when its type is wrong as the message implies.
                DebugTool.logError("sendSystemRequestToUrl: Data in JSON Object neither an array nor a string.");
                //Log.i("sendSystemRequestToUrl", "sendSystemRequestToUrl: Data in JSON Object neither an array nor a string.");
                return;
            }

            // Truncate the broadcast copy of the response to 511 chars.
            String sResponse = cloudDataReceived.toString();

            if (sResponse.length() > 512)
            {
                sResponse = sResponse.substring(0, 511);
            }

            updateBroadcastIntent(sendIntent, "DATA", "Data from cloud response: " + sResponse);

            // Send new SystemRequest to SDL
            SystemRequest mySystemRequest = null;

            // NOTE(review): both null checks below are always true at this
            // point (cloudDataReceived and response are assigned above).
            if (bLegacy){
                if(cloudDataReceived != null) {
                    mySystemRequest = new SystemRequest(true);
                    mySystemRequest.setCorrelationID(getPoliciesReservedCorrelationID());
                    mySystemRequest.setLegacyData(cloudDataReceived);
                }
            }else{
                if (response != null) {
                    mySystemRequest = new SystemRequest();
                    mySystemRequest.setRequestType(RequestType.PROPRIETARY);
                    mySystemRequest.setCorrelationID(getPoliciesReservedCorrelationID());
                    mySystemRequest.setBulkData(response.toString().getBytes());
                }
            }

            if (getIsConnected())
            {
                sendRPCRequestPrivate(mySystemRequest);
                Log.i("sendSystemRequestToUrl", "sent to sdl");

                updateBroadcastIntent(sendIntent2, "RPC_NAME", FunctionID.SYSTEM_REQUEST.toString());
                updateBroadcastIntent(sendIntent2, "TYPE", RPCMessage.KEY_REQUEST);
                updateBroadcastIntent(sendIntent2, "CORRID", mySystemRequest.getCorrelationID());
            }
        }
    }
    catch (SdlException e)
    {
        DebugTool.logError("sendSystemRequestToUrl: Could not get data from JSONObject received.", e);
        updateBroadcastIntent(sendIntent, "COMMENT3", " SdlException encountered sendOnSystemRequestToUrl: "+ e);
        //Log.i("pt", "sendSystemRequestToUrl: Could not get data from JSONObject received."+ e);
    }
    catch (JSONException e)
    {
        DebugTool.logError("sendSystemRequestToUrl: JSONException: ", e);
        updateBroadcastIntent(sendIntent, "COMMENT3", " JSONException encountered sendOnSystemRequestToUrl: "+ e);
        //Log.i("pt", "sendSystemRequestToUrl: JSONException: "+ e);
    }
    catch (UnsupportedEncodingException e)
    {
        DebugTool.logError("sendSystemRequestToUrl: Could not encode string.", e);
        updateBroadcastIntent(sendIntent, "COMMENT3", " UnsupportedEncodingException encountered sendOnSystemRequestToUrl: "+ e);
        //Log.i("pt", "sendSystemRequestToUrl: Could not encode string."+ e);
    }
    catch (ProtocolException e)
    {
        DebugTool.logError("sendSystemRequestToUrl: Could not set request method to post.", e);
        updateBroadcastIntent(sendIntent, "COMMENT3", " ProtocolException encountered sendOnSystemRequestToUrl: "+ e);
        //Log.i("pt", "sendSystemRequestToUrl: Could not set request method to post."+ e);
    }
    catch (MalformedURLException e)
    {
        DebugTool.logError("sendSystemRequestToUrl: URL Exception when sending SystemRequest to an external server.", e);
        updateBroadcastIntent(sendIntent, "COMMENT3", " MalformedURLException encountered sendOnSystemRequestToUrl: "+ e);
        //Log.i("pt", "sendSystemRequestToUrl: URL Exception when sending SystemRequest to an external server."+ e);
    }
    catch (IOException e)
    {
        DebugTool.logError("sendSystemRequestToUrl: IOException: ", e);
        updateBroadcastIntent(sendIntent, "COMMENT3", " IOException while sending to cloud: IOException: "+ e);
        //Log.i("pt", "sendSystemRequestToUrl: IOException: "+ e);
    }
    catch (Exception e)
    {
        DebugTool.logError("sendSystemRequestToUrl: Unexpected Exception: ", e);
        updateBroadcastIntent(sendIntent, "COMMENT3", " Exception encountered sendOnSystemRequestToUrl: "+ e);
        //Log.i("pt", "sendSystemRequestToUrl: Unexpected Exception: " + e);
    }
    finally
    {
        sendBroadcastIntent(sendIntent);
        sendBroadcastIntent(sendIntent2);

        if (iFileCount < 10)
            iFileCount++;
        else
            iFileCount = 0;

        if(urlConnection != null)
        {
            urlConnection.disconnect();
        }
    }
}
/**
 * Returns the correlation ID reserved for policy-table system requests.
 */
private int getPoliciesReservedCorrelationID() {
    return POLICIES_CORRELATION_ID;
}
/**
 * Returns true when the given correlation ID is one of the internally
 * reserved IDs (heartbeat, register/unregister app interface, policies)
 * that application code must not reuse. A null ID is never protected.
 */
private boolean isCorrelationIDProtected(Integer correlationID) {
    if (correlationID == null) {
        return false;
    }
    final int id = correlationID;
    return id == HEARTBEAT_CORRELATION_ID
            || id == REGISTER_APP_INTERFACE_CORRELATION_ID
            || id == UNREGISTER_APP_INTERFACE_CORRELATION_ID
            || id == POLICIES_CORRELATION_ID;
}
/**
 * Returns whether the underlying SDL session exists and is connected.
 * Kept public so legacy proxies can poll the connection state.
 */
public Boolean getIsConnected() {
    return sdlSession != null && sdlSession.getIsConnected();
}
/**
 * Returns whether the application is registered in SDL. Note: for testing
 * purposes, it's possible that the connection is established, but the
 * application is not registered.
 *
 * @return true if the application is registered in SDL
 */
public Boolean getAppInterfaceRegistered() {
    return _appInterfaceRegisterd;
}
/**
 * Initializes (or re-initializes) the proxy connection: resets the HMI/focus
 * tracking flags and registration state, creates a fresh SystemCapabilityManager,
 * builds the appropriate SdlSession for the configured transport, and starts it.
 * <p>
 * Legacy USB configs without an accessory are transparently upgraded to a
 * multiplex config that connects through the router service.
 * <p>
 * NOTE(review): session creation and session start are guarded by two separate
 * synchronized blocks on CONNECTION_REFERENCE_LOCK; another thread could
 * observe/modify sdlSession between them — confirm this window is harmless.
 *
 * @throws SdlException if the session cannot be started.
 */
public void initializeProxy() throws SdlException {
    // Reset all of the flags and state variables
    _haveReceivedFirstNonNoneHMILevel = false;
    _haveReceivedFirstFocusLevel = false;
    _haveReceivedFirstFocusLevelFull = false;
    _appInterfaceRegisterd = _preRegisterd;
    _putFileListenerList.clear();
    _sdlIntefaceAvailablity = SdlInterfaceAvailability.SDL_INTERFACE_UNAVAILABLE;
    //Initialize _systemCapabilityManager here.
    _systemCapabilityManager = new SystemCapabilityManager(_internalInterface);
    // Setup SdlConnection
    synchronized(CONNECTION_REFERENCE_LOCK) {
        //Handle legacy USB connections
        if(_transportConfig != null
                && TransportType.USB.equals(_transportConfig.getTransportType())){
            //A USB transport config was provided
            USBTransportConfig usbTransportConfig = (USBTransportConfig)_transportConfig;
            if(usbTransportConfig.getUsbAccessory() == null){
                DebugTool.logInfo("Legacy USB transport config was used, but received null for accessory. Attempting to connect with router service");
                //The accessory was null which means it came from a router service
                MultiplexTransportConfig multiplexTransportConfig = new MultiplexTransportConfig(usbTransportConfig.getUSBContext(),_appID);
                multiplexTransportConfig.setRequiresHighBandwidth(true);
                multiplexTransportConfig.setSecurityLevel(MultiplexTransportConfig.FLAG_MULTI_SECURITY_OFF);
                multiplexTransportConfig.setPrimaryTransports(Collections.singletonList(TransportType.USB));
                multiplexTransportConfig.setSecondaryTransports(new ArrayList<TransportType>());
                _transportConfig = multiplexTransportConfig;
            }
        }
        if(_transportConfig.getTransportType().equals(TransportType.MULTIPLEX)){
            this.sdlSession = new SdlSession2(_interfaceBroker,(MultiplexTransportConfig)_transportConfig);
        }else{
            this.sdlSession = SdlSession.createSession((byte)getProtocolVersion().getMajor(),_interfaceBroker, _transportConfig);
        }
    }
    synchronized(CONNECTION_REFERENCE_LOCK) {
        this.sdlSession.startSession();
        sendTransportBroadcast();
    }
}
/**
 * This method will fake the multiplex connection event.
 * <p>
 * Currently only logs when a session exists; the original v1-to-v2 restart
 * logic is retained below (commented out) pending the FIXME.
 */
@SuppressWarnings("unused")
public void forceOnConnected(){
    synchronized(CONNECTION_REFERENCE_LOCK) {
        if (sdlSession != null) {
            Log.d(TAG, "Forcing on connected.... might actually need this"); //FIXME
            /*if(sdlSession.getSdlConnection()==null){ //There is an issue when switching from v1 to v2+ where the connection is closed. So we restart the session during this call.
                try {
                    sdlSession.startSession();
                } catch (SdlException e) {
                    e.printStackTrace();
                }
            }
            sdlSession.getSdlConnection().forceHardwareConnectEvent(TransportType.BLUETOOTH);
            */
        }
    }
}
/**
 * Broadcasts a comment describing the current transport configuration, as
 * reported by the active session. Silently returns when no session, no
 * transport config, or no comment text is available.
 */
public void sendTransportBroadcast() {
    if (sdlSession == null || _transportConfig == null) {
        return;
    }
    String transportComment = sdlSession.getBroadcastComment(_transportConfig);
    if (transportComment == null || transportComment.isEmpty()) {
        return;
    }
    Intent intent = createBroadcastIntent();
    updateBroadcastIntent(intent, "FUNCTION_NAME", "initializeProxy");
    updateBroadcastIntent(intent, "COMMENT1", transportComment);
    sendBroadcastIntent(intent);
}
/**
 * Public method to enable the siphon transport
 */
@SuppressWarnings("unused")
public void enableSiphonDebug() {
    // Turn the siphon server on and broadcast which port it bound to.
    short port = SiphonServer.enableSiphonServer();
    Intent intent = createBroadcastIntent();
    updateBroadcastIntent(intent, "FUNCTION_NAME", "enableSiphonDebug");
    updateBroadcastIntent(intent, "COMMENT1", "Enabled Siphon Port Number: " + port);
    sendBroadcastIntent(intent);
}
/**
 * Public method to disable the Siphon Trace Server
 */
@SuppressWarnings("unused")
public void disableSiphonDebug() {
    short port = SiphonServer.disableSiphonServer();
    if (port == -1) {
        // Server reported nothing was disabled; stay silent.
        return;
    }
    Intent intent = createBroadcastIntent();
    updateBroadcastIntent(intent, "FUNCTION_NAME", "disableSiphonDebug");
    updateBroadcastIntent(intent, "COMMENT1", "Disabled Siphon Port Number: " + port);
    sendBroadcastIntent(intent);
}
/**
 * Globally enables {@link DebugTool} logging output.
 */
public static void enableDebugTool() {
    DebugTool.enableDebugTool();
}
/**
 * Globally disables {@link DebugTool} logging output.
 */
public static void disableDebugTool() {
    DebugTool.disableDebugTool();
}
/**
 * Reports whether {@link DebugTool} logging is currently enabled.
 * @return true when debug logging is on
 */
@SuppressWarnings("BooleanMethodIsAlwaysInverted")
public static boolean isDebugEnabled() {
    return DebugTool.isDebugEnabled();
}
/**
 * Check to see if a transport is available to perform audio streaming.
 * <br><strong>NOTE:</strong> This is only for the audio streaming service, not regular
 * streaming of media playback.
 * @return true if an audio-streaming-capable transport is connected or can be
 *         connected; false when no such transport is available through the
 *         current session (or there is no session at all).
 */
public boolean isAudioStreamTransportAvailable() {
    if (sdlSession == null) {
        return false;
    }
    return sdlSession.isTransportForServiceAvailable(SessionType.PCM);
}
/**
 * Check to see if a transport is available to perform video streaming.
 * @return true if a video-streaming-capable transport is connected or can be
 *         connected; false when no such transport is available through the
 *         current session (or there is no session at all).
 */
public boolean isVideoStreamTransportAvailable() {
    if (sdlSession == null) {
        return false;
    }
    return sdlSession.isTransportForServiceAvailable(SessionType.NAV);
}
/**
 * Closes the proxy.
 * @deprecated use {@link #dispose()} instead; this method simply delegates to it.
 * @throws SdlException if the proxy has already been disposed
 */
@SuppressWarnings("unused")
@Deprecated
public void close() throws SdlException {
    dispose();
}
/**
 * Tears down proxy state ahead of a disconnect or cycle: in ALM mode it
 * unregisters the app interface (waiting briefly for confirmation), then
 * clears all RPC response/notification listeners and closes the SDL session.
 * @param disconnectedReason currently unused within this method
 * @throws SdlException propagated from the unregister step, if it fails
 */
@SuppressWarnings("UnusedParameters")
private void cleanProxy(SdlDisconnectedReason disconnectedReason) throws SdlException {
    try {
        // ALM Specific Cleanup
        if (_advancedLifecycleManagementEnabled) {
            _sdlConnectionState = SdlConnectionState.SDL_DISCONNECTED;
            firstTimeFull = true;
            // Should we wait for the interface to be unregistered?
            Boolean waitForInterfaceUnregistered = false;
            // Unregister app interface — only worth doing if we are connected and registered.
            synchronized(CONNECTION_REFERENCE_LOCK) {
                if (getIsConnected() && getAppInterfaceRegistered()) {
                    waitForInterfaceUnregistered = true;
                    unregisterAppInterfacePrivate(UNREGISTER_APP_INTERFACE_CORRELATION_ID);
                }
            }
            // Wait (up to 3s) for the app interface to be unregistered; the RPC handler
            // notifies APP_INTERFACE_REGISTERED_LOCK when the UnregisterAppInterface
            // response arrives.
            if (waitForInterfaceUnregistered) {
                synchronized(APP_INTERFACE_REGISTERED_LOCK) {
                    try {
                        APP_INTERFACE_REGISTERED_LOCK.wait(3000);
                    } catch (InterruptedException e) {
                        // Do nothing — proceed with cleanup even if the wait is interrupted.
                    }
                }
            }
        }
        // Drop pending listeners: their requests can no longer be answered once the
        // session goes away.
        if(rpcResponseListeners != null){
            rpcResponseListeners.clear();
        }
        if(rpcNotificationListeners != null){
            rpcNotificationListeners.clear();
        }
        // Clean up SDL Connection
        synchronized(CONNECTION_REFERENCE_LOCK) {
            if (sdlSession != null) sdlSession.close();
        }
    } finally {
        SdlTrace.logProxyEvent("SdlProxy cleaned.", SDL_LIB_TRACE_KEY);
    }
}
/**
 * Terminates the App's Interface Registration, closes the transport connection, ends the protocol session, and frees any resources used by the proxy.
 * Once disposed, the proxy is permanently unusable; further calls throw.
 * @throws SdlException with cause SDL_PROXY_DISPOSED if already disposed
 */
public void dispose() throws SdlException
{
    if (_proxyDisposed) {
        throw new SdlException("This object has been disposed, it is no long capable of executing methods.", SdlExceptionCause.SDL_PROXY_DISPOSED);
    }
    // Mark disposed up front so re-entrant calls fail fast.
    _proxyDisposed = true;
    SdlTrace.logProxyEvent("Application called dispose() method.", SDL_LIB_TRACE_KEY);
    try{
        // Clean the proxy (unregister + close session) before stopping the dispatchers
        // so the unregister RPC can still flow through them.
        cleanProxy(SdlDisconnectedReason.APPLICATION_REQUESTED_DISCONNECT);
        // Close IncomingProxyMessageDispatcher thread
        synchronized(INCOMING_MESSAGE_QUEUE_THREAD_LOCK) {
            if (_incomingProxyMessageDispatcher != null) {
                _incomingProxyMessageDispatcher.dispose();
                _incomingProxyMessageDispatcher = null;
            }
        }
        // Close OutgoingProxyMessageDispatcher thread
        synchronized(OUTGOING_MESSAGE_QUEUE_THREAD_LOCK) {
            if (_outgoingProxyMessageDispatcher != null) {
                _outgoingProxyMessageDispatcher.dispose();
                _outgoingProxyMessageDispatcher = null;
            }
        }
        // Close InternalProxyMessageDispatcher thread
        synchronized(INTERNAL_MESSAGE_QUEUE_THREAD_LOCK) {
            if (_internalProxyMessageDispatcher != null) {
                _internalProxyMessageDispatcher.dispose();
                _internalProxyMessageDispatcher = null;
            }
        }
        // Release remaining references so they can be garbage collected.
        _traceDeviceInterrogator = null;
        rpcResponseListeners = null;
    } finally {
        SdlTrace.logProxyEvent("SdlProxy disposed.", SDL_LIB_TRACE_KEY);
    }
} // end-method
// Static lock serializing proxy-cycle operations (shared across all proxy instances).
private final static Object CYCLE_LOCK = new Object();
// True while this instance is mid-cycle; used as a re-entrancy guard in cycleProxy().
private boolean _cycling = false;
/**
 * Cycles (cleans and re-initializes) the proxy; only called in ALM mode.
 * Unless the cycle is an internal transport maneuver (legacy Bluetooth or a
 * primary-transport cycle request), the listener is notified that the proxy
 * was cycled.
 * @param disconnectedReason why the proxy is being cycled
 */
protected void cycleProxy(SdlDisconnectedReason disconnectedReason) {
    // Fast-path re-entrancy guard: if a cycle is already in flight, drop this request.
    if (_cycling) return;
    synchronized (CYCLE_LOCK) {
        try {
            _cycling = true;
            cleanProxy(disconnectedReason);
            initializeProxy();
            if (!SdlDisconnectedReason.LEGACY_BLUETOOTH_MODE_ENABLED.equals(disconnectedReason)
                    && !SdlDisconnectedReason.PRIMARY_TRANSPORT_CYCLE_REQUEST.equals(disconnectedReason)) {
                // We don't want to alert higher if we are just cycling for legacy bluetooth
                notifyProxyClosed("Sdl Proxy Cycled", new SdlException("Sdl Proxy Cycled", SdlExceptionCause.SDL_PROXY_CYCLED), disconnectedReason);
            }
        } catch (SdlException e) {
            Intent sendIntent = createBroadcastIntent();
            updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "cycleProxy");
            updateBroadcastIntent(sendIntent, "COMMENT1", "Proxy cycled, exception cause: " + e.getSdlExceptionCause());
            sendBroadcastIntent(sendIntent);
            switch (e.getSdlExceptionCause()) {
                case BLUETOOTH_DISABLED:
                    notifyProxyClosed("Bluetooth is disabled. Bluetooth must be enabled to connect to SDL. Reattempt a connection once Bluetooth is enabled.",
                            new SdlException("Bluetooth is disabled. Bluetooth must be enabled to connect to SDL. Reattempt a connection once Bluetooth is enabled.", SdlExceptionCause.BLUETOOTH_DISABLED), SdlDisconnectedReason.BLUETOOTH_DISABLED);
                    break;
                case BLUETOOTH_ADAPTER_NULL:
                    notifyProxyClosed("Cannot locate a Bluetooth adapater. A SDL connection is impossible on this device until a Bluetooth adapter is added.",
                            new SdlException("Cannot locate a Bluetooth adapater. A SDL connection is impossible on this device until a Bluetooth adapter is added.", SdlExceptionCause.BLUETOOTH_ADAPTER_NULL), SdlDisconnectedReason.BLUETOOTH_ADAPTER_ERROR);
                    break;
                default :
                    notifyProxyClosed("Cycling the proxy failed.", e, SdlDisconnectedReason.GENERIC_ERROR);
                    break;
            }
        } catch (Exception e) {
            notifyProxyClosed("Cycling the proxy failed.", e, SdlDisconnectedReason.GENERIC_ERROR);
        } finally {
            // Always clear the flag — previously this ran after the catch blocks, so a
            // throw from notifyProxyClosed() would leave _cycling latched true and
            // permanently disable cycling for this instance.
            _cycling = false;
        }
    }
}
/************* Functions used by the Message Dispatching Queues ****************/
/**
 * Entry point for protocol messages coming off the incoming dispatcher. Only
 * RPC and BULK_DATA session types are handled here: the raw payload is
 * unmarshalled into a Hashtable shaped by protocol version, then routed to
 * handleRPCMessage(). Any failure is reported to the proxy listener rather
 * than propagated.
 * @param message the received protocol message
 */
private void dispatchIncomingMessage(ProtocolMessage message) {
    try{
        // Dispatching logic
        if (message.getSessionType().equals(SessionType.RPC)
                ||message.getSessionType().equals(SessionType.BULK_DATA) ) {
            try {
                // If we negotiated v1 but the module is sending a higher version,
                // upgrade our protocol version (preferring the session's value).
                if (protocolVersion!= null && protocolVersion.getMajor() == 1 && message.getVersion() > 1) {
                    if(sdlSession != null
                            && sdlSession.getProtocolVersion()!= null
                            && sdlSession.getProtocolVersion().getMajor() > 1){
                        setProtocolVersion(sdlSession.getProtocolVersion());
                    }else{
                        setProtocolVersion(new Version(message.getVersion(),0,0));
                    }
                }
                Hashtable<String, Object> hash = new Hashtable<String, Object>();
                if (protocolVersion!= null && protocolVersion.getMajor() > 1) {
                    // v2+: build the RPC hash manually from the binary header fields.
                    Hashtable<String, Object> hashTemp = new Hashtable<String, Object>();
                    hashTemp.put(RPCMessage.KEY_CORRELATION_ID, message.getCorrID());
                    if (message.getJsonSize() > 0) {
                        final Hashtable<String, Object> mhash = JsonRPCMarshaller.unmarshall(message.getData());
                        //hashTemp.put(Names.parameters, mhash.get(Names.parameters));
                        if (mhash != null) {
                            hashTemp.put(RPCMessage.KEY_PARAMETERS, mhash);
                        }
                    }
                    String functionName = FunctionID.getFunctionName(message.getFunctionID());
                    if (functionName != null) {
                        hashTemp.put(RPCMessage.KEY_FUNCTION_NAME, functionName);
                    } else {
                        // Unknown function id — drop the message rather than dispatch garbage.
                        DebugTool.logWarning("Dispatch Incoming Message - function name is null unknown RPC.  FunctionId: " + message.getFunctionID());
                        return;
                    }
                    // RPC type discriminator: 0x00 request, 0x01 response, 0x02 notification.
                    if (message.getRPCType() == 0x00) {
                        hash.put(RPCMessage.KEY_REQUEST, hashTemp);
                    } else if (message.getRPCType() == 0x01) {
                        hash.put(RPCMessage.KEY_RESPONSE, hashTemp);
                    } else if (message.getRPCType() == 0x02) {
                        hash.put(RPCMessage.KEY_NOTIFICATION, hashTemp);
                    }
                    if (message.getBulkData() != null) hash.put(RPCStruct.KEY_BULK_DATA, message.getBulkData());
                    if (message.getPayloadProtected()) hash.put(RPCStruct.KEY_PROTECTED, true);
                } else {
                    // v1: the payload already contains the full RPC JSON.
                    hash = JsonRPCMarshaller.unmarshall(message.getData());
                }
                handleRPCMessage(hash);
            } catch (final Exception excp) {
                DebugTool.logError("Failure handling protocol message: " + excp.toString(), excp);
                passErrorToProxyListener("Error handing incoming protocol message.", excp);
            } // end-catch
        } //else { Handle other protocol message types here}
    } catch (final Exception e) {
        // Pass error to application through listener
        DebugTool.logError("Error handing proxy event.", e);
        passErrorToProxyListener("Error handing incoming protocol message.", e);
    }
}
/**
 * Get the SDL protocol spec version being used.
 * Lazily initializes to 1.0.0 when no version has been negotiated yet.
 * @return Version of the protocol spec, never null
 */
public @NonNull Version getProtocolVersion() {
    Version current = this.protocolVersion;
    if (current == null) {
        current = new Version(1, 0, 0);
        this.protocolVersion = current;
    }
    return current;
}
/** Records the protocol spec version negotiated with the module. */
private void setProtocolVersion(@NonNull Version version) {
    this.protocolVersion = version;
}
/**
 * Serializes an RPC message to an indented JSON string using the current
 * protocol major version for marshalling.
 * @param msg the message to serialize
 * @return the JSON text, or null if serialization failed (the failure is
 *         reported to the proxy listener)
 */
public String serializeJSON(RPCMessage msg) {
    try {
        byte majorVersion = (byte) this.getProtocolVersion().getMajor();
        return msg.serializeJSON(majorVersion).toString(2);
    } catch (final Exception e) {
        DebugTool.logError("Error handing proxy event.", e);
        passErrorToProxyListener("Error serializing message.", e);
        return null;
    }
}
/** Forwards incoming-dispatcher failures to the proxy listener. */
private void handleErrorsFromIncomingMessageDispatcher(String info, Exception e) {
    passErrorToProxyListener(info, e);
}
/**
 * Sends a protocol message over the current session (if any) and traces it.
 * @param message the message to transmit
 */
private void dispatchOutgoingMessage(ProtocolMessage message) {
    // Hold the connection lock so the session cannot be torn down mid-send.
    synchronized (CONNECTION_REFERENCE_LOCK) {
        if (sdlSession != null) {
            sdlSession.sendMessage(message);
        }
    }
    SdlTrace.logProxyEvent("SdlProxy sending Protocol Message: " + message.toString(), SDL_LIB_TRACE_KEY);
}
/** Forwards outgoing-dispatcher failures to the proxy listener. */
private void handleErrorsFromOutgoingMessageDispatcher(String info, Exception e) {
    passErrorToProxyListener(info, e);
}
/**
 * Routes internal proxy messages (errors, service lifecycle events, proxy
 * open/close) to the registered proxy listener, optionally marshalling the
 * callback onto the UI thread when _callbackToUIThread is set. Unknown
 * message types are logged and dropped.
 * @param message the internal message to dispatch
 */
void dispatchInternalMessage(final InternalProxyMessage message) {
    try{
        switch (message.getFunctionName()) {
            case InternalProxyMessage.OnProxyError: {
                final OnError msg = (OnError) message;
                if (_callbackToUIThread) {
                    // Run in UI thread
                    _mainUIHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            _proxyListener.onError(msg.getInfo(), msg.getException());
                        }
                    });
                } else {
                    _proxyListener.onError(msg.getInfo(), msg.getException());
                }
                break;
            }
            case InternalProxyMessage.OnServiceEnded: {
                final OnServiceEnded msg = (OnServiceEnded) message;
                if (_callbackToUIThread) {
                    // Run in UI thread
                    _mainUIHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            _proxyListener.onServiceEnded(msg);
                        }
                    });
                } else {
                    _proxyListener.onServiceEnded(msg);
                }
                break;
            }
            case InternalProxyMessage.OnServiceNACKed: {
                final OnServiceNACKed msg = (OnServiceNACKed) message;
                if (_callbackToUIThread) {
                    // Run in UI thread
                    _mainUIHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            _proxyListener.onServiceNACKed(msg);
                        }
                    });
                } else {
                    _proxyListener.onServiceNACKed(msg);
                }
                /* *************Start Legacy Specific Call-backs************/
                break;
            }
            case InternalProxyMessage.OnProxyOpened:
                // Legacy callback: only IProxyListener (not ALM) receives onProxyOpened.
                if (_callbackToUIThread) {
                    // Run in UI thread
                    _mainUIHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            ((IProxyListener) _proxyListener).onProxyOpened();
                        }
                    });
                } else {
                    ((IProxyListener) _proxyListener).onProxyOpened();
                }
                break;
            case InternalProxyMessage.OnProxyClosed: {
                final OnProxyClosed msg = (OnProxyClosed) message;
                if (_callbackToUIThread) {
                    // Run in UI thread
                    _mainUIHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            _proxyListener.onProxyClosed(msg.getInfo(), msg.getException(), msg.getReason());
                        }
                    });
                } else {
                    _proxyListener.onProxyClosed(msg.getInfo(), msg.getException(), msg.getReason());
                }
                /* ***************End Legacy Specific Call-backs************/
                break;
            }
            default:
                // Diagnostics
                SdlTrace.logProxyEvent("Unknown RPC Message encountered. Check for an updated version of the SDL Proxy.", SDL_LIB_TRACE_KEY);
                DebugTool.logError("Unknown RPC Message encountered. Check for an updated version of the SDL Proxy.");
                break;
        }
        SdlTrace.logProxyEvent("Proxy fired callback: " + message.getFunctionName(), SDL_LIB_TRACE_KEY);
    } catch(final Exception e) {
        // Pass error to application through listener
        DebugTool.logError("Error handing proxy event.", e);
        if (_callbackToUIThread) {
            // Run in UI thread
            _mainUIHandler.post(new Runnable() {
                @Override
                public void run() {
                    _proxyListener.onError("Error handing proxy event.", e);
                }
            });
        } else {
            _proxyListener.onError("Error handing proxy event.", e);
        }
    }
}
/**
 * Last-resort handler for failures of the internal message dispatcher itself.
 * Since the dispatcher that normally delivers callbacks is broken, the proxy
 * is declared closed and the listener is invoked directly (synchronously).
 * @param info description of the failure
 * @param e the underlying exception
 */
private void handleErrorsFromInternalMessageDispatcher(String info, Exception e) {
    DebugTool.logError(info, e);
    // This error cannot be passed to the user, as it indicates an error
    // in the communication between the proxy and the application.
    DebugTool.logError("InternalMessageDispatcher failed.", e);
    // Note, this is the only place where the _proxyListener is invoked directly rather
    // than through the internal dispatcher; with an error on the internalMessageDispatcher,
    // we have no other reliable way of communicating with the application.
    notifyProxyClosed("Proxy callback dispatcher is down. Proxy instance is invalid.", e, SdlDisconnectedReason.GENERIC_ERROR);
    _proxyListener.onError("Proxy callback dispatcher is down. Proxy instance is invalid.", e);
}
/************* END Functions used by the Message Dispatching Queues ****************/
/**
 * Private sendRPCRequest method. All RPCRequests are funneled through this method after
 * error checking: the request is marshalled, wrapped in a ProtocolMessage, queued on the
 * outgoing dispatcher, and its response listener registered under the correlation id.
 * @param request the RPC request to transmit; must carry a non-null correlation ID
 * @throws SdlException if the correlation ID is null, or marshalling exhausts memory
 */
private void sendRPCRequestPrivate(RPCRequest request) throws SdlException {
    try {
        SdlTrace.logRPCEvent(InterfaceActivityDirection.Transmit, request, SDL_LIB_TRACE_KEY);
        // Normalize the RPC to the negotiated spec version before marshalling.
        request.format(rpcSpecVersion,true);
        byte[] msgBytes = JsonRPCMarshaller.marshall(request, (byte)getProtocolVersion().getMajor());
        ProtocolMessage pm = new ProtocolMessage();
        pm.setData(msgBytes);
        if (sdlSession != null)
            pm.setSessionID(sdlSession.getSessionId());
        pm.setMessageType(MessageType.RPC);
        pm.setSessionType(SessionType.RPC);
        pm.setFunctionID(FunctionID.getFunctionId(request.getFunctionName()));
        pm.setPayloadProtected(request.isPayloadProtected());
        if (request.getCorrelationID() == null)
        {
            //Log error here
            throw new SdlException("CorrelationID cannot be null. RPC: " + request.getFunctionName(), SdlExceptionCause.INVALID_ARGUMENT);
        }
        pm.setCorrID(request.getCorrelationID());
        if (request.getBulkData() != null){
            pm.setBulkData(request.getBulkData());
        }
        if(request.getFunctionName().equalsIgnoreCase(FunctionID.PUT_FILE.name())){
            // NOTE(review): PutFile gets a non-default priority coefficient — presumably
            // to keep bulky transfers from starving small RPCs; confirm against
            // ProtocolMessage/packetizer usage.
            pm.setPriorityCoefficient(1);
        }
        // Queue this outgoing message
        synchronized(OUTGOING_MESSAGE_QUEUE_THREAD_LOCK) {
            if (_outgoingProxyMessageDispatcher != null) {
                _outgoingProxyMessageDispatcher.queueMessage(pm);
                //Since the message is queued we can add it's listener to our list
                OnRPCResponseListener listener = request.getOnRPCResponseListener();
                if(request.getMessageType().equals(RPCMessage.KEY_REQUEST)){//We might want to include other message types in the future
                    addOnRPCResponseListener(listener, request.getCorrelationID(), msgBytes.length);
                }
            }
        }
    } catch (OutOfMemoryError e) {
        SdlTrace.logProxyEvent("OutOfMemory exception while sending request " + request.getFunctionName(), SDL_LIB_TRACE_KEY);
        throw new SdlException("OutOfMemory exception while sending request " + request.getFunctionName(), e, SdlExceptionCause.INVALID_ARGUMENT);
    }
}
/**
 * Only call this method for a PutFile response. It will cause a class cast exception if not.
 * @param correlationId correlation id of the packet being updated
 * @param bytesWritten how many bytes were written
 * @param totalSize the total size in bytes
 */
@SuppressWarnings("unused")
public void onPacketProgress(int correlationId, long bytesWritten, long totalSize) {
    synchronized (ON_UPDATE_LISTENER_LOCK) {
        if (rpcResponseListeners == null || rpcResponseListeners.indexOfKey(correlationId) < 0) {
            return;
        }
        OnPutFileUpdateListener listener = (OnPutFileUpdateListener) rpcResponseListeners.get(correlationId);
        listener.onUpdate(correlationId, bytesWritten, totalSize);
    }
}
/**
 * Will provide callback to the listener either onFinish or onError depending on the RPCResponses result code,
 * <p>Will automatically remove the listener from the list of listeners on completion.
 * @param msg The RPCResponse message that was received
 * @return if a listener was called or not
 */
@SuppressWarnings("UnusedReturnValue")
private boolean onRPCResponseReceived(RPCResponse msg){
    synchronized(ON_UPDATE_LISTENER_LOCK){
        Integer correlationId = msg.getCorrelationID();
        if (correlationId == null) {
            // No correlation id means no listener could have been registered for this
            // response; previously this line unboxed to int and threw an NPE.
            return false;
        }
        if(rpcResponseListeners !=null
                && rpcResponseListeners.indexOfKey(correlationId)>=0){
            OnRPCResponseListener listener = rpcResponseListeners.get(correlationId);
            if(msg.getSuccess()){
                listener.onResponse(correlationId, msg);
            }else{
                listener.onError(correlationId, msg.getResultCode(), msg.getInfo());
            }
            // One-shot listener: remove it once the response has been delivered.
            rpcResponseListeners.remove(correlationId);
            return true;
        }
        return false;
    }
}
/**
 * Add a listener that will receive the response to the specific RPCRequest sent with the corresponding correlation id
 * @param listener that will get called back when a response is received
 * @param correlationId of the RPCRequest that was sent
 * @param totalSize only include if this is an OnPutFileUpdateListener. Otherwise it will be ignored.
 */
public void addOnRPCResponseListener(OnRPCResponseListener listener, int correlationId, int totalSize) {
    synchronized (ON_UPDATE_LISTENER_LOCK) {
        if (listener == null || rpcResponseListeners == null) {
            return;
        }
        if (listener.getListenerType() == OnRPCResponseListener.UPDATE_LISTENER_TYPE_PUT_FILE) {
            // Put-file listeners additionally track the total transfer size.
            ((OnPutFileUpdateListener) listener).setTotalSize(totalSize);
        }
        listener.onStart(correlationId);
        rpcResponseListeners.put(correlationId, listener);
    }
}
/** @return the live map of pending response listeners, keyed by correlation id. */
@SuppressWarnings("unused")
public SparseArray<OnRPCResponseListener> getResponseListeners() {
    // Read under the same lock used for mutation.
    synchronized (ON_UPDATE_LISTENER_LOCK) {
        return this.rpcResponseListeners;
    }
}
/**
 * Delivers an incoming RPC notification to every listener registered for its
 * function id.
 * @param notification the received notification
 * @return true if at least one listener was notified, false otherwise
 */
@SuppressWarnings("UnusedReturnValue")
public boolean onRPCNotificationReceived(RPCNotification notification){
    synchronized(ON_NOTIFICATION_LISTENER_LOCK){
        // Guard against a null notification and against the listener map being gone
        // (the sibling removeOnRPCNotificationListener already null-checks the map;
        // this method previously would NPE in either case).
        if (notification == null || rpcNotificationListeners == null) {
            return false;
        }
        CopyOnWriteArrayList<OnRPCNotificationListener> listeners = rpcNotificationListeners.get(FunctionID.getFunctionId(notification.getFunctionName()));
        if(listeners != null && !listeners.isEmpty()) {
            for (OnRPCNotificationListener listener : listeners) {
                listener.onNotified(notification);
            }
            return true;
        }
        return false;
    }
}
/**
 * This will add a listener for the specific type of notification. Multiple
 * listeners may be registered per notification function id.
 * @param notificationId The notification type that this listener is designated for
 * @param listener The listener that will be called when a notification of the provided type is received
 */
@SuppressWarnings("unused")
public void addOnRPCNotificationListener(FunctionID notificationId, OnRPCNotificationListener listener) {
    synchronized (ON_NOTIFICATION_LISTENER_LOCK) {
        if (notificationId == null || listener == null) {
            return;
        }
        // Lazily create the per-function listener list on first registration.
        CopyOnWriteArrayList<OnRPCNotificationListener> listeners = rpcNotificationListeners.get(notificationId.getId());
        if (listeners == null) {
            listeners = new CopyOnWriteArrayList<OnRPCNotificationListener>();
            rpcNotificationListeners.put(notificationId.getId(), listeners);
        }
        listeners.add(listener);
    }
}
/**
 * This method is no longer valid and will not remove the listener for the supplied notification id
 * @param notificationId n/a
 * @see #removeOnRPCNotificationListener(FunctionID, OnRPCNotificationListener)
 */
@SuppressWarnings("unused")
@Deprecated
public void removeOnRPCNotificationListener(FunctionID notificationId){
    synchronized(ON_NOTIFICATION_LISTENER_LOCK){
        // Intentionally a no-op: blanket removal by function id would drop listeners
        // registered by other components. Use the two-argument overload to remove a
        // specific listener instance.
        //rpcNotificationListeners.delete(notificationId.getId());
    }
}
/**
 * Removes a specific listener registered for the given notification function id.
 * @param notificationId the notification type the listener was registered under
 * @param listener the listener instance to remove
 * @return true if the listener was found and removed, false otherwise
 */
public boolean removeOnRPCNotificationListener(FunctionID notificationId, OnRPCNotificationListener listener) {
    synchronized (ON_NOTIFICATION_LISTENER_LOCK) {
        if (rpcNotificationListeners == null || notificationId == null || listener == null) {
            return false;
        }
        CopyOnWriteArrayList<OnRPCNotificationListener> listeners = rpcNotificationListeners.get(notificationId.getId());
        if (listeners == null) {
            return false;
        }
        return listeners.remove(listener);
    }
}
/**
 * Inspects a RegisterAppInterface response for the vehicle make and, if any
 * registered security library claims that make, installs it as the active
 * SDL security implementation.
 * @param rai the RAI response; ignored when null or missing vehicle info
 */
private void processRaiResponse(RegisterAppInterfaceResponse rai) {
    // Nothing to do unless the response carries a make we can match against
    // the registered security libraries.
    if (rai == null) return;
    VehicleType vehicleType = rai.getVehicleType();
    if (vehicleType == null) return;
    String make = vehicleType.getMake();
    if (make == null || _secList == null) return;

    // Hand the security layer whatever Android context is available.
    Service svc = getService();
    SdlSecurityBase.setAppService(svc);
    if (svc != null && svc.getApplicationContext() != null) {
        SdlSecurityBase.setContext(svc.getApplicationContext());
    } else {
        SdlSecurityBase.setContext(_appContext);
    }

    // Instantiate each registered security class until one claims this make.
    for (Class<? extends SdlSecurityBase> cls : _secList) {
        SdlSecurityBase sec;
        try {
            sec = cls.newInstance();
        } catch (Exception e) {
            continue; // cannot construct this implementation; try the next one
        }
        if (sec != null && sec.getMakeList() != null && sec.getMakeList().contains(make)) {
            setSdlSecurity(sec);
            sec.setAppId(_appID);
            if (sdlSession != null) {
                sec.handleSdlSession(sdlSession);
            }
            return;
        }
    }
}
private void handleRPCMessage(Hashtable<String, Object> hash) {
RPCMessage rpcMsg = new RPCMessage(hash);
//Call format to ensure the RPC is ready to be handled regardless of RPC spec version
String functionName = rpcMsg.getFunctionName();
String messageType = rpcMsg.getMessageType();
if (messageType.equals(RPCMessage.KEY_RESPONSE)) {
SdlTrace.logRPCEvent(InterfaceActivityDirection.Receive, new RPCResponse(rpcMsg), SDL_LIB_TRACE_KEY);
// Check to ensure response is not from an internal message (reserved correlation ID)
if (isCorrelationIDProtected((new RPCResponse(hash)).getCorrelationID())) {
// This is a response generated from an internal message, it can be trapped here
// The app should not receive a response for a request it did not send
if ((new RPCResponse(hash)).getCorrelationID() == REGISTER_APP_INTERFACE_CORRELATION_ID
&& _advancedLifecycleManagementEnabled
&& functionName.equals(FunctionID.REGISTER_APP_INTERFACE.toString())) {
final RegisterAppInterfaceResponse msg = new RegisterAppInterfaceResponse(hash);
msg.format(rpcSpecVersion, true);
if (msg.getSuccess()) {
_appInterfaceRegisterd = true;
}
processRaiResponse(msg);
//Populate the system capability manager with the RAI response
_systemCapabilityManager.parseRAIResponse(msg);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.REGISTER_APP_INTERFACE.toString());
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
sendBroadcastIntent(sendIntent);
//_autoActivateIdReturned = msg.getAutoActivateID();
/*Place holder for legacy support*/ _autoActivateIdReturned = "8675309";
_prerecordedSpeech = msg.getPrerecordedSpeech();
_sdlLanguage = msg.getLanguage();
_hmiDisplayLanguage = msg.getHmiDisplayLanguage();
_sdlMsgVersion = msg.getSdlMsgVersion();
if(_sdlMsgVersion != null){
rpcSpecVersion = new com.smartdevicelink.util.Version(_sdlMsgVersion.getMajorVersion(),_sdlMsgVersion.getMinorVersion(), _sdlMsgVersion.getPatchVersion());
}else{
rpcSpecVersion = MAX_SUPPORTED_RPC_VERSION;
}
_vehicleType = msg.getVehicleType();
_systemSoftwareVersion = msg.getSystemSoftwareVersion();
_proxyVersionInfo = msg.getProxyVersionInfo();
_iconResumed = msg.getIconResumed();
if (_iconResumed == null){
_iconResumed = false;
}
if (_bAppResumeEnabled)
{
if ( (_sdlMsgVersion.getMajorVersion() > 2) && (_lastHashID != null) && (msg.getSuccess()) && (msg.getResultCode() != Result.RESUME_FAILED) )
_bResumeSuccess = true;
else
{
_bResumeSuccess = false;
_lastHashID = null;
}
}
_diagModes = msg.getSupportedDiagModes();
String sVersionInfo = "SDL Proxy Version: " + _proxyVersionInfo;
if (!isDebugEnabled())
{
enableDebugTool();
DebugTool.logInfo(sVersionInfo, false);
disableDebugTool();
}
else
DebugTool.logInfo(sVersionInfo, false);
sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "RAI_RESPONSE");
updateBroadcastIntent(sendIntent, "COMMENT1", sVersionInfo);
sendBroadcastIntent(sendIntent);
// Send onSdlConnected message in ALM
_sdlConnectionState = SdlConnectionState.SDL_CONNECTED;
// If registerAppInterface failed, exit with OnProxyUnusable
if (!msg.getSuccess()) {
notifyProxyClosed("Unable to register app interface. Review values passed to the SdlProxy constructor. RegisterAppInterface result code: ",
new SdlException("Unable to register app interface. Review values passed to the SdlProxy constructor. RegisterAppInterface result code: " + msg.getResultCode(), SdlExceptionCause.SDL_REGISTRATION_ERROR), SdlDisconnectedReason.SDL_REGISTRATION_ERROR);
}
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
if (_proxyListener instanceof IProxyListener) {
((IProxyListener)_proxyListener).onRegisterAppInterfaceResponse(msg);
}
onRPCResponseReceived(msg);
}
});
} else {
if (_proxyListener instanceof IProxyListener) {
((IProxyListener)_proxyListener).onRegisterAppInterfaceResponse(msg);
}
onRPCResponseReceived(msg);
}
} else if ((new RPCResponse(hash)).getCorrelationID() == POLICIES_CORRELATION_ID
&& functionName.equals(FunctionID.ON_ENCODED_SYNC_P_DATA.toString())) {
Log.i("pt", "POLICIES_CORRELATION_ID SystemRequest Notification (Legacy)");
final OnSystemRequest msg = new OnSystemRequest(hash);
// If url is not null, then send to URL
if ( (msg.getUrl() != null) )
{
// URL has data, attempt to post request to external server
Thread handleOffboardTransmissionThread = new Thread() {
@Override
public void run() {
sendOnSystemRequestToUrl(msg);
}
};
handleOffboardTransmissionThread.start();
}
}
else if ((new RPCResponse(hash)).getCorrelationID() == POLICIES_CORRELATION_ID
&& functionName.equals(FunctionID.ENCODED_SYNC_P_DATA.toString())) {
Log.i("pt", "POLICIES_CORRELATION_ID SystemRequest Response (Legacy)");
final SystemRequestResponse msg = new SystemRequestResponse(hash);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.SYSTEM_REQUEST.toString());
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
sendBroadcastIntent(sendIntent);
}
else if ((new RPCResponse(hash)).getCorrelationID() == POLICIES_CORRELATION_ID
&& functionName.equals(FunctionID.SYSTEM_REQUEST.toString())) {
final SystemRequestResponse msg = new SystemRequestResponse(hash);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.SYSTEM_REQUEST.toString());
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
updateBroadcastIntent(sendIntent, "DATA", serializeJSON(msg));
sendBroadcastIntent(sendIntent);
}
else if (functionName.equals(FunctionID.UNREGISTER_APP_INTERFACE.toString())) {
// UnregisterAppInterface
_appInterfaceRegisterd = false;
synchronized(APP_INTERFACE_REGISTERED_LOCK) {
APP_INTERFACE_REGISTERED_LOCK.notify();
}
final UnregisterAppInterfaceResponse msg = new UnregisterAppInterfaceResponse(hash);
msg.format(rpcSpecVersion, true);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.UNREGISTER_APP_INTERFACE.toString());
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
sendBroadcastIntent(sendIntent);
}
return;
}
if (functionName.equals(FunctionID.REGISTER_APP_INTERFACE.toString())) {
final RegisterAppInterfaceResponse msg = new RegisterAppInterfaceResponse(hash);
msg.format(rpcSpecVersion, true);
if (msg.getSuccess()) {
_appInterfaceRegisterd = true;
}
processRaiResponse(msg);
//Populate the system capability manager with the RAI response
_systemCapabilityManager.parseRAIResponse(msg);
//_autoActivateIdReturned = msg.getAutoActivateID();
/*Place holder for legacy support*/ _autoActivateIdReturned = "8675309";
_prerecordedSpeech = msg.getPrerecordedSpeech();
_sdlLanguage = msg.getLanguage();
_hmiDisplayLanguage = msg.getHmiDisplayLanguage();
_sdlMsgVersion = msg.getSdlMsgVersion();
rpcSpecVersion = new com.smartdevicelink.util.Version(_sdlMsgVersion.getMajorVersion(),_sdlMsgVersion.getMinorVersion(), _sdlMsgVersion.getPatchVersion());
_vehicleType = msg.getVehicleType();
_systemSoftwareVersion = msg.getSystemSoftwareVersion();
_proxyVersionInfo = msg.getProxyVersionInfo();
if (_bAppResumeEnabled)
{
if ( (_sdlMsgVersion.getMajorVersion() > 2) && (_lastHashID != null) && (msg.getSuccess()) && (msg.getResultCode() != Result.RESUME_FAILED) )
_bResumeSuccess = true;
else
{
_bResumeSuccess = false;
_lastHashID = null;
}
}
_diagModes = msg.getSupportedDiagModes();
if (!isDebugEnabled())
{
enableDebugTool();
DebugTool.logInfo("SDL Proxy Version: " + _proxyVersionInfo);
disableDebugTool();
}
else
DebugTool.logInfo("SDL Proxy Version: " + _proxyVersionInfo);
// RegisterAppInterface
if (_advancedLifecycleManagementEnabled) {
// Send onSdlConnected message in ALM
_sdlConnectionState = SdlConnectionState.SDL_CONNECTED;
// If registerAppInterface failed, exit with OnProxyUnusable
if (!msg.getSuccess()) {
notifyProxyClosed("Unable to register app interface. Review values passed to the SdlProxy constructor. RegisterAppInterface result code: ",
new SdlException("Unable to register app interface. Review values passed to the SdlProxy constructor. RegisterAppInterface result code: " + msg.getResultCode(), SdlExceptionCause.SDL_REGISTRATION_ERROR), SdlDisconnectedReason.SDL_REGISTRATION_ERROR);
}
} else {
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
if (_proxyListener instanceof IProxyListener) {
((IProxyListener)_proxyListener).onRegisterAppInterfaceResponse(msg);
}
onRPCResponseReceived(msg);
}
});
} else {
if (_proxyListener instanceof IProxyListener) {
((IProxyListener)_proxyListener).onRegisterAppInterfaceResponse(msg);
}
onRPCResponseReceived(msg);
}
}
} else if (functionName.equals(FunctionID.SPEAK.toString())) {
// SpeakResponse
final SpeakResponse msg = new SpeakResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSpeakResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSpeakResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.ALERT.toString())) {
// AlertResponse
final AlertResponse msg = new AlertResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onAlertResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onAlertResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.SHOW.toString())) {
// ShowResponse
final ShowResponse msg = new ShowResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onShowResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onShowResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.ADD_COMMAND.toString())) {
// AddCommand
final AddCommandResponse msg = new AddCommandResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onAddCommandResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onAddCommandResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.DELETE_COMMAND.toString())) {
// DeleteCommandResponse
final DeleteCommandResponse msg = new DeleteCommandResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onDeleteCommandResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onDeleteCommandResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.ADD_SUB_MENU.toString())) {
// AddSubMenu
final AddSubMenuResponse msg = new AddSubMenuResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onAddSubMenuResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onAddSubMenuResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.DELETE_SUB_MENU.toString())) {
// DeleteSubMenu
final DeleteSubMenuResponse msg = new DeleteSubMenuResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onDeleteSubMenuResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onDeleteSubMenuResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.SUBSCRIBE_BUTTON.toString())) {
// SubscribeButton
final SubscribeButtonResponse msg = new SubscribeButtonResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSubscribeButtonResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSubscribeButtonResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.UNSUBSCRIBE_BUTTON.toString())) {
// UnsubscribeButton
final UnsubscribeButtonResponse msg = new UnsubscribeButtonResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onUnsubscribeButtonResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onUnsubscribeButtonResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.SET_MEDIA_CLOCK_TIMER.toString())) {
// SetMediaClockTimer
final SetMediaClockTimerResponse msg = new SetMediaClockTimerResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSetMediaClockTimerResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSetMediaClockTimerResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.ENCODED_SYNC_P_DATA.toString())) {
	// Legacy ENCODED_SYNC_P_DATA response: surfaced to the app as a
	// SystemRequestResponse (the modern equivalent RPC).
	final SystemRequestResponse msg = new SystemRequestResponse(hash);
	msg.format(rpcSpecVersion,true);
	// Broadcast the outcome for external debugging/monitoring tools.
	Intent sendIntent = createBroadcastIntent();
	updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.SYSTEM_REQUEST.toString());
	updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
	updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
	updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
	updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
	updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
	sendBroadcastIntent(sendIntent);
	if (_callbackToUIThread) {
		// Run in UI thread
		_mainUIHandler.post(new Runnable() {
			@Override
			public void run() {
				_proxyListener.onSystemRequestResponse(msg);
				onRPCResponseReceived(msg);
			}
		});
	} else {
		_proxyListener.onSystemRequestResponse(msg);
		onRPCResponseReceived(msg);
	}
} else if (functionName.equals(FunctionID.CREATE_INTERACTION_CHOICE_SET.toString())) {
// CreateInteractionChoiceSet
final CreateInteractionChoiceSetResponse msg = new CreateInteractionChoiceSetResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onCreateInteractionChoiceSetResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onCreateInteractionChoiceSetResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.DELETE_INTERACTION_CHOICE_SET.toString())) {
// DeleteInteractionChoiceSet
final DeleteInteractionChoiceSetResponse msg = new DeleteInteractionChoiceSetResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onDeleteInteractionChoiceSetResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onDeleteInteractionChoiceSetResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.PERFORM_INTERACTION.toString())) {
// PerformInteraction
final PerformInteractionResponse msg = new PerformInteractionResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onPerformInteractionResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onPerformInteractionResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.SET_GLOBAL_PROPERTIES.toString())) {
// SetGlobalPropertiesResponse
final SetGlobalPropertiesResponse msg = new SetGlobalPropertiesResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSetGlobalPropertiesResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSetGlobalPropertiesResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.RESET_GLOBAL_PROPERTIES.toString())) {
// ResetGlobalProperties
final ResetGlobalPropertiesResponse msg = new ResetGlobalPropertiesResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onResetGlobalPropertiesResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onResetGlobalPropertiesResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.UNREGISTER_APP_INTERFACE.toString())) {
	// UnregisterAppInterface
	// Mark the interface as unregistered and wake any thread blocked waiting
	// on APP_INTERFACE_REGISTERED_LOCK.
	_appInterfaceRegisterd = false;
	synchronized(APP_INTERFACE_REGISTERED_LOCK) {
		APP_INTERFACE_REGISTERED_LOCK.notify();
	}
	final UnregisterAppInterfaceResponse msg = new UnregisterAppInterfaceResponse(hash);
	msg.format(rpcSpecVersion,true);
	// Broadcast full response details (including serialized JSON) for debugging.
	Intent sendIntent = createBroadcastIntent();
	updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.UNREGISTER_APP_INTERFACE.toString());
	updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
	updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
	updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
	updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
	updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
	updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
	sendBroadcastIntent(sendIntent);
	// Only the legacy IProxyListener interface exposes this callback.
	if (_callbackToUIThread) {
		// Run in UI thread
		_mainUIHandler.post(new Runnable() {
			@Override
			public void run() {
				if (_proxyListener instanceof IProxyListener) {
					((IProxyListener)_proxyListener).onUnregisterAppInterfaceResponse(msg);
				}
				onRPCResponseReceived(msg);
			}
		});
	} else {
		if (_proxyListener instanceof IProxyListener) {
			((IProxyListener)_proxyListener).onUnregisterAppInterfaceResponse(msg);
		}
		onRPCResponseReceived(msg);
	}
	// Tear down the proxy session now that the app interface is gone.
	// NOTE(review): when _callbackToUIThread is true the listener callback is
	// posted asynchronously, so notifyProxyClosed may run before it — confirm
	// this ordering is intended.
	notifyProxyClosed("UnregisterAppInterfaceResponse", null, SdlDisconnectedReason.APP_INTERFACE_UNREG);
} else if (functionName.equals(FunctionID.GENERIC_RESPONSE.toString())) {
	// GenericResponse (usually an error)
	final GenericResponse msg = new GenericResponse(hash);
	msg.format(rpcSpecVersion, true);
	if (_callbackToUIThread) {
		// Run in UI thread
		_mainUIHandler.post(new Runnable() {
			@Override
			public void run() {
				_proxyListener.onGenericResponse(msg);
				onRPCResponseReceived(msg);
			}
		});
	} else {
		_proxyListener.onGenericResponse(msg);
		onRPCResponseReceived(msg);
	}
} else if (functionName.equals(FunctionID.SLIDER.toString())) {
// Slider
final SliderResponse msg = new SliderResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSliderResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSliderResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.PUT_FILE.toString())) {
// PutFile
final PutFileResponse msg = new PutFileResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onPutFileResponse(msg);
onRPCResponseReceived(msg);
notifyPutFileStreamResponse(msg);
}
});
} else {
_proxyListener.onPutFileResponse(msg);
onRPCResponseReceived(msg);
notifyPutFileStreamResponse(msg);
}
} else if (functionName.equals(FunctionID.DELETE_FILE.toString())) {
// DeleteFile
final DeleteFileResponse msg = new DeleteFileResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onDeleteFileResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onDeleteFileResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.LIST_FILES.toString())) {
// ListFiles
final ListFilesResponse msg = new ListFilesResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onListFilesResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onListFilesResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.SET_APP_ICON.toString())) {
// SetAppIcon
final SetAppIconResponse msg = new SetAppIconResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSetAppIconResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSetAppIconResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.SCROLLABLE_MESSAGE.toString())) {
// ScrollableMessage
final ScrollableMessageResponse msg = new ScrollableMessageResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onScrollableMessageResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onScrollableMessageResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.CHANGE_REGISTRATION.toString())) {
// ChangeLanguageRegistration
final ChangeRegistrationResponse msg = new ChangeRegistrationResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onChangeRegistrationResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onChangeRegistrationResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.SET_DISPLAY_LAYOUT.toString())) {
	// SetDisplayLayout
	final SetDisplayLayoutResponse msg = new SetDisplayLayoutResponse(hash);
	msg.format(rpcSpecVersion,true);
	// successfully changed display layout - update layout capabilities so
	// later capability queries reflect the new template.
	if(msg.getSuccess() && _systemCapabilityManager!=null){
		_systemCapabilityManager.setCapability(SystemCapabilityType.DISPLAY, msg.getDisplayCapabilities());
		_systemCapabilityManager.setCapability(SystemCapabilityType.BUTTON, msg.getButtonCapabilities());
		_systemCapabilityManager.setCapability(SystemCapabilityType.PRESET_BANK, msg.getPresetBankCapabilities());
		_systemCapabilityManager.setCapability(SystemCapabilityType.SOFTBUTTON, msg.getSoftButtonCapabilities());
	}
	if (_callbackToUIThread) {
		// Run in UI thread
		_mainUIHandler.post(new Runnable() {
			@Override
			public void run() {
				_proxyListener.onSetDisplayLayoutResponse(msg);
				onRPCResponseReceived(msg);
			}
		});
	} else {
		_proxyListener.onSetDisplayLayoutResponse(msg);
		onRPCResponseReceived(msg);
	}
} else if (functionName.equals(FunctionID.PERFORM_AUDIO_PASS_THRU.toString())) {
// PerformAudioPassThru
final PerformAudioPassThruResponse msg = new PerformAudioPassThruResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onPerformAudioPassThruResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onPerformAudioPassThruResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.END_AUDIO_PASS_THRU.toString())) {
// EndAudioPassThru
final EndAudioPassThruResponse msg = new EndAudioPassThruResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onEndAudioPassThruResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onEndAudioPassThruResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.SUBSCRIBE_VEHICLE_DATA.toString())) {
// SubscribeVehicleData
final SubscribeVehicleDataResponse msg = new SubscribeVehicleDataResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSubscribeVehicleDataResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSubscribeVehicleDataResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.UNSUBSCRIBE_VEHICLE_DATA.toString())) {
// UnsubscribeVehicleData
final UnsubscribeVehicleDataResponse msg = new UnsubscribeVehicleDataResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onUnsubscribeVehicleDataResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onUnsubscribeVehicleDataResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.GET_VEHICLE_DATA.toString())) {
// GetVehicleData
final GetVehicleDataResponse msg = new GetVehicleDataResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onGetVehicleDataResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onGetVehicleDataResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.SUBSCRIBE_WAY_POINTS.toString())) {
// SubscribeWayPoints
final SubscribeWayPointsResponse msg = new SubscribeWayPointsResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSubscribeWayPointsResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSubscribeWayPointsResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.UNSUBSCRIBE_WAY_POINTS.toString())) {
// UnsubscribeWayPoints
final UnsubscribeWayPointsResponse msg = new UnsubscribeWayPointsResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onUnsubscribeWayPointsResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onUnsubscribeWayPointsResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.GET_WAY_POINTS.toString())) {
// GetWayPoints
final GetWayPointsResponse msg = new GetWayPointsResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onGetWayPointsResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onGetWayPointsResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.READ_DID.toString())) {
final ReadDIDResponse msg = new ReadDIDResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onReadDIDResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onReadDIDResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.GET_DTCS.toString())) {
final GetDTCsResponse msg = new GetDTCsResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onGetDTCsResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onGetDTCsResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.DIAGNOSTIC_MESSAGE.toString())) {
final DiagnosticMessageResponse msg = new DiagnosticMessageResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onDiagnosticMessageResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onDiagnosticMessageResponse(msg);
onRPCResponseReceived(msg);
}
}
else if (functionName.equals(FunctionID.SYSTEM_REQUEST.toString())) {
final SystemRequestResponse msg = new SystemRequestResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSystemRequestResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSystemRequestResponse(msg);
onRPCResponseReceived(msg);
}
}
else if (functionName.equals(FunctionID.SEND_LOCATION.toString())) {
final SendLocationResponse msg = new SendLocationResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSendLocationResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSendLocationResponse(msg);
onRPCResponseReceived(msg);
}
}
else if (functionName.equals(FunctionID.DIAL_NUMBER.toString())) {
final DialNumberResponse msg = new DialNumberResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onDialNumberResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onDialNumberResponse(msg);
onRPCResponseReceived(msg);
}
}
else if (functionName.equals(FunctionID.SHOW_CONSTANT_TBT.toString())) {
final ShowConstantTbtResponse msg = new ShowConstantTbtResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onShowConstantTbtResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onShowConstantTbtResponse(msg);
onRPCResponseReceived(msg);
}
}
else if (functionName.equals(FunctionID.ALERT_MANEUVER.toString())) {
final AlertManeuverResponse msg = new AlertManeuverResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onAlertManeuverResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onAlertManeuverResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.UPDATE_TURN_LIST.toString())) {
final UpdateTurnListResponse msg = new UpdateTurnListResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onUpdateTurnListResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onUpdateTurnListResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.SET_INTERIOR_VEHICLE_DATA.toString())) {
final SetInteriorVehicleDataResponse msg = new SetInteriorVehicleDataResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSetInteriorVehicleDataResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSetInteriorVehicleDataResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.GET_INTERIOR_VEHICLE_DATA.toString())) {
final GetInteriorVehicleDataResponse msg = new GetInteriorVehicleDataResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onGetInteriorVehicleDataResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onGetInteriorVehicleDataResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.GET_SYSTEM_CAPABILITY.toString())) {
// GetSystemCapabilityResponse
final GetSystemCapabilityResponse msg = new GetSystemCapabilityResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onGetSystemCapabilityResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onGetSystemCapabilityResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.BUTTON_PRESS.toString())) {
final ButtonPressResponse msg = new ButtonPressResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onButtonPressResponse(msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onButtonPressResponse(msg);
onRPCResponseReceived(msg);
}
} else if (functionName.equals(FunctionID.SEND_HAPTIC_DATA.toString())) {
final SendHapticDataResponse msg = new SendHapticDataResponse(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSendHapticDataResponse( msg);
onRPCResponseReceived(msg);
}
});
} else {
_proxyListener.onSendHapticDataResponse( msg);
onRPCResponseReceived(msg);
}
}
else {
if (_sdlMsgVersion != null) {
DebugTool.logError("Unrecognized response Message: " + functionName +
" SDL Message Version = " + _sdlMsgVersion);
} else {
DebugTool.logError("Unrecognized response Message: " + functionName);
}
} // end-if
} else if (messageType.equals(RPCMessage.KEY_NOTIFICATION)) {
	// Notification handling: trace the incoming notification, then dispatch
	// on the function name.
	SdlTrace.logRPCEvent(InterfaceActivityDirection.Receive, new RPCNotification(rpcMsg), SDL_LIB_TRACE_KEY);
	if (functionName.equals(FunctionID.ON_HMI_STATUS.toString())) {
		// OnHMIStatus
		final OnHMIStatus msg = new OnHMIStatus(hash);
		//setup lockscreeninfo: keep the lock screen manager in sync with the HMI level
		if (sdlSession != null)
		{
			sdlSession.getLockScreenMan().setHMILevel(msg.getHmiLevel());
		}
		// firstTimeFull flags the first transition into HMI_FULL for the app.
		msg.setFirstRun(firstTimeFull);
		if (msg.getHmiLevel() == HMILevel.HMI_FULL) firstTimeFull = false;
		_hmiLevel = msg.getHmiLevel();
		_audioStreamingState = msg.getAudioStreamingState();
		msg.format(rpcSpecVersion, true);
		if (_callbackToUIThread) {
			// Run in UI thread
			_mainUIHandler.post(new Runnable() {
				@Override
				public void run() {
					_proxyListener.onOnHMIStatus(msg);
					// NOTE(review): sdlSession was null-checked above but is
					// dereferenced unconditionally here — confirm it cannot be
					// null at this point.
					_proxyListener.onOnLockScreenNotification(sdlSession.getLockScreenMan().getLockObj());
					onRPCNotificationReceived(msg);
				}
			});
		} else {
			_proxyListener.onOnHMIStatus(msg);
			_proxyListener.onOnLockScreenNotification(sdlSession.getLockScreenMan().getLockObj());
			onRPCNotificationReceived(msg);
		}
} else if (functionName.equals(FunctionID.ON_COMMAND.toString())) {
// OnCommand
final OnCommand msg = new OnCommand(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnCommand(msg);
onRPCNotificationReceived(msg);
}
});
} else {
_proxyListener.onOnCommand(msg);
onRPCNotificationReceived(msg);
}
} else if (functionName.equals(FunctionID.ON_DRIVER_DISTRACTION.toString())) {
	// OnDriverDistraction
	final OnDriverDistraction msg = new OnDriverDistraction(hash);
	//setup lockscreeninfo: inform the lock screen manager whether driver
	//distraction is currently active (DD_ON).
	if (sdlSession != null)
	{
		DriverDistractionState drDist = msg.getState();
		sdlSession.getLockScreenMan().setDriverDistStatus(drDist == DriverDistractionState.DD_ON);
	}
	msg.format(rpcSpecVersion, true);
	if (_callbackToUIThread) {
		// Run in UI thread
		_mainUIHandler.post(new Runnable() {
			@Override
			public void run() {
				_proxyListener.onOnDriverDistraction(msg);
				// NOTE(review): sdlSession was null-checked above but is
				// dereferenced unconditionally here — confirm it cannot be
				// null at this point.
				_proxyListener.onOnLockScreenNotification(sdlSession.getLockScreenMan().getLockObj());
				onRPCNotificationReceived(msg);
			}
		});
	} else {
		_proxyListener.onOnDriverDistraction(msg);
		_proxyListener.onOnLockScreenNotification(sdlSession.getLockScreenMan().getLockObj());
		onRPCNotificationReceived(msg);
	}
} else if (functionName.equals(FunctionID.ON_ENCODED_SYNC_P_DATA.toString())) {
	// Legacy encoded policy-data notification; exposed to the app as OnSystemRequest.
	final OnSystemRequest msg = new OnSystemRequest(hash);
	Intent sendIntent = createBroadcastIntent();
	updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.ON_SYSTEM_REQUEST.toString());
	updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_NOTIFICATION);
	// If url is null, then send notification to the app, otherwise, send to URL
	if (msg.getUrl() == null) {
		updateBroadcastIntent(sendIntent, "COMMENT1", "URL is a null value (received)");
		sendBroadcastIntent(sendIntent);
		msg.format(rpcSpecVersion, true);
		if (_callbackToUIThread) {
			// Run in UI thread
			_mainUIHandler.post(new Runnable() {
				@Override
				public void run() {
					_proxyListener.onOnSystemRequest(msg);
					onRPCNotificationReceived(msg);
				}
			});
		} else {
			_proxyListener.onOnSystemRequest(msg);
			onRPCNotificationReceived(msg);
		}
	} else {
		updateBroadcastIntent(sendIntent, "COMMENT1", "Sending to cloud: " + msg.getUrl());
		sendBroadcastIntent(sendIntent);
		Log.i("pt", "send to url");
		// NOTE(review): this null check is redundant — this else branch is only
		// reached when msg.getUrl() != null.
		if ( (msg.getUrl() != null) )
		{
			// Off-board the transmission on a background thread so the RPC
			// dispatch path is not blocked by network I/O.
			Thread handleOffboardTransmissionThread = new Thread() {
				@Override
				public void run() {
					sendOnSystemRequestToUrl(msg);
				}
			};
			handleOffboardTransmissionThread.start();
		}
	}
} else if (functionName.equals(FunctionID.ON_PERMISSIONS_CHANGE.toString())) {
//OnPermissionsChange
final OnPermissionsChange msg = new OnPermissionsChange(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnPermissionsChange(msg);
onRPCNotificationReceived(msg);
}
});
} else {
_proxyListener.onOnPermissionsChange(msg);
onRPCNotificationReceived(msg);
}
} else if (functionName.equals(FunctionID.ON_TBT_CLIENT_STATE.toString())) {
// OnTBTClientState
final OnTBTClientState msg = new OnTBTClientState(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnTBTClientState(msg);
onRPCNotificationReceived(msg);
}
});
} else {
_proxyListener.onOnTBTClientState(msg);
onRPCNotificationReceived(msg);
}
} else if (functionName.equals(FunctionID.ON_BUTTON_PRESS.toString())) {
// OnButtonPress
final OnButtonPress msg = new OnButtonPress(hash);
msg.format(rpcSpecVersion, true);
final OnButtonPress onButtonPressCompat = (OnButtonPress)handleButtonNotificationFormatting(msg);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnButtonPress(msg);
onRPCNotificationReceived(msg);
if(onButtonPressCompat != null){
_proxyListener.onOnButtonPress(onButtonPressCompat);
}
}
});
} else {
_proxyListener.onOnButtonPress(msg);
onRPCNotificationReceived(msg);
if(onButtonPressCompat != null){
_proxyListener.onOnButtonPress(onButtonPressCompat);
}
}
} else if (functionName.equals(FunctionID.ON_BUTTON_EVENT.toString())) {
// OnButtonEvent
final OnButtonEvent msg = new OnButtonEvent(hash);
msg.format(rpcSpecVersion, true);
final OnButtonEvent onButtonEventCompat = (OnButtonEvent)handleButtonNotificationFormatting(msg);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnButtonEvent(msg);
onRPCNotificationReceived(msg);
if(onButtonEventCompat != null){
_proxyListener.onOnButtonEvent(onButtonEventCompat);
}
}
});
} else {
_proxyListener.onOnButtonEvent(msg);
onRPCNotificationReceived(msg);
if(onButtonEventCompat != null){
_proxyListener.onOnButtonEvent(onButtonEventCompat);
}
}
} else if (functionName.equals(FunctionID.ON_LANGUAGE_CHANGE.toString())) {
// OnLanguageChange
final OnLanguageChange msg = new OnLanguageChange(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnLanguageChange(msg);
onRPCNotificationReceived(msg);
}
});
} else {
_proxyListener.onOnLanguageChange(msg);
onRPCNotificationReceived(msg);
}
} else if (functionName.equals(FunctionID.ON_HASH_CHANGE.toString())) {
	// OnHashChange: the module published a new resumption hash; remember it
	// so a later RegisterAppInterface can resume the app's state.
	final OnHashChange msg = new OnHashChange(hash);
	msg.format(rpcSpecVersion, true);
	if (_callbackToUIThread) {
		// Run in UI thread
		_mainUIHandler.post(new Runnable() {
			@Override
			public void run() {
				_proxyListener.onOnHashChange(msg);
				onRPCNotificationReceived(msg);
				if (_bAppResumeEnabled)
				{
					_lastHashID = msg.getHashID();
				}
			}
		});
	} else {
		_proxyListener.onOnHashChange(msg);
		onRPCNotificationReceived(msg);
		if (_bAppResumeEnabled)
		{
			_lastHashID = msg.getHashID();
		}
	}
} else if (functionName.equals(FunctionID.ON_SYSTEM_REQUEST.toString())) {
	// OnSystemRequest
	final OnSystemRequest msg = new OnSystemRequest(hash);
	msg.format(rpcSpecVersion,true);
	// Policy (PROPRIETARY/JSON) and HTTP/BINARY requests that carry a URL are
	// handled by the proxy itself: forward the payload to the URL off-thread
	// so the RPC dispatch path is not blocked by network I/O.
	if ((msg.getUrl() != null) &&
		(((msg.getRequestType() == RequestType.PROPRIETARY) && (msg.getFileType() == FileType.JSON))
			|| ((msg.getRequestType() == RequestType.HTTP) && (msg.getFileType() == FileType.BINARY)))){
		Thread handleOffboardTransmissionThread = new Thread() {
			@Override
			public void run() {
				sendOnSystemRequestToUrl(msg);
			}
		};
		handleOffboardTransmissionThread.start();
	}
	// Remember lock screen icon URL requests so the icon can be fetched later.
	if(msg.getRequestType() == RequestType.LOCK_SCREEN_ICON_URL &&
		msg.getUrl() != null){
		lockScreenIconRequest = msg;
	}
	// NOTE(review): msg.format(rpcSpecVersion, true) was already called above;
	// this second call appears redundant — confirm it can be removed.
	msg.format(rpcSpecVersion, true);
	if (_callbackToUIThread) {
		// Run in UI thread
		_mainUIHandler.post(new Runnable() {
			@Override
			public void run() {
				_proxyListener.onOnSystemRequest(msg);
				onRPCNotificationReceived(msg);
			}
		});
	} else {
		_proxyListener.onOnSystemRequest(msg);
		onRPCNotificationReceived(msg);
	}
} else if (functionName.equals(FunctionID.ON_AUDIO_PASS_THRU.toString())) {
// OnAudioPassThru
final OnAudioPassThru msg = new OnAudioPassThru(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnAudioPassThru(msg);
onRPCNotificationReceived(msg);
}
});
} else {
_proxyListener.onOnAudioPassThru(msg);
onRPCNotificationReceived(msg);
}
} else if (functionName.equals(FunctionID.ON_VEHICLE_DATA.toString())) {
// OnVehicleData
final OnVehicleData msg = new OnVehicleData(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnVehicleData(msg);
onRPCNotificationReceived(msg);
}
});
} else {
_proxyListener.onOnVehicleData(msg);
onRPCNotificationReceived(msg);
}
}
else if (functionName.equals(FunctionID.ON_APP_INTERFACE_UNREGISTERED.toString())) {
// OnAppInterfaceUnregistered
_appInterfaceRegisterd = false;
synchronized(APP_INTERFACE_REGISTERED_LOCK) {
APP_INTERFACE_REGISTERED_LOCK.notify();
}
final OnAppInterfaceUnregistered msg = new OnAppInterfaceUnregistered(hash);
msg.format(rpcSpecVersion,true);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.ON_APP_INTERFACE_UNREGISTERED.toString());
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_NOTIFICATION);
updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
sendBroadcastIntent(sendIntent);
if (_advancedLifecycleManagementEnabled) {
// This requires the proxy to be cycled
cycleProxy(SdlDisconnectedReason.convertAppInterfaceUnregisteredReason(msg.getReason()));
} else {
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
((IProxyListener)_proxyListener).onOnAppInterfaceUnregistered(msg);
onRPCNotificationReceived(msg);
}
});
} else {
((IProxyListener)_proxyListener).onOnAppInterfaceUnregistered(msg);
onRPCNotificationReceived(msg);
}
notifyProxyClosed("OnAppInterfaceUnregistered", null, SdlDisconnectedReason.APP_INTERFACE_UNREG);
}
}
else if (functionName.equals(FunctionID.ON_KEYBOARD_INPUT.toString())) {
final OnKeyboardInput msg = new OnKeyboardInput(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnKeyboardInput(msg);
onRPCNotificationReceived(msg);
}
});
} else {
_proxyListener.onOnKeyboardInput(msg);
onRPCNotificationReceived(msg);
}
}
else if (functionName.equals(FunctionID.ON_TOUCH_EVENT.toString())) {
final OnTouchEvent msg = new OnTouchEvent(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnTouchEvent(msg);
onRPCNotificationReceived(msg);
}
});
} else {
_proxyListener.onOnTouchEvent(msg);
onRPCNotificationReceived(msg);
}
}
else if (functionName.equals(FunctionID.ON_WAY_POINT_CHANGE.toString())) {
final OnWayPointChange msg = new OnWayPointChange(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnWayPointChange(msg);
onRPCNotificationReceived(msg);
}
});
} else {
_proxyListener.onOnWayPointChange(msg);
onRPCNotificationReceived(msg);
}
}
else if (functionName.equals(FunctionID.ON_INTERIOR_VEHICLE_DATA.toString())) {
final OnInteriorVehicleData msg = new OnInteriorVehicleData(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnInteriorVehicleData(msg);
onRPCNotificationReceived(msg);
}
});
} else {
_proxyListener.onOnInteriorVehicleData(msg);
onRPCNotificationReceived(msg);
}
}
else if (functionName.equals(FunctionID.ON_RC_STATUS.toString())) {
final OnRCStatus msg = new OnRCStatus(hash);
msg.format(rpcSpecVersion, true);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnRCStatus(msg);
onRPCNotificationReceived(msg);
}
});
} else {
_proxyListener.onOnRCStatus(msg);
onRPCNotificationReceived(msg);
}
}
else {
if (_sdlMsgVersion != null) {
DebugTool.logInfo("Unrecognized notification Message: " + functionName +
" connected to SDL using message version: " + _sdlMsgVersion.getMajorVersion() + "." + _sdlMsgVersion.getMinorVersion());
} else {
DebugTool.logInfo("Unrecognized notification Message: " + functionName);
}
} // end-if
} // end-if notification
SdlTrace.logProxyEvent("Proxy received RPC Message: " + functionName, SDL_LIB_TRACE_KEY);
}
//FIXME
/**
 * Temporary bridge between the PLAY_PAUSE button name (RPC spec &gt;= 5.0) and the
 * legacy OK button name for OnButtonEvent / OnButtonPress notifications. When the
 * connected module's spec version and the app's expectation differ, this builds a
 * shallow copy of the notification carrying the alternate button name so listeners
 * receive both forms. This should be removed during the next major release.
 *
 * @param notification the incoming button notification to inspect
 * @return a copy of the notification with the swapped button name, or null when no
 *         swap is needed (or the notification is not a recognized button type)
 */
private RPCNotification handleButtonNotificationFormatting(RPCNotification notification){
    if(FunctionID.ON_BUTTON_EVENT.toString().equals(notification.getFunctionName())
            || FunctionID.ON_BUTTON_PRESS.toString().equals(notification.getFunctionName())){

        ButtonName buttonName = (ButtonName)notification.getObject(ButtonName.class, OnButtonEvent.KEY_BUTTON_NAME);
        ButtonName compatBtnName = null;

        if(rpcSpecVersion != null && rpcSpecVersion.getMajor() >= 5){
            // Spec >= 5: module sends PLAY_PAUSE, legacy listeners expect OK
            if(ButtonName.PLAY_PAUSE.equals(buttonName)){
                compatBtnName = ButtonName.OK;
            }
        }else{ // rpc spec version is either null or less than 5
            if(ButtonName.OK.equals(buttonName)){
                compatBtnName = ButtonName.PLAY_PAUSE;
            }
        }
        try {
            if (compatBtnName != null) { //There is a button name that needs to be swapped out
                RPCNotification notification2;
                //The following is done because there is currently no way to make a deep copy
                //of an RPC. Since this code will be removed, it's ugliness is borderline acceptable.
                if (notification instanceof OnButtonEvent) {
                    OnButtonEvent onButtonEvent = new OnButtonEvent();
                    onButtonEvent.setButtonEventMode(((OnButtonEvent) notification).getButtonEventMode());
                    onButtonEvent.setCustomButtonID(((OnButtonEvent) notification).getCustomButtonID());
                    notification2 = onButtonEvent;
                } else if (notification instanceof OnButtonPress) {
                    OnButtonPress onButtonPress = new OnButtonPress();
                    onButtonPress.setButtonPressMode(((OnButtonPress) notification).getButtonPressMode());
                    onButtonPress.setCustomButtonName(((OnButtonPress) notification).getCustomButtonName());
                    notification2 = onButtonPress;
                } else {
                    return null;
                }
                notification2.setParameters(OnButtonEvent.KEY_BUTTON_NAME, compatBtnName);
                return notification2;
            }
        } catch (Exception ignored){
            // Intentionally swallowed: the getters/setters above operate on freshly
            // constructed RPCs and should never throw; a failure simply means no
            // compat notification is delivered.
        }
    }
    return null;
}
/**
 * Gets the SDL message (RPC spec) version negotiated with the connected module.
 *
 * @return the {@link SdlMsgVersion} held by this proxy; may be null if registration
 *         has not completed — TODO confirm against registration flow
 * @throws SdlException declared for API compatibility; this implementation does not throw
 */
public SdlMsgVersion getSdlMsgVersion() throws SdlException{
    return _sdlMsgVersion;
}
/**
 * Takes a list of RPCRequests and sends them to SDL one at a time: each request is sent
 * only after the previous one received a successful response. Responses are captured
 * through callback on OnMultipleRequestListener. For sending requests asynchronously,
 * use sendRequests. <br>
 *
 * <strong>NOTE: This will override any listeners on individual RPCs</strong>
 *
 * @param rpcs is the list of RPCRequests being sent; this list is consumed (drained)
 *             by this method — NOTE(review): {@code remove(0)} mutates the caller's
 *             list and will throw UnsupportedOperationException for immutable lists;
 *             confirm callers pass a mutable list
 * @param listener listener for updates and completions; may be null
 * @throws SdlException if the proxy is disposed, the transport is not connected,
 *                      or {@code rpcs} is null
 */
@SuppressWarnings("unused")
public void sendSequentialRequests(final List<? extends RPCRequest> rpcs, final OnMultipleRequestListener listener) throws SdlException {
    if (_proxyDisposed) {
        throw new SdlException("This object has been disposed, it is no long capable of executing methods.", SdlExceptionCause.SDL_PROXY_DISPOSED);
    }

    SdlTrace.logProxyEvent("Application called sendSequentialRequests", SDL_LIB_TRACE_KEY);

    synchronized(CONNECTION_REFERENCE_LOCK) {
        if (!getIsConnected()) {
            SdlTrace.logProxyEvent("Application attempted to call sendSequentialRequests without a connected transport.", SDL_LIB_TRACE_KEY);
            throw new SdlException("There is no valid connection to SDL. sendSequentialRequests cannot be called until SDL has been connected.", SdlExceptionCause.SDL_UNAVAILABLE);
        }
    }

    if (rpcs == null){
        //Log error here
        throw new SdlException("You must send some RPCs", SdlExceptionCause.INVALID_ARGUMENT);
    }

    int requestCount = rpcs.size();

    // Break out of recursion, we have finished the requests
    if (requestCount == 0) {
        if(listener != null){
            listener.onFinished();
        }
        return;
    }

    // Pop the head of the list; recursion continues from the response callback below.
    RPCRequest rpc = rpcs.remove(0);
    rpc.setCorrelationID(CorrelationIdGenerator.generateId());

    rpc.setOnRPCResponseListener(new OnRPCResponseListener() {
        @Override
        public void onResponse(int correlationId, RPCResponse response) {
            if (response.getSuccess()) {
                // success
                if(listener!=null){
                    listener.onUpdate(rpcs.size());
                }
                try {
                    // recurse after successful response of RPC
                    sendSequentialRequests(rpcs, listener);
                } catch (SdlException e) {
                    e.printStackTrace();
                    if(listener != null){
                        listener.onError(correlationId, Result.GENERIC_ERROR, e.toString());
                    }
                }
            }
            // NOTE(review): when the response is unsuccessful, nothing happens here —
            // the sequence silently stalls and onFinished is never called. Verify
            // whether a failure should continue the sequence or notify the listener.
        }

        @Override
        public void onError(int correlationId, Result resultCode, String info){
            // Transport-level error for this request; remaining requests are not sent.
            if(listener != null){
                listener.onError(correlationId, resultCode, info);
            }
        }
    });

    sendRPCRequestPrivate(rpc);
}
/**
 * Takes a list of RPCRequests and sends them all to SDL immediately (fire-and-forget
 * per request). Responses are captured through callback on OnMultipleRequestListener.
 * For sending requests synchronously, use sendSequentialRequests. <br>
 *
 * <strong>NOTE: This will override any listeners on individual RPCs</strong>
 *
 * @param rpcs is the list of RPCRequests being sent
 * @param listener listener for updates and completions; may be null
 * @throws SdlException if the proxy is disposed, the transport is not connected,
 *                      or {@code rpcs} is null or empty
 */
@SuppressWarnings("unused")
public void sendRequests(List<? extends RPCRequest> rpcs, final OnMultipleRequestListener listener) throws SdlException {
    if (_proxyDisposed) {
        throw new SdlException("This object has been disposed, it is no long capable of executing methods.", SdlExceptionCause.SDL_PROXY_DISPOSED);
    }

    SdlTrace.logProxyEvent("Application called sendRequests", SDL_LIB_TRACE_KEY);

    synchronized (CONNECTION_REFERENCE_LOCK) {
        if (!getIsConnected()) {
            SdlTrace.logProxyEvent("Application attempted to call sendRequests without a connected transport.", SDL_LIB_TRACE_KEY);
            throw new SdlException("There is no valid connection to SDL. sendRequests cannot be called until SDL has been connected.", SdlExceptionCause.SDL_UNAVAILABLE);
        }
    }

    if (rpcs == null) {
        //Log error here
        throw new SdlException("You must send some RPCs, the array is null", SdlExceptionCause.INVALID_ARGUMENT);
    }
    if (rpcs.isEmpty()) {
        throw new SdlException("You must send some RPCs, the array is empty", SdlExceptionCause.INVALID_ARGUMENT);
    }

    // Tag each request with a fresh correlation id, wire up the shared listener,
    // and dispatch it immediately.
    for (RPCRequest request : rpcs) {
        request.setCorrelationID(CorrelationIdGenerator.generateId());
        if (listener != null) {
            listener.addCorrelationId(request.getCorrelationID());
            request.setOnRPCResponseListener(listener.getSingleRpcResponseListener());
        }
        sendRPCRequestPrivate(request);
    }
}
/**
 * Takes an RPCRequest and sends it to SDL. Responses are captured through callback on IProxyListener.
 *
 * <p>Validation performed, in order: proxy not disposed, request non-null, transport
 * connected, correlation ID not reserved, app interface registered (unless the request
 * is RegisterAppInterface), and — under Advanced Lifecycle Management — that the request
 * is not a (Un)RegisterAppInterface.</p>
 *
 * @param request is the RPCRequest being sent
 * @throws SdlException if an unrecoverable error is encountered
 * @throws IllegalArgumentException if {@code request} is null
 */
public void sendRPCRequest(RPCRequest request) throws SdlException {
    if (_proxyDisposed) {
        throw new SdlException("This object has been disposed, it is no long capable of executing methods.", SdlExceptionCause.SDL_PROXY_DISPOSED);
    }

    // Test if request is null
    if (request == null) {
        SdlTrace.logProxyEvent("Application called sendRPCRequest method with a null RPCRequest.", SDL_LIB_TRACE_KEY);
        throw new IllegalArgumentException("sendRPCRequest cannot be called with a null request.");
    }

    SdlTrace.logProxyEvent("Application called sendRPCRequest method for RPCRequest: ." + request.getFunctionName(), SDL_LIB_TRACE_KEY);

    // Test if SdlConnection is null
    synchronized(CONNECTION_REFERENCE_LOCK) {
        if (!getIsConnected()) {
            SdlTrace.logProxyEvent("Application attempted to send and RPCRequest without a connected transport.", SDL_LIB_TRACE_KEY);
            throw new SdlException("There is no valid connection to SDL. sendRPCRequest cannot be called until SDL has been connected.", SdlExceptionCause.SDL_UNAVAILABLE);
        }
    }

    // Test for illegal correlation ID
    if (isCorrelationIDProtected(request.getCorrelationID())) {
        SdlTrace.logProxyEvent("Application attempted to use the reserved correlation ID, " + request.getCorrelationID(), SDL_LIB_TRACE_KEY);
        throw new SdlException("Invalid correlation ID. The correlation ID, " + request.getCorrelationID()
                + " , is a reserved correlation ID.", SdlExceptionCause.RESERVED_CORRELATION_ID);
    }

    // Throw exception if RPCRequest is sent when SDL is unavailable
    if (!_appInterfaceRegisterd && !request.getFunctionName().equals(FunctionID.REGISTER_APP_INTERFACE.toString())) {
        SdlTrace.logProxyEvent("Application attempted to send an RPCRequest (non-registerAppInterface), before the interface was registerd.", SDL_LIB_TRACE_KEY);
        throw new SdlException("SDL is currently unavailable. RPC Requests cannot be sent.", SdlExceptionCause.SDL_UNAVAILABLE);
    }

    if (_advancedLifecycleManagementEnabled) {
        if (request.getFunctionName().equals(FunctionID.REGISTER_APP_INTERFACE.toString())
                || request.getFunctionName().equals(FunctionID.UNREGISTER_APP_INTERFACE.toString())) {
            SdlTrace.logProxyEvent("Application attempted to send a RegisterAppInterface or UnregisterAppInterface while using ALM.", SDL_LIB_TRACE_KEY);
            throw new SdlException("The RPCRequest, " + request.getFunctionName() +
                    ", is un-allowed using the Advanced Lifecycle Management Model.", SdlExceptionCause.INCORRECT_LIFECYCLE_MODEL);
        }
    }

    //FIXME this is temporary until the next major release of the library where OK is removed
    // Compat shim mirroring handleButtonNotificationFormatting: map PLAY_PAUSE <-> OK on
    // button subscription/press requests depending on the negotiated RPC spec version.
    if(FunctionID.SUBSCRIBE_BUTTON.toString().equals(request.getFunctionName())
            || FunctionID.UNSUBSCRIBE_BUTTON.toString().equals(request.getFunctionName())
            || FunctionID.BUTTON_PRESS.toString().equals(request.getFunctionName())){
        ButtonName buttonName = (ButtonName)request.getObject(ButtonName.class, SubscribeButton.KEY_BUTTON_NAME);

        if(rpcSpecVersion != null && rpcSpecVersion.getMajor() < 5) {
            // Older module: it only knows OK, so rewrite PLAY_PAUSE in place.
            if (ButtonName.PLAY_PAUSE.equals(buttonName)) {
                request.setParameters(SubscribeButton.KEY_BUTTON_NAME, ButtonName.OK);
            }
        } else { //Newer than version 5.0.0
            // Newer module: an OK request is duplicated as PLAY_PAUSE so both fire.
            if(ButtonName.OK.equals(buttonName)){
                RPCRequest request2 = new RPCRequest(request);
                request2.setParameters(SubscribeButton.KEY_BUTTON_NAME, ButtonName.PLAY_PAUSE);
                sendRPCRequestPrivate(request2);
            }
        }
    }

    sendRPCRequestPrivate(request);
} // end-method
/**
 * Queues an OnProxyClosed internal message so listeners are informed (on the internal
 * dispatcher thread) that the proxy has closed.
 *
 * @param info   human-readable description of why the proxy closed
 * @param e      exception that triggered the close, or null
 * @param reason disconnect reason forwarded to listeners
 */
protected void notifyProxyClosed(final String info, final Exception e, final SdlDisconnectedReason reason) {
    SdlTrace.logProxyEvent("NotifyProxyClose", SDL_LIB_TRACE_KEY);
    Log.d(TAG, "notifyProxyClosed: " + info);
    final OnProxyClosed closedMessage = new OnProxyClosed(info, e, reason);
    queueInternalMessage(closedMessage);
}
/**
 * Wraps an error in an OnError internal message and queues it for delivery to the
 * proxy listener on the internal dispatcher thread.
 *
 * @param info human-readable error description
 * @param e    underlying exception, or null
 */
private void passErrorToProxyListener(final String info, final Exception e) {
    queueInternalMessage(new OnError(info, e));
}
/**
 * Called once the RPC protocol session is established. Under Advanced Lifecycle
 * Management this immediately registers the app interface with the stored
 * registration parameters; otherwise it notifies the listener that the proxy is open
 * and leaves registration to the application.
 */
private void startRPCProtocolSession() {

    // Set Proxy Lifecyclek Available
    if (_advancedLifecycleManagementEnabled) {

        try {
            // All registration parameters were captured at proxy construction time.
            registerAppInterfacePrivate(
                    _sdlMsgVersionRequest,
                    _applicationName,
                    _ttsName,
                    _ngnMediaScreenAppName,
                    _vrSynonyms,
                    _isMediaApp,
                    _sdlLanguageDesired,
                    _hmiDisplayLanguageDesired,
                    _appType,
                    _appID,
                    _dayColorScheme,
                    _nightColorScheme,
                    REGISTER_APP_INTERFACE_CORRELATION_ID);
        } catch (Exception e) {
            // Registration failure is fatal for the session; close the proxy.
            notifyProxyClosed("Failed to register application interface with SDL. Check parameter values given to SdlProxy constructor.", e, SdlDisconnectedReason.SDL_REGISTRATION_ERROR);
        }
    } else {
        InternalProxyMessage message = new InternalProxyMessage(InternalProxyMessage.OnProxyOpened);
        queueInternalMessage(message);
    }
}
/**
 * Queues an internal callback message on the internal dispatcher, if one exists.
 * Synchronized against dispatcher teardown.
 *
 * @param message the internal proxy message to dispatch
 */
private void queueInternalMessage(InternalProxyMessage message) {
    synchronized (INTERNAL_MESSAGE_QUEUE_THREAD_LOCK) {
        if (_internalProxyMessageDispatcher == null) {
            return; // dispatcher already torn down; drop the message
        }
        _internalProxyMessageDispatcher.queueMessage(message);
    }
}
/**
 * Queues an incoming ProtocolMessage on the incoming-message dispatcher, if one exists.
 * Synchronized against dispatcher teardown.
 *
 * @param message the protocol message received from the transport
 */
private void queueIncomingMessage(ProtocolMessage message) {
    synchronized (INCOMING_MESSAGE_QUEUE_THREAD_LOCK) {
        if (_incomingProxyMessageDispatcher == null) {
            return; // dispatcher already torn down; drop the message
        }
        _incomingProxyMessageDispatcher.queueMessage(message);
    }
}
/**
 * Opens a FileInputStream for the given local file path.
 *
 * @param sLocalFile absolute or relative path of the file to open
 * @return an open stream, or null if the file cannot be opened (caller must close it)
 */
private FileInputStream getFileInputStream(String sLocalFile)
{
    FileInputStream is = null;
    try
    {
        is = new FileInputStream(sLocalFile);
    }
    // Narrowed from IOException: the FileInputStream constructor only throws
    // FileNotFoundException. Null is returned so callers can bail out gracefully.
    catch (FileNotFoundException e1)
    {
        e1.printStackTrace();
    }
    return is;
}
/**
 * Returns the size, in bytes, of the file behind the given stream.
 *
 * @param is an open FileInputStream
 * @return the channel size in bytes, or null if it cannot be determined
 */
private Long getFileInputStreamSize(FileInputStream is)
{
    try
    {
        return is.getChannel().size();
    }
    catch (IOException e)
    {
        e.printStackTrace();
        return null;
    }
}
/**
 * Best-effort close of the given stream; any failure is logged and otherwise ignored.
 *
 * @param is the stream to close
 */
private void closeFileInputStream(FileInputStream is)
{
    try {
        is.close();
    } catch (Exception e) {
        // Close failures are non-fatal here; just record them.
        e.printStackTrace();
    }
}
/**
 * Opens the given local file and starts a StreamRPCPacketizer that streams its
 * contents as the payload of the supplied PutFile request.
 *
 * @param sLocalFile path of the local file to stream
 * @param request the PutFile request describing the transfer
 * @param sType session type to stream over
 * @param rpcSessionID current session id
 * @param protocolVersion negotiated protocol version
 * @return a controller for the running stream, or null on any failure
 */
@SuppressWarnings("unchecked")
private RPCStreamController startRPCStream(String sLocalFile, PutFile request, SessionType sType, byte rpcSessionID, Version protocolVersion)
{
    if (sdlSession == null) return null;

    FileInputStream is = getFileInputStream(sLocalFile);
    if (is == null) return null;

    Long lSize = getFileInputStreamSize(is);
    if (lSize == null)
    {
        closeFileInputStream(is);
        return null;
    }

    try {
        StreamRPCPacketizer rpcPacketizer = new StreamRPCPacketizer((SdlProxyBase<IProxyListenerBase>) this, sdlSession, is, request, sType, rpcSessionID, protocolVersion, rpcSpecVersion, lSize, sdlSession);
        rpcPacketizer.start();
        return new RPCStreamController(rpcPacketizer, request.getCorrelationID());
    } catch (Exception e) {
        Log.e("SyncConnection", "Unable to start streaming:" + e.toString());
        // Fix: previously the opened stream leaked on this path; close it before bailing.
        closeFileInputStream(is);
        return null;
    }
}
/**
 * Starts a StreamRPCPacketizer that streams the given InputStream as the payload of
 * the supplied PutFile request. The request's length field determines the byte count.
 *
 * @param is source stream supplied by the caller (caller retains ownership)
 * @param request the PutFile request describing the transfer; must carry a length
 * @param sType session type to stream over
 * @param rpcSessionID current session id
 * @param protocolVersion negotiated protocol version
 * @return a controller for the running stream, or null on any failure
 */
@SuppressWarnings({"unchecked", "UnusedReturnValue"})
private RPCStreamController startRPCStream(InputStream is, PutFile request, SessionType sType, byte rpcSessionID, Version protocolVersion)
{
    if (sdlSession == null) {
        return null;
    }
    final Long lSize = request.getLength();
    if (lSize == null) {
        return null;
    }

    try {
        StreamRPCPacketizer packetizer = new StreamRPCPacketizer((SdlProxyBase<IProxyListenerBase>) this, sdlSession, is, request, sType, rpcSessionID, protocolVersion, rpcSpecVersion, lSize, sdlSession);
        packetizer.start();
        return new RPCStreamController(packetizer, request.getCorrelationID());
    } catch (Exception e) {
        Log.e("SyncConnection", "Unable to start streaming:" + e.toString());
        return null;
    }
}
/**
 * Starts streaming a local file over the RPC service for the given PutFile request.
 *
 * @param sPath path of the local file
 * @param msg the PutFile request
 * @return a stream controller, or null if there is no active session
 */
private RPCStreamController startPutFileStream(String sPath, PutFile msg) {
    if (sdlSession != null) {
        return startRPCStream(sPath, msg, SessionType.RPC, sdlSession.getSessionId(), protocolVersion);
    }
    return null;
}
/**
 * Starts streaming the given InputStream over the RPC service for the given PutFile request.
 *
 * @param is source stream; must be non-null
 * @param msg the PutFile request
 * @return a stream controller, or null if there is no active session or stream
 */
private RPCStreamController startPutFileStream(InputStream is, PutFile msg) {
    if (sdlSession == null || is == null) {
        return null;
    }
    return startRPCStream(is, msg, SessionType.RPC, sdlSession.getSessionId(), protocolVersion);
}
/**
 * Starts an RPC stream on the session, fed from the given InputStream.
 *
 * @param is source stream for the payload
 * @param msg the RPC request the stream belongs to
 * @return true if the stream was started; false if there is no active session
 */
@SuppressWarnings("UnusedReturnValue")
public boolean startRPCStream(InputStream is, RPCRequest msg) {
    if (sdlSession == null) {
        return false;
    }
    final byte majorVersion = (byte) getProtocolVersion().getMajor();
    sdlSession.startRPCStream(is, msg, SessionType.RPC, sdlSession.getSessionId(), majorVersion);
    return true;
}
/**
 * Starts an RPC stream on the session and hands back an OutputStream the app writes to.
 *
 * @param msg the RPC request the stream belongs to
 * @return the stream to write payload bytes to, or null if there is no active session
 */
public OutputStream startRPCStream(RPCRequest msg) {
    if (sdlSession != null) {
        return sdlSession.startRPCStream(msg, SessionType.RPC, sdlSession.getSessionId(), (byte) getProtocolVersion().getMajor());
    }
    return null;
}
/**
 * Stops any RPC stream running on the current session; no-op if there is no session.
 */
public void endRPCStream() {
    if (sdlSession != null) {
        sdlSession.stopRPCStream();
    }
}
/**
 * A Callable that simply sleeps for a fixed time. Used as a timeout task: it is
 * wrapped in a FutureTask and polled via isDone() to bound the busy-wait loops in
 * the service-start helpers.
 */
private class CallableMethod implements Callable<Void> {
    private final long waitTime;

    /** @param timeInMillis how long call() should sleep, in milliseconds */
    public CallableMethod(int timeInMillis){
        this.waitTime = timeInMillis;
    }

    @Override
    public Void call() {
        try {
            Thread.sleep(waitTime);
        } catch (InterruptedException e) {
            e.printStackTrace();
            // Fix: restore the interrupt status so the executor/thread that
            // interrupted us can still observe the interruption.
            Thread.currentThread().interrupt();
        }
        return null;
    }
}
/**
 * Wraps the given timeout task in a FutureTask so callers can poll isDone().
 *
 * @param callMethod the sleep task to wrap
 * @return a new, not-yet-started FutureTask
 */
public FutureTask<Void> createFutureTask(CallableMethod callMethod){
    return new FutureTask<>(callMethod);
}
/**
 * Creates a fresh single-threaded scheduler used to run the timeout task while the
 * service-start helpers busy-wait. Callers are responsible for shutting it down.
 *
 * @return a new single-thread ScheduledExecutorService
 */
public ScheduledExecutorService createScheduler(){
    return Executors.newSingleThreadScheduledExecutor();
}
/**
 * Starts a protocol service of the given type on the current session.
 *
 * @param serviceType the service to start
 * @param isEncrypted true if packets on this service must be encrypted
 */
@SuppressWarnings("unused")
public void startService(SessionType serviceType, boolean isEncrypted){
    // Fix: guard against a missing session (consistent with the other
    // service/stream helpers in this class) instead of throwing an NPE.
    if (sdlSession == null) return;
    sdlSession.startService(serviceType, sdlSession.getSessionId(), isEncrypted);
}
/**
 * Ends a protocol service of the given type on the current session.
 *
 * @param serviceType the service to end
 */
@SuppressWarnings("unused")
public void endService(SessionType serviceType){
    // Fix: guard against a missing session (consistent with the other
    // service/stream helpers in this class) instead of throwing an NPE.
    if (sdlSession == null) return;
    sdlSession.endService(serviceType, sdlSession.getSessionId());
}
/**
 * @deprecated
 *Opens the video service (serviceType 11) and subsequently streams raw H264 video from an InputStream provided by the app.
 *Blocks the calling thread (busy-wait) for up to RESPONSE_WAIT_TIME while waiting for the module's StartService response.
 *@return true if service is opened successfully and stream is started, return false otherwise
 * @see #startRemoteDisplayStream(Context, Class, VideoStreamingParameters, boolean) startRemoteDisplayStream
 * @see #startVideoStream(boolean, VideoStreamingParameters) startVideoStream
 * @see #createOpenGLInputSurface(int, int, int, int, int, boolean) createOpenGLInputSurface
 */
@SuppressWarnings("unused")
@Deprecated
public boolean startH264(InputStream is, boolean isEncrypted) {

    if (sdlSession == null) return false;

    // Reset handshake flags written by the StartService response handler elsewhere.
    navServiceStartResponseReceived = false;
    navServiceStartResponse = false;
    navServiceStartRejectedParams = null;

    // When startH264() API is used, we will not send video format / width / height information
    // with StartService. (Reasons: InputStream does not provide timestamp information so RTP
    // cannot be used. startH264() does not provide with/height information.)
    VideoStreamingParameters emptyParam = new VideoStreamingParameters();
    emptyParam.setResolution(null);
    emptyParam.setFormat(null);
    sdlSession.setDesiredVideoParams(emptyParam);

    sdlSession.startService(SessionType.NAV, sdlSession.getSessionId(), isEncrypted);

    // Busy-wait (bounded by the sleeping FutureTask) until the response arrives or times out.
    FutureTask<Void> fTask = createFutureTask(new CallableMethod(RESPONSE_WAIT_TIME));
    ScheduledExecutorService scheduler = createScheduler();
    scheduler.execute(fTask);

    //noinspection StatementWithEmptyBody
    while (!navServiceStartResponseReceived && !fTask.isDone());
    scheduler.shutdown();

    if (navServiceStartResponse) {
        try {
            sdlSession.startStream(is, SessionType.NAV, sdlSession.getSessionId());
            return true;
        } catch (Exception e) {
            return false;
        }
    } else {
        return false;
    }
}
/**
 * @deprecated
 *Opens the video service (serviceType 11) and subsequently provides an OutputStream to the app to use for a raw H264 video stream.
 *Blocks the calling thread (busy-wait) for up to RESPONSE_WAIT_TIME while waiting for the module's StartService response.
 *@return OutputStream if service is opened successfully and stream is started, return null otherwise
 * @see #startRemoteDisplayStream(Context, Class, VideoStreamingParameters, boolean) startRemoteDisplayStream
 * @see #startVideoStream(boolean, VideoStreamingParameters) startVideoStream
 * @see #createOpenGLInputSurface(int, int, int, int, int, boolean) createOpenGLInputSurface
 */
@SuppressWarnings("unused")
@Deprecated
public OutputStream startH264(boolean isEncrypted) {

    if (sdlSession == null) return null;

    // Reset handshake flags written by the StartService response handler elsewhere.
    navServiceStartResponseReceived = false;
    navServiceStartResponse = false;
    navServiceStartRejectedParams = null;

    // When startH264() API is used, we will not send video format / width / height information
    // with StartService. (Reasons: OutputStream does not provide timestamp information so RTP
    // cannot be used. startH264() does not provide with/height information.)
    VideoStreamingParameters emptyParam = new VideoStreamingParameters();
    emptyParam.setResolution(null);
    emptyParam.setFormat(null);
    sdlSession.setDesiredVideoParams(emptyParam);

    sdlSession.startService(SessionType.NAV, sdlSession.getSessionId(), isEncrypted);

    // Busy-wait (bounded by the sleeping FutureTask) until the response arrives or times out.
    FutureTask<Void> fTask = createFutureTask(new CallableMethod(RESPONSE_WAIT_TIME));
    ScheduledExecutorService scheduler = createScheduler();
    scheduler.execute(fTask);

    //noinspection StatementWithEmptyBody
    while (!navServiceStartResponseReceived && !fTask.isDone());
    scheduler.shutdown();

    if (navServiceStartResponse) {
        try {
            return sdlSession.startStream(SessionType.NAV, sdlSession.getSessionId());
        } catch (Exception e) {
            return null;
        }
    } else {
        return null;
    }
}
/**
 *Closes the opened video service (serviceType 11). Deprecated delegate to {@link #endVideoStream()}.
 *@return true if the video service is closed successfully, return false otherwise
 */
@SuppressWarnings("unused")
@Deprecated
public boolean endH264() {
    return endVideoStream();
}
/**
 *Pauses the stream for the opened audio service (serviceType 10). Deprecated delegate to pauseAudioStream().
 *@return true if the audio service stream is paused successfully, return false otherwise
 */
@SuppressWarnings("unused")
@Deprecated
public boolean pausePCM() {
    return pauseAudioStream();
}
/**
 *Pauses the stream for the opened video service (serviceType 11). Deprecated delegate to {@link #pauseVideoStream()}.
 *@return true if the video service stream is paused successfully, return false otherwise
 */
@SuppressWarnings("unused")
@Deprecated
public boolean pauseH264() {
    return pauseVideoStream();
}
/**
 *Resumes the stream for the opened audio service (serviceType 10). Deprecated delegate to resumeAudioStream().
 *@return true if the audio service stream is resumed successfully, return false otherwise
 */
@SuppressWarnings("unused")
@Deprecated
public boolean resumePCM() {
    return resumeAudioStream();
}
/**
 *Resumes the stream for the opened video service (serviceType 11). Deprecated delegate to {@link #resumeVideoStream()}.
 *@return true if the video service is resumed successfully, return false otherwise
 */
@SuppressWarnings("unused")
@Deprecated
public boolean resumeH264() {
    return resumeVideoStream();
}
/**
 *Opens the audio service (serviceType 10) and subsequently streams raw PCM audio from an InputStream provided by the app.
 *Blocks the calling thread (busy-wait) for up to RESPONSE_WAIT_TIME while waiting for the module's StartService response.
 *@return true if service is opened successfully and stream is started, return false otherwise
 */
@SuppressWarnings("unused")
@Deprecated
public boolean startPCM(InputStream is, boolean isEncrypted) {
    if (sdlSession == null) return false;

    // Reset handshake state written by the StartService response handler elsewhere.
    pcmServiceStartResponseReceived = false;
    pcmServiceStartResponse = false;
    // Fix: also clear stale rejection params (consistent with the nav/startH264
    // variants), so a previous attempt's params cannot leak into later logging.
    pcmServiceStartRejectedParams = null;

    sdlSession.startService(SessionType.PCM, sdlSession.getSessionId(), isEncrypted);

    // Busy-wait (bounded by the sleeping FutureTask) until the response arrives or times out.
    FutureTask<Void> fTask = createFutureTask(new CallableMethod(RESPONSE_WAIT_TIME));
    ScheduledExecutorService scheduler = createScheduler();
    scheduler.execute(fTask);

    //noinspection StatementWithEmptyBody
    while (!pcmServiceStartResponseReceived && !fTask.isDone());
    scheduler.shutdown();

    if (pcmServiceStartResponse) {
        try {
            sdlSession.startStream(is, SessionType.PCM, sdlSession.getSessionId());
            return true;
        } catch (Exception e) {
            return false;
        }
    } else {
        return false;
    }
}
/**
 *Opens the audio service (serviceType 10) and subsequently provides an OutputStream to the app.
 *Blocks the calling thread (busy-wait) for up to RESPONSE_WAIT_TIME while waiting for the module's StartService response.
 *@return OutputStream if service is opened successfully and stream is started, return null otherwise
 */
@SuppressWarnings("unused")
@Deprecated
public OutputStream startPCM(boolean isEncrypted) {
    if (sdlSession == null) return null;

    // Reset handshake state written by the StartService response handler elsewhere.
    pcmServiceStartResponseReceived = false;
    pcmServiceStartResponse = false;
    // Fix: clear stale rejection params so the failure logging below cannot report
    // params left over from an earlier attempt.
    pcmServiceStartRejectedParams = null;

    sdlSession.startService(SessionType.PCM, sdlSession.getSessionId(), isEncrypted);

    // Busy-wait (bounded by the sleeping FutureTask) until the response arrives or times out.
    FutureTask<Void> fTask = createFutureTask(new CallableMethod(RESPONSE_WAIT_TIME));
    ScheduledExecutorService scheduler = createScheduler();
    scheduler.execute(fTask);

    //noinspection StatementWithEmptyBody
    while (!pcmServiceStartResponseReceived && !fTask.isDone());
    scheduler.shutdown();

    if (pcmServiceStartResponse) {
        try {
            return sdlSession.startStream(SessionType.PCM, sdlSession.getSessionId());
        } catch (Exception e) {
            return null;
        }
    } else {
        if (pcmServiceStartRejectedParams != null) {
            StringBuilder builder = new StringBuilder();
            for (String paramName : pcmServiceStartRejectedParams) {
                if (builder.length() > 0) {
                    builder.append(", ");
                }
                builder.append(paramName);
            }
            // Fix: these messages previously said "nav" — copy-paste from startH264;
            // this method starts the audio (PCM) service.
            DebugTool.logWarning("StartService for audio (PCM) failed. Rejected params: " + builder.toString());
        } else {
            DebugTool.logWarning("StartService for audio (PCM) failed (rejected params not supplied)");
        }
        return null;
    }
}
/**
 *Closes the opened audio service (serviceType 10). Deprecated delegate to endAudioStream().
 *@return true if the audio service is closed successfully, return false otherwise
 */
@SuppressWarnings("unused")
@Deprecated
public boolean endPCM() {
    return endAudioStream();
}
/**
 * Opens a video service (service type 11) and subsequently provides an IVideoStreamListener
 * to the app to send video data. The supplied VideoStreamingParameters will be set as the
 * desired parameters used during negotiation with the module.
 *
 * @param isEncrypted Specify true if packets on this service have to be encrypted
 * @param parameters  Video streaming parameters including: codec which will be used for streaming (currently, only
 *                    VideoStreamingCodec.H264 is accepted), height and width of the video in pixels.
 *
 * @return IVideoStreamListener interface if service is opened successfully and streaming is
 *         started, null otherwise
 */
@SuppressWarnings("unused")
public IVideoStreamListener startVideoStream(boolean isEncrypted, VideoStreamingParameters parameters) {
    if (sdlSession == null) {
        DebugTool.logWarning("SdlSession is not created yet.");
        return null;
    }
    if (!sdlSession.getIsConnected()) {
        DebugTool.logWarning("Connection is not available.");
        return null;
    }

    sdlSession.setDesiredVideoParams(parameters);

    // Negotiate the service; bail out if the module did not accept any parameters.
    if (tryStartVideoStream(isEncrypted, parameters) == null) {
        return null;
    }
    return sdlSession.startVideoStream();
}
/**
 *Closes the opened video service (serviceType 11).
 *Blocks the calling thread (busy-wait) for up to RESPONSE_WAIT_TIME while waiting for the module's EndService response.
 *@return true if the video service is closed successfully, return false otherwise
 */
@SuppressWarnings("unused")
public boolean endVideoStream() {
    if (sdlSession == null){ return false; }

    // Reset handshake flags written by the EndService response handler elsewhere.
    navServiceEndResponseReceived = false;
    navServiceEndResponse = false;
    sdlSession.stopVideoStream();

    // Busy-wait (bounded by the sleeping FutureTask) until the response arrives or times out.
    FutureTask<Void> fTask = createFutureTask(new CallableMethod(RESPONSE_WAIT_TIME));
    ScheduledExecutorService scheduler = createScheduler();
    scheduler.execute(fTask);

    //noinspection StatementWithEmptyBody
    while (!navServiceEndResponseReceived && !fTask.isDone());
    scheduler.shutdown();

    return navServiceEndResponse;
}
/**
 *Pauses the stream for the opened video service (serviceType 11)
 *@return true if the video service stream is paused successfully, return false otherwise
 */
@SuppressWarnings("unused")
public boolean pauseVideoStream() {
    if (sdlSession == null) {
        return false;
    }
    return sdlSession.pauseVideoStream();
}
/**
 *Resumes the stream for the opened video service (serviceType 11)
 *@return true if the video service is resumed successfully, return false otherwise
 */
@SuppressWarnings("unused")
public boolean resumeVideoStream() {
    if (sdlSession == null) {
        return false;
    }
    return sdlSession.resumeVideoStream();
}
/**
 * Opens the video service (serviceType 11) and creates a Surface (used for streaming video) with input parameters provided by the app.
 * @param frameRate - specified rate of frames to utilize for creation of Surface
 * @param iFrameInterval - specified interval to utilize for creation of Surface
 * @param width - specified width to utilize for creation of Surface
 * @param height - specified height to utilize for creation of Surface
 * @param bitrate - specified bitrate to utilize for creation of Surface
 * @param isEncrypted - true if packets on this service must be encrypted
 * @return Surface if service is opened successfully and stream is started, return null otherwise
 */
@SuppressWarnings("unused")
public Surface createOpenGLInputSurface(int frameRate, int iFrameInterval, int width,
                                        int height, int bitrate, boolean isEncrypted) {
    if (sdlSession == null || !sdlSession.getIsConnected()) {
        return null;
    }

    // Build the desired streaming parameters from the supplied values.
    ImageResolution desiredResolution = new ImageResolution();
    desiredResolution.setResolutionWidth(width);
    desiredResolution.setResolutionHeight(height);

    VideoStreamingParameters desiredParams = new VideoStreamingParameters();
    desiredParams.setFrameRate(frameRate);
    desiredParams.setInterval(iFrameInterval);
    desiredParams.setBitrate(bitrate);
    desiredParams.setResolution(desiredResolution);

    // Negotiate with the module; only create the surface if the service started.
    if (tryStartVideoStream(isEncrypted, desiredParams) == null) {
        return null;
    }
    return sdlSession.createOpenGLInputSurface(frameRate, iFrameInterval, width,
            height, bitrate, SessionType.NAV, sdlSession.getSessionId());
}
/**
* Starts streaming a remote display to the module if there is a connected session. This method of streaming requires the device to be on API level 19 or higher
* @param context a context that can be used to create the remote display
* @param remoteDisplay class object of the remote display. This class will be used to create an instance of the remote display and will be projected to the module
* @param parameters streaming parameters to be used when streaming. If null is sent in, the default/optimized options will be used.
* If you are unsure about what parameters to be used it is best to just send null and let the system determine what
* works best for the currently connected module.
*
* @param encrypted a flag of if the stream should be encrypted. Only set if you have a supplied encryption library that the module can understand.
*/
    @TargetApi(19)
    public void startRemoteDisplayStream(Context context, final Class<? extends SdlRemoteDisplay> remoteDisplay, final VideoStreamingParameters parameters, final boolean encrypted){
        // On protocol v5+ the module advertises video support explicitly; refuse early if absent.
        if(protocolVersion!= null && protocolVersion.getMajor() >= 5 && !_systemCapabilityManager.isCapabilitySupported(SystemCapabilityType.VIDEO_STREAMING)){
            Log.e(TAG, "Video streaming not supported on this module");
            return;
        }
        //Create streaming manager (lazily; reused across calls until stopRemoteDisplayStream())
        if(manager == null){
            manager = new VideoStreamingManager(context,this._internalInterface);
        }
        if(parameters == null){
            if(protocolVersion!= null && protocolVersion.getMajor() >= 5) {
                // v5+: fetch the module's video capability asynchronously and derive
                // the streaming parameters from it before starting the stream.
                _systemCapabilityManager.getCapability(SystemCapabilityType.VIDEO_STREAMING, new OnSystemCapabilityListener() {
                    @Override
                    public void onCapabilityRetrieved(Object capability) {
                        // NOTE(review): capability is cast without a null/type check, and
                        // sdlSession is dereferenced without a null check — presumably a
                        // session always exists once capabilities arrive; confirm.
                        VideoStreamingParameters params = new VideoStreamingParameters();
                        params.update((VideoStreamingCapability)capability); //Streaming parameters are ready time to stream
                        sdlSession.setDesiredVideoParams(params);
                        manager.startVideoStreaming(remoteDisplay, params, encrypted);
                    }
                    @Override
                    public void onError(String info) {
                        Log.e(TAG, "Error retrieving video streaming capability: " + info);
                    }
                });
            }else{
                //We just use default video streaming params, sized from the display
                //capability's screen resolution when one is available.
                VideoStreamingParameters params = new VideoStreamingParameters();
                DisplayCapabilities dispCap = (DisplayCapabilities)_systemCapabilityManager.getCapability(SystemCapabilityType.DISPLAY);
                if(dispCap !=null){
                    params.setResolution(dispCap.getScreenParams().getImageResolution());
                }
                sdlSession.setDesiredVideoParams(params);
                manager.startVideoStreaming(remoteDisplay,params, encrypted);
            }
        }else{
            // Caller supplied explicit parameters: use them as-is, no negotiation here.
            sdlSession.setDesiredVideoParams(parameters);
            manager.startVideoStreaming(remoteDisplay,parameters, encrypted);
        }
    }
/**
* Stops the remote display stream if one has been started
*/
public void stopRemoteDisplayStream(){
if(manager!=null){
manager.dispose();
}
manager = null;
}
/**
* Try to open a video service by using the video streaming parameters supplied.
*
* Only information from codecs, width and height are used during video format negotiation.
*
* @param isEncrypted Specify true if packets on this service have to be encrypted
* @param parameters VideoStreamingParameters that are desired. Does not guarantee this is what will be accepted.
*
* @return If the service is opened successfully, an instance of VideoStreamingParams is
* returned which contains accepted video format. If the service is opened with legacy
* mode (i.e. without any negotiation) then an instance of VideoStreamingParams is
* returned. If the service was not opened then null is returned.
*/
    @SuppressWarnings("unused")
    private VideoStreamingParameters tryStartVideoStream(boolean isEncrypted, VideoStreamingParameters parameters) {
        // Preconditions: a session must exist, the module must support video (v5+ only
        // advertises this explicitly), and the caller must supply desired parameters.
        if (sdlSession == null) {
            DebugTool.logWarning("SdlSession is not created yet.");
            return null;
        }
        if(protocolVersion!= null && protocolVersion.getMajor() >= 5 && !_systemCapabilityManager.isCapabilitySupported(SystemCapabilityType.VIDEO_STREAMING)){
            DebugTool.logWarning("Module doesn't support video streaming.");
            return null;
        }
        if (parameters == null) {
            DebugTool.logWarning("Video parameters were not supplied.");
            return null;
        }
        // Reset the handshake flags BEFORE starting the service; NavServiceStarted()/
        // NavServiceStartedNACK() flip them from the protocol callback thread.
        sdlSession.setDesiredVideoParams(parameters);
        navServiceStartResponseReceived = false;
        navServiceStartResponse = false;
        navServiceStartRejectedParams = null;
        sdlSession.startService(SessionType.NAV, sdlSession.getSessionId(), isEncrypted);
        // Busy-wait for the ACK/NACK, bounded by the RESPONSE_WAIT_TIME future task.
        // NOTE(review): this spin-loop burns a core while waiting — kept as-is since
        // the surrounding code relies on this exact polling pattern.
        FutureTask<Void> fTask = createFutureTask(new CallableMethod(RESPONSE_WAIT_TIME));
        ScheduledExecutorService scheduler = createScheduler();
        scheduler.execute(fTask);
        //noinspection StatementWithEmptyBody
        while (!navServiceStartResponseReceived && !fTask.isDone());
        scheduler.shutdown();
        if (navServiceStartResponse) {
            if(protocolVersion!= null && protocolVersion.getMajor() < 5){ //Versions 1-4 do not support streaming parameter negotiations
                sdlSession.setAcceptedVideoParams(parameters);
            }
            return sdlSession.getAcceptedVideoParams();
        }
        // Service start was rejected: log the parameter names the module NACKed, if any.
        if (navServiceStartRejectedParams != null) {
            StringBuilder builder = new StringBuilder();
            for (String paramName : navServiceStartRejectedParams) {
                if (builder.length() > 0) {
                    builder.append(", ");
                }
                builder.append(paramName);
            }
            DebugTool.logWarning("StartService for nav failed. Rejected params: " + builder.toString());
        } else {
            DebugTool.logWarning("StartService for nav failed (rejected params not supplied)");
        }
        return null;
    }
/**
*Starts the MediaCodec encoder utilized in conjunction with the Surface returned via the createOpenGLInputSurface method
*/
@SuppressWarnings("unused")
public void startEncoder () {
if (sdlSession == null || !sdlSession.getIsConnected()) return;
sdlSession.startEncoder();
}
/**
*Releases the MediaCodec encoder utilized in conjunction with the Surface returned via the createOpenGLInputSurface method
*/
@SuppressWarnings("unused")
public void releaseEncoder() {
if (sdlSession == null || !sdlSession.getIsConnected()) return;
sdlSession.releaseEncoder();
}
/**
     * Drains the MediaCodec encoder utilized in conjunction with the Surface returned via the createOpenGLInputSurface method
*/
@SuppressWarnings("unused")
public void drainEncoder(boolean endOfStream) {
if (sdlSession == null || !sdlSession.getIsConnected()) return;
sdlSession.drainEncoder(endOfStream);
}
/**
* Opens a audio service (service type 10) and subsequently provides an IAudioStreamListener
* to the app to send audio data.
*
* Currently information passed by "params" are ignored, since Audio Streaming feature lacks
* capability negotiation mechanism. App should configure audio stream data to align with
* head unit's capability by checking (upcoming) pcmCapabilities. The default format is in
* 16kHz and 16 bits.
*
* @param isEncrypted Specify true if packets on this service have to be encrypted
* @param codec Audio codec which will be used for streaming. Currently, only
* AudioStreamingCodec.LPCM is accepted.
* @param params (Reserved for future use) Additional configuration information for each
* codec. If "codec" is AudioStreamingCodec.LPCM, "params" must be an
* instance of LPCMParams class.
*
* @return IAudioStreamListener interface if service is opened successfully and streaming is
* started, null otherwise
*/
    @SuppressWarnings("unused")
    public IAudioStreamListener startAudioStream(boolean isEncrypted, AudioStreamingCodec codec,
                                                 AudioStreamingParams params) {
        // Preconditions: session exists, transport connected, and the codec is LPCM
        // (the only codec this implementation accepts; "params" is currently unused).
        if (sdlSession == null) {
            DebugTool.logWarning("SdlSession is not created yet.");
            return null;
        }
        if (!sdlSession.getIsConnected()) {
            DebugTool.logWarning("Connection is not available.");
            return null;
        }
        if (codec != AudioStreamingCodec.LPCM) {
            DebugTool.logWarning("Audio codec " + codec + " is not supported.");
            return null;
        }
        // Reset the handshake flags BEFORE starting the service; AudioServiceStarted()/
        // AudioServiceStartedNACK() flip them from the protocol callback thread.
        pcmServiceStartResponseReceived = false;
        pcmServiceStartResponse = false;
        sdlSession.startService(SessionType.PCM, sdlSession.getSessionId(), isEncrypted);
        // Busy-wait for ACK/NACK, bounded by the RESPONSE_WAIT_TIME future task.
        FutureTask<Void> fTask = createFutureTask(new CallableMethod(RESPONSE_WAIT_TIME));
        ScheduledExecutorService scheduler = createScheduler();
        scheduler.execute(fTask);
        //noinspection StatementWithEmptyBody
        while (!pcmServiceStartResponseReceived && !fTask.isDone());
        scheduler.shutdown();
        if (pcmServiceStartResponse) {
            DebugTool.logInfo("StartService for audio succeeded");
            return sdlSession.startAudioStream();
        } else {
            // Rejected: log the parameter names the module NACKed, if any were supplied.
            if (pcmServiceStartRejectedParams != null) {
                StringBuilder builder = new StringBuilder();
                for (String paramName : pcmServiceStartRejectedParams) {
                    if (builder.length() > 0) {
                        builder.append(", ");
                    }
                    builder.append(paramName);
                }
                DebugTool.logWarning("StartService for audio failed. Rejected params: " + builder.toString());
            } else {
                DebugTool.logWarning("StartService for audio failed (rejected params not supplied)");
            }
            return null;
        }
    }
/**
*Closes the opened audio service (serviceType 10)
*@return true if the audio service is closed successfully, return false otherwise
*/
    @SuppressWarnings("unused")
    public boolean endAudioStream() {
        if (sdlSession == null || !sdlSession.getIsConnected()) return false;
        // Reset the handshake flags BEFORE requesting the stop; AudioServiceEnded()/
        // AudioServiceEndedNACK() flip them from the protocol callback thread.
        pcmServiceEndResponseReceived = false;
        pcmServiceEndResponse = false;
        sdlSession.stopAudioStream();
        // Busy-wait for the end-service ACK/NACK, bounded by RESPONSE_WAIT_TIME.
        FutureTask<Void> fTask = createFutureTask(new CallableMethod(RESPONSE_WAIT_TIME));
        ScheduledExecutorService scheduler = createScheduler();
        scheduler.execute(fTask);
        //noinspection StatementWithEmptyBody
        while (!pcmServiceEndResponseReceived && !fTask.isDone());
        scheduler.shutdown();
        return pcmServiceEndResponse;
    }
/**
*Pauses the stream for the opened audio service (serviceType 10)
*@return true if the audio service stream is paused successfully, return false otherwise
*/
@SuppressWarnings("unused")
public boolean pauseAudioStream() {
return sdlSession != null && sdlSession.pauseAudioStream();
}
/**
*Resumes the stream for the opened audio service (serviceType 10)
*@return true if the audio service stream is resumed successfully, return false otherwise
*/
@SuppressWarnings("unused")
public boolean resumeAudioStream() {
return sdlSession != null && sdlSession.resumeAudioStream();
}
    // Protocol callback: NAV (video) service start was ACKed. Flips the flags polled
    // by tryStartVideoStream()'s wait loop.
    private void NavServiceStarted() {
        navServiceStartResponseReceived = true;
        navServiceStartResponse = true;
    }
    // Protocol callback: NAV service start was NACKed; records the parameter names the
    // module rejected so tryStartVideoStream() can log them.
    private void NavServiceStartedNACK(List<String> rejectedParams) {
        navServiceStartResponseReceived = true;
        navServiceStartResponse = false;
        navServiceStartRejectedParams = rejectedParams;
    }
    // Protocol callback: PCM (audio) service start was ACKed. Flips the flags polled
    // by startAudioStream()'s wait loop.
    private void AudioServiceStarted() {
        pcmServiceStartResponseReceived = true;
        pcmServiceStartResponse = true;
    }
    // Protocol callback: the encrypted RPC service start was ACKed. Flips the flags
    // polled by startProtectedRPCService()'s wait loop.
    private void RPCProtectedServiceStarted() {
        rpcProtectedResponseReceived = true;
        rpcProtectedStartResponse = true;
    }
    // Protocol callback: PCM service start was NACKed; records the parameter names the
    // module rejected so startAudioStream() can log them.
    private void AudioServiceStartedNACK(List<String> rejectedParams) {
        pcmServiceStartResponseReceived = true;
        pcmServiceStartResponse = false;
        pcmServiceStartRejectedParams = rejectedParams;
    }
    // Protocol callback: NAV service end was ACKed.
    private void NavServiceEnded() {
        navServiceEndResponseReceived = true;
        navServiceEndResponse = true;
    }
    // Protocol callback: NAV service end was NACKed.
    private void NavServiceEndedNACK() {
        navServiceEndResponseReceived = true;
        navServiceEndResponse = false;
    }
    // Protocol callback: PCM service end was ACKed. Flips the flags polled by
    // endAudioStream()'s wait loop.
    private void AudioServiceEnded() {
        pcmServiceEndResponseReceived = true;
        pcmServiceEndResponse = true;
    }
    // Protocol callback: PCM service end was NACKed.
    private void AudioServiceEndedNACK() {
        pcmServiceEndResponseReceived = true;
        pcmServiceEndResponse = false;
    }
    // Stores the Android Service hosting this proxy for later use.
    public void setAppService(Service mService)
    {
        _appService = mService;
    }
@SuppressWarnings("unused")
public boolean startProtectedRPCService() {
rpcProtectedResponseReceived = false;
rpcProtectedStartResponse = false;
sdlSession.startService(SessionType.RPC, sdlSession.getSessionId(), true);
FutureTask<Void> fTask = createFutureTask(new CallableMethod(RESPONSE_WAIT_TIME));
ScheduledExecutorService scheduler = createScheduler();
scheduler.execute(fTask);
//noinspection StatementWithEmptyBody
while (!rpcProtectedResponseReceived && !fTask.isDone());
scheduler.shutdown();
return rpcProtectedStartResponse;
}
@SuppressWarnings("unused")
public void getLockScreenIcon(final OnLockScreenIconDownloadedListener l){
if(lockScreenIconRequest == null){
l.onLockScreenIconDownloadError(new SdlException("This version of SDL core may not support lock screen icons.",
SdlExceptionCause.LOCK_SCREEN_ICON_NOT_SUPPORTED));
return;
}
LockScreenManager lockMan = sdlSession.getLockScreenMan();
Bitmap bitmap = lockMan.getLockScreenIcon();
// read bitmap if it was already downloaded so we don't have to download it every time
if(bitmap != null){
l.onLockScreenIconDownloaded(bitmap);
}
else{
String url = lockScreenIconRequest.getUrl();
sdlSession.getLockScreenMan().downloadLockScreenIcon(url, l);
}
}
/* ******************* Public Helper Methods *************************/
/*Begin V1 Enhanced helper*/
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param parentID -Menu parent ID for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param vrCommands -VR synonyms for this AddCommand.
*@param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
*@param IconType -Describes whether the image is static or dynamic
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("SameParameterValue")
public void addCommand(@NonNull Integer commandID,
String menuText, Integer parentID, Integer position,
Vector<String> vrCommands, String IconValue, ImageType IconType, Integer correlationID)
throws SdlException {
AddCommand msg = new AddCommand(commandID);
msg.setCorrelationID(correlationID);
if (vrCommands != null) msg.setVrCommands(vrCommands);
Image cmdIcon = null;
if (IconValue != null && IconType != null)
{
cmdIcon = new Image();
cmdIcon.setValue(IconValue);
cmdIcon.setImageType(IconType);
}
if (cmdIcon != null) msg.setCmdIcon(cmdIcon);
if(menuText != null || parentID != null || position != null) {
MenuParams menuParams = new MenuParams();
menuParams.setMenuName(menuText);
menuParams.setPosition(position);
menuParams.setParentID(parentID);
msg.setMenuParams(menuParams);
}
sendRPCRequest(msg);
}
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param vrCommands -VR synonyms for this AddCommand.
*@param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
*@param IconType -Describes whether the image is static or dynamic
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void addCommand(Integer commandID,
            String menuText, Integer position,
            Vector<String> vrCommands, String IconValue, ImageType IconType, Integer correlationID)
            throws SdlException {
        // Convenience overload: delegates to the fully-parameterized variant with no parent menu ID.
        addCommand(commandID, menuText, null, position, vrCommands, IconValue, IconType, correlationID);
    }
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
*@param IconType -Describes whether the image is static or dynamic
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void addCommand(Integer commandID,
            String menuText, Integer position, String IconValue, ImageType IconType,
            Integer correlationID)
            throws SdlException {
        // Convenience overload: no parent menu ID and no VR synonyms.
        addCommand(commandID, menuText, null, position, null, IconValue, IconType, correlationID);
    }
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
*@param IconType -Describes whether the image is static or dynamic
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void addCommand(Integer commandID,
            String menuText, String IconValue, ImageType IconType, Integer correlationID)
            throws SdlException {
        // Convenience overload: menu text and icon only, no position/parent/VR synonyms.
        addCommand(commandID, menuText, null, null, null, IconValue, IconType, correlationID);
    }
/**
* Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param commandID -Unique command ID of the command to add.
* @param menuText -Menu text for optional sub value containing menu parameters.
* @param vrCommands -VR synonyms for this AddCommand.
* @param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
* @param IconType -Describes whether the image is static or dynamic
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void addCommand(Integer commandID,
            String menuText, Vector<String> vrCommands, String IconValue, ImageType IconType, Integer correlationID)
            throws SdlException {
        // Convenience overload: menu text, VR synonyms and icon; no position/parent.
        addCommand(commandID, menuText, null, null, vrCommands, IconValue, IconType, correlationID);
    }
/**
* Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param commandID -Unique command ID of the command to add.
* @param vrCommands -VR synonyms for this AddCommand.
* @param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
* @param IconType -Describes whether the image is static or dynamic
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void addCommand(Integer commandID,
            Vector<String> vrCommands, String IconValue, ImageType IconType, Integer correlationID)
            throws SdlException {
        // Convenience overload: VR-only command with an icon; no menu entry.
        addCommand(commandID, null, null, null, vrCommands, IconValue, IconType, correlationID);
    }
/*End V1 Enhanced helper*/
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param parentID -Menu parent ID for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param vrCommands -VR synonyms for this AddCommand.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("SameParameterValue")
public void addCommand(@NonNull Integer commandID,
String menuText, Integer parentID, Integer position,
Vector<String> vrCommands, Integer correlationID)
throws SdlException {
AddCommand msg = new AddCommand(commandID);
msg.setCorrelationID(correlationID);
msg.setVrCommands(vrCommands);
if(menuText != null || parentID != null || position != null) {
MenuParams menuParams = new MenuParams();
menuParams.setMenuName(menuText);
menuParams.setPosition(position);
menuParams.setParentID(parentID);
msg.setMenuParams(menuParams);
}
sendRPCRequest(msg);
}
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param vrCommands -VR synonyms for this AddCommand.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void addCommand(Integer commandID,
            String menuText, Integer position,
            Vector<String> vrCommands, Integer correlationID)
            throws SdlException {
        // Convenience overload: delegates with no parent menu ID.
        addCommand(commandID, menuText, null, position, vrCommands, correlationID);
    }
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void addCommand(Integer commandID,
            String menuText, Integer position,
            Integer correlationID)
            throws SdlException {
        // Convenience overload: no parent menu ID and no VR synonyms.
        addCommand(commandID, menuText, null, position, null, correlationID);
    }
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void addCommand(Integer commandID,
            String menuText, Integer correlationID)
            throws SdlException {
        // Convenience overload: menu text only (cast disambiguates the null vrCommands).
        addCommand(commandID, menuText, null, null, (Vector<String>)null, correlationID);
    }
/**
* Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param vrCommands -VR synonyms for this AddCommand.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void addCommand(Integer commandID,
            String menuText, Vector<String> vrCommands, Integer correlationID)
            throws SdlException {
        // Convenience overload: menu text plus VR synonyms; no position/parent.
        addCommand(commandID, menuText, null, null, vrCommands, correlationID);
    }
/**
* Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param vrCommands -VR synonyms for this AddCommand.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void addCommand(Integer commandID,
            Vector<String> vrCommands, Integer correlationID)
            throws SdlException {
        // Convenience overload: VR-only command with no menu entry.
        addCommand(commandID, null, null, null, vrCommands, correlationID);
    }
/**
* Sends an AddSubMenu RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param menuID -Unique ID of the sub menu to add.
* @param menuName -Text to show in the menu for this sub menu.
     * @param position -Position within the items that are at top level of the in application menu.
* @param menuIcon -Image to be be shown along with the submenu item
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("SameParameterValue")
public void addSubMenu(@NonNull Integer menuID, @NonNull String menuName,
Integer position, Image menuIcon, Integer correlationID)
throws SdlException {
AddSubMenu msg = new AddSubMenu(menuID, menuName);
msg.setCorrelationID(correlationID);
msg.setPosition(position);
msg.setMenuIcon(menuIcon);
sendRPCRequest(msg);
}
/**
* Sends an AddSubMenu RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param menuID -Unique ID of the sub menu to add.
* @param menuName -Text to show in the menu for this sub menu.
     * @param position -Position within the items that are at top level of the in application menu.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @Deprecated
    @SuppressWarnings("SameParameterValue")
    public void addSubMenu(@NonNull Integer menuID, @NonNull String menuName,
            Integer position, Integer correlationID)
            throws SdlException {
        // Deprecated overload: delegates with no menu icon.
        addSubMenu(menuID, menuName, position, null, correlationID);
    }
/**
* Sends an AddSubMenu RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param menuID -Unique ID of the sub menu to add.
* @param menuName -Text to show in the menu for this sub menu.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @Deprecated
    @SuppressWarnings("unused")
    public void addSubMenu(Integer menuID, String menuName,
            Integer correlationID) throws SdlException {
        // Deprecated overload: delegates with no position and no menu icon.
        addSubMenu(menuID, menuName, null, null, correlationID);
    }
/*Begin V1 Enhanced helper*/
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsText -The text to speech message in the form of a string.
* @param alertText1 -The first line of the alert text field.
* @param alertText2 -The second line of the alert text field.
* @param alertText3 -The optional third line of the alert text field.
* @param playTone -Defines if tone should be played.
* @param duration -Timeout in milliseconds.
* @param softButtons -A list of App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("SameParameterValue")
public void alert(String ttsText, String alertText1,
String alertText2, String alertText3, Boolean playTone, Integer duration, Vector<SoftButton> softButtons,
Integer correlationID) throws SdlException {
Vector<TTSChunk> chunks = TTSChunkFactory.createSimpleTTSChunks(ttsText);
Alert msg = new Alert();
msg.setCorrelationID(correlationID);
msg.setAlertText1(alertText1);
msg.setAlertText2(alertText2);
msg.setAlertText3(alertText3);
msg.setDuration(duration);
msg.setPlayTone(playTone);
msg.setTtsChunks(chunks);
msg.setSoftButtons(softButtons);
sendRPCRequest(msg);
}
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsChunks -Text/phonemes to speak in the form of ttsChunks.
* @param alertText1 -The first line of the alert text field.
* @param alertText2 -The second line of the alert text field.
* @param alertText3 -The optional third line of the alert text field.
* @param playTone -Defines if tone should be played.
* @param duration -Timeout in milliseconds.
* @param softButtons -A list of App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
public void alert(Vector<TTSChunk> ttsChunks,
String alertText1, String alertText2, String alertText3, Boolean playTone,
Integer duration, Vector<SoftButton> softButtons, Integer correlationID) throws SdlException {
Alert msg = new Alert();
msg.setCorrelationID(correlationID);
msg.setAlertText1(alertText1);
msg.setAlertText2(alertText2);
msg.setAlertText3(alertText3);
msg.setDuration(duration);
msg.setPlayTone(playTone);
msg.setTtsChunks(ttsChunks);
msg.setSoftButtons(softButtons);
sendRPCRequest(msg);
}
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsText -The text to speech message in the form of a string.
* @param playTone -Defines if tone should be played.
* @param softButtons -A list of App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void alert(String ttsText, Boolean playTone, Vector<SoftButton> softButtons,
            Integer correlationID) throws SdlException {
        // Convenience overload: TTS-only alert with soft buttons, no text fields or duration.
        alert(ttsText, null, null, null, playTone, null, softButtons, correlationID);
    }
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param chunks -A list of text/phonemes to speak in the form of ttsChunks.
* @param playTone -Defines if tone should be played.
* @param softButtons -A list of App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void alert(Vector<TTSChunk> chunks, Boolean playTone, Vector<SoftButton> softButtons,
            Integer correlationID) throws SdlException {
        // Convenience overload: chunk-based TTS alert with soft buttons, no text fields.
        alert(chunks, null, null, null, playTone, null, softButtons, correlationID);
    }
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param alertText1 -The first line of the alert text field.
* @param alertText2 -The second line of the alert text field.
* @param alertText3 -The optional third line of the alert text field.
* @param playTone -Defines if tone should be played.
* @param duration -Timeout in milliseconds.
* @param softButtons -A list of App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void alert(String alertText1, String alertText2, String alertText3,
            Boolean playTone, Integer duration, Vector<SoftButton> softButtons, Integer correlationID)
            throws SdlException {
        // Text-only alert: the cast disambiguates the null TTS-chunk overload.
        alert((Vector<TTSChunk>)null, alertText1, alertText2, alertText3, playTone, duration, softButtons, correlationID);
    }
/*End V1 Enhanced helper*/
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsText -The text to speech message in the form of a string.
* @param alertText1 -The first line of the alert text field.
* @param alertText2 -The second line of the alert text field.
* @param playTone -Defines if tone should be played.
* @param duration -Timeout in milliseconds.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("SameParameterValue")
public void alert(String ttsText, String alertText1,
String alertText2, Boolean playTone, Integer duration,
Integer correlationID) throws SdlException {
Vector<TTSChunk> chunks = TTSChunkFactory.createSimpleTTSChunks(ttsText);
Alert msg = new Alert();
msg.setCorrelationID(correlationID);
msg.setAlertText1(alertText1);
msg.setAlertText2(alertText2);
msg.setDuration(duration);
msg.setPlayTone(playTone);
msg.setTtsChunks(chunks);
sendRPCRequest(msg);
}
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsChunks -A list of text/phonemes to speak in the form of ttsChunks.
* @param alertText1 -The first line of the alert text field.
* @param alertText2 -The second line of the alert text field.
* @param playTone -Defines if tone should be played.
* @param duration -Timeout in milliseconds.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
public void alert(Vector<TTSChunk> ttsChunks,
String alertText1, String alertText2, Boolean playTone,
Integer duration, Integer correlationID) throws SdlException {
Alert msg = new Alert();
msg.setCorrelationID(correlationID);
msg.setAlertText1(alertText1);
msg.setAlertText2(alertText2);
msg.setDuration(duration);
msg.setPlayTone(playTone);
msg.setTtsChunks(ttsChunks);
sendRPCRequest(msg);
}
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsText -The text to speech message in the form of a string.
* @param playTone -Defines if tone should be played.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
    @SuppressWarnings("unused")
    public void alert(String ttsText, Boolean playTone,
            Integer correlationID) throws SdlException {
        // Convenience overload: TTS-only alert, no text fields or duration.
        alert(ttsText, null, null, playTone, null, correlationID);
    }
	/**
	 * Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
	 *
	 * @param chunks -A list of text/phonemes to speak in the form of ttsChunks.
	 * @param playTone -Defines if tone should be played.
	 * @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
	 * @throws SdlException if an unrecoverable error is encountered
	 */
	@SuppressWarnings("unused")
	public void alert(Vector<TTSChunk> chunks, Boolean playTone,
			Integer correlationID) throws SdlException {
		// Delegate to the full TTSChunk-prompt overload, omitting alert text lines and duration.
		alert(chunks, null, null, playTone, null, correlationID);
	}
	/**
	 * Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
	 *
	 * @param alertText1 -The first line of the alert text field.
	 * @param alertText2 -The second line of the alert text field.
	 * @param playTone -Defines if tone should be played.
	 * @param duration -Timeout in milliseconds.
	 * @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
	 * @throws SdlException if an unrecoverable error is encountered
	 */
	@SuppressWarnings("unused")
	public void alert(String alertText1, String alertText2,
			Boolean playTone, Integer duration, Integer correlationID)
			throws SdlException {
		// The explicit cast disambiguates between the String and Vector<TTSChunk>
		// first-argument overloads; a bare null would be ambiguous.
		alert((Vector<TTSChunk>)null, alertText1, alertText2, playTone, duration, correlationID);
	}
/**
* Sends a CreateInteractionChoiceSet RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param choiceSet to be sent to the module
* @param interactionChoiceSetID to be used in reference to the supplied choiceSet
* @param correlationID to be set to the RPCRequest
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void createInteractionChoiceSet(
@NonNull Vector<Choice> choiceSet, @NonNull Integer interactionChoiceSetID,
Integer correlationID) throws SdlException {
CreateInteractionChoiceSet msg = new CreateInteractionChoiceSet(interactionChoiceSetID, choiceSet);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Sends a DeleteCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param commandID -ID of the command(s) to delete.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void deleteCommand(@NonNull Integer commandID,
Integer correlationID) throws SdlException {
DeleteCommand msg = new DeleteCommand(commandID);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Sends a DeleteInteractionChoiceSet RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param interactionChoiceSetID -ID of the interaction choice set to delete.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void deleteInteractionChoiceSet(
@NonNull Integer interactionChoiceSetID, Integer correlationID)
throws SdlException {
DeleteInteractionChoiceSet msg = new DeleteInteractionChoiceSet(interactionChoiceSetID);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Sends a DeleteSubMenu RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param menuID -The menuID of the submenu to delete.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void deleteSubMenu(Integer menuID,
Integer correlationID) throws SdlException {
DeleteSubMenu msg = new DeleteSubMenu(menuID);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/*Begin V1 Enhanced helper*/
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Intial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetID -Interaction choice set IDs to use with an interaction.
* @param vrHelp -Suggested VR Help Items to display on-screen during Perform Interaction.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void performInteraction(String initPrompt,
@NonNull String displayText, @NonNull Integer interactionChoiceSetID, Vector<VrHelpItem> vrHelp,
Integer correlationID) throws SdlException {
Vector<Integer> interactionChoiceSetIDs = new Vector<Integer>();
interactionChoiceSetIDs.add(interactionChoiceSetID);
Vector<TTSChunk> initChunks = TTSChunkFactory.createSimpleTTSChunks(initPrompt);
PerformInteraction msg = new PerformInteraction(displayText, InteractionMode.BOTH, interactionChoiceSetIDs);
msg.setInitialPrompt(initChunks);
msg.setVrHelp(vrHelp);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Intial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetID -Interaction choice set IDs to use with an interaction.
* @param helpPrompt -Help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutPrompt -Timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param vrHelp -Suggested VR Help Items to display on-screen during Perform Interaction.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void performInteraction(String initPrompt,
@NonNull String displayText, @NonNull Integer interactionChoiceSetID,
String helpPrompt, String timeoutPrompt,
@NonNull InteractionMode interactionMode, Integer timeout, Vector<VrHelpItem> vrHelp,
Integer correlationID) throws SdlException {
Vector<Integer> interactionChoiceSetIDs = new Vector<Integer>();
interactionChoiceSetIDs.add(interactionChoiceSetID);
Vector<TTSChunk> initChunks = TTSChunkFactory.createSimpleTTSChunks(initPrompt);
Vector<TTSChunk> helpChunks = TTSChunkFactory.createSimpleTTSChunks(helpPrompt);
Vector<TTSChunk> timeoutChunks = TTSChunkFactory.createSimpleTTSChunks(timeoutPrompt);
PerformInteraction msg = new PerformInteraction(displayText, interactionMode, interactionChoiceSetIDs);
msg.setInitialPrompt(initChunks);
msg.setTimeout(timeout);
msg.setHelpPrompt(helpChunks);
msg.setTimeoutPrompt(timeoutChunks);
msg.setVrHelp(vrHelp);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Intial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetIDList -A list of interaction choice set IDs to use with an interaction.
* @param helpPrompt -Help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutPrompt -Timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param vrHelp -Suggested VR Help Items to display on-screen during Perform Interaction.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void performInteraction(String initPrompt,
@NonNull String displayText, @NonNull Vector<Integer> interactionChoiceSetIDList,
String helpPrompt, String timeoutPrompt,
@NonNull InteractionMode interactionMode, Integer timeout, Vector<VrHelpItem> vrHelp,
Integer correlationID) throws SdlException {
Vector<TTSChunk> initChunks = TTSChunkFactory.createSimpleTTSChunks(initPrompt);
Vector<TTSChunk> helpChunks = TTSChunkFactory.createSimpleTTSChunks(helpPrompt);
Vector<TTSChunk> timeoutChunks = TTSChunkFactory.createSimpleTTSChunks(timeoutPrompt);
PerformInteraction msg = new PerformInteraction(displayText, interactionMode, interactionChoiceSetIDList);
msg.setInitialPrompt(initChunks);
msg.setTimeout(timeout);
msg.setHelpPrompt(helpChunks);
msg.setTimeoutPrompt(timeoutChunks);
msg.setVrHelp(vrHelp);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initChunks -A list of text/phonemes to speak for the initial prompt in the form of ttsChunks.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetIDList -A list of interaction choice set IDs to use with an interaction.
* @param helpChunks -A list of text/phonemes to speak for the help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutChunks A list of text/phonems to speak for the timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param vrHelp -Suggested VR Help Items to display on-screen during Perform Interaction.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void performInteraction(
Vector<TTSChunk> initChunks, @NonNull String displayText,
@NonNull Vector<Integer> interactionChoiceSetIDList,
Vector<TTSChunk> helpChunks, Vector<TTSChunk> timeoutChunks,
@NonNull InteractionMode interactionMode, Integer timeout, Vector<VrHelpItem> vrHelp,
Integer correlationID) throws SdlException {
PerformInteraction msg = new PerformInteraction(displayText, interactionMode, interactionChoiceSetIDList);
msg.setInitialPrompt(initChunks);
msg.setTimeout(timeout);
msg.setHelpPrompt(helpChunks);
msg.setTimeoutPrompt(timeoutChunks);
msg.setVrHelp(vrHelp);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/*End V1 Enhanced*/
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Intial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetID -Interaction choice set IDs to use with an interaction.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void performInteraction(String initPrompt,
@NonNull String displayText, @NonNull Integer interactionChoiceSetID,
Integer correlationID) throws SdlException {
Vector<Integer> interactionChoiceSetIDs = new Vector<Integer>();
interactionChoiceSetIDs.add(interactionChoiceSetID);
Vector<TTSChunk> initChunks = TTSChunkFactory.createSimpleTTSChunks(initPrompt);
PerformInteraction msg = new PerformInteraction(displayText, InteractionMode.BOTH, interactionChoiceSetIDs);
msg.setInitialPrompt(initChunks);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Intial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetID -Interaction choice set IDs to use with an interaction.
* @param helpPrompt -Help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutPrompt -Timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void performInteraction(String initPrompt,
@NonNull String displayText, @NonNull Integer interactionChoiceSetID,
String helpPrompt, String timeoutPrompt,
@NonNull InteractionMode interactionMode, Integer timeout,
Integer correlationID) throws SdlException {
Vector<Integer> interactionChoiceSetIDs = new Vector<Integer>();
interactionChoiceSetIDs.add(interactionChoiceSetID);
Vector<TTSChunk> initChunks = TTSChunkFactory.createSimpleTTSChunks(initPrompt);
Vector<TTSChunk> helpChunks = TTSChunkFactory.createSimpleTTSChunks(helpPrompt);
Vector<TTSChunk> timeoutChunks = TTSChunkFactory.createSimpleTTSChunks(timeoutPrompt);
PerformInteraction msg = new PerformInteraction(displayText, interactionMode, interactionChoiceSetIDs);
msg.setInitialPrompt(initChunks);
msg.setTimeout(timeout);
msg.setHelpPrompt(helpChunks);
msg.setTimeoutPrompt(timeoutChunks);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Intial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetIDList -A list of interaction choice set IDs to use with an interaction.
* @param helpPrompt -Help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutPrompt -Timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void performInteraction(String initPrompt,
@NonNull String displayText, @NonNull Vector<Integer> interactionChoiceSetIDList,
String helpPrompt, String timeoutPrompt,
@NonNull InteractionMode interactionMode, Integer timeout,
Integer correlationID) throws SdlException {
Vector<TTSChunk> initChunks = TTSChunkFactory.createSimpleTTSChunks(initPrompt);
Vector<TTSChunk> helpChunks = TTSChunkFactory.createSimpleTTSChunks(helpPrompt);
Vector<TTSChunk> timeoutChunks = TTSChunkFactory.createSimpleTTSChunks(timeoutPrompt);
PerformInteraction msg = new PerformInteraction(displayText, interactionMode, interactionChoiceSetIDList);
msg.setInitialPrompt(initChunks);
msg.setTimeout(timeout);
msg.setHelpPrompt(helpChunks);
msg.setTimeoutPrompt(timeoutChunks);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initChunks -A list of text/phonemes to speak for the initial prompt in the form of ttsChunks.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetIDList -A list of interaction choice set IDs to use with an interaction.
* @param helpChunks -A list of text/phonemes to speak for the help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutChunks A list of text/phonems to speak for the timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void performInteraction(
Vector<TTSChunk> initChunks, @NonNull String displayText,
@NonNull Vector<Integer> interactionChoiceSetIDList,
Vector<TTSChunk> helpChunks, Vector<TTSChunk> timeoutChunks,
@NonNull InteractionMode interactionMode, Integer timeout,
Integer correlationID) throws SdlException {
PerformInteraction msg = new PerformInteraction(displayText, interactionMode, interactionChoiceSetIDList);
msg.setInitialPrompt(initChunks);
msg.setTimeout(timeout);
msg.setHelpPrompt(helpChunks);
msg.setTimeoutPrompt(timeoutChunks);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
	// Protected registerAppInterface used to ensure only non-ALM applications call
	// registerAppInterface
	/**
	 * Builds and sends a RegisterAppInterface RPC directly via the private send path.
	 * Gathers device info (model, OS, OS version, carrier), applies defaults for null
	 * version/language/NGN-name/VR-synonym arguments, attaches the last resume hash when
	 * app resumption is enabled, and mirrors the outgoing request to a broadcast intent.
	 *
	 * @param sdlMsgVersion RPC spec version to register with; defaulted to the proxy's max supported version if null.
	 * @param appName name of the application; also the fallback for ngnMediaScreenAppName and vrSynonyms.
	 * @param ttsName TTS chunks used to speak the app name; may be null.
	 * @param ngnMediaScreenAppName abbreviated app name for NGN media screens; defaults to appName.
	 * @param vrSynonyms voice-recognition synonyms; defaults to a one-element list containing appName.
	 * @param isMediaApp whether the app is a media application.
	 * @param languageDesired VR/TTS language; defaults to EN_US if null.
	 * @param hmiDisplayLanguageDesired HMI display language; defaults to EN_US if null.
	 * @param appType HMI classification(s) of the app; may be null.
	 * @param appID unique application ID.
	 * @param dayColorScheme template color scheme for day mode; may be null.
	 * @param nightColorScheme template color scheme for night mode; may be null.
	 * @param correlationID correlation ID to attach; left unset when null.
	 * @throws SdlException if an unrecoverable error is encountered
	 */
	protected void registerAppInterfacePrivate(
			@NonNull SdlMsgVersion sdlMsgVersion, @NonNull String appName, Vector<TTSChunk> ttsName,
			String ngnMediaScreenAppName, Vector<String> vrSynonyms, @NonNull Boolean isMediaApp,
			@NonNull Language languageDesired, @NonNull Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType,
			@NonNull String appID, TemplateColorScheme dayColorScheme, TemplateColorScheme nightColorScheme, Integer correlationID)
			throws SdlException {
		// Carrier name is best-effort; telephonyManager may be unavailable on this device.
		String carrierName = null;
		if(telephonyManager != null){
			carrierName = telephonyManager.getNetworkOperatorName();
		}
		DeviceInfo deviceInfo = new DeviceInfo();
		deviceInfo.setHardware(android.os.Build.MODEL);
		deviceInfo.setOs(DeviceInfo.DEVICE_OS);
		deviceInfo.setOsVersion(Build.VERSION.RELEASE);
		deviceInfo.setCarrier(carrierName);
		// Defensive null checks below: @NonNull is advisory only and not enforced at runtime.
		if (sdlMsgVersion == null) {
			sdlMsgVersion = new SdlMsgVersion();
			sdlMsgVersion.setMajorVersion(MAX_SUPPORTED_RPC_VERSION.getMajor());
			sdlMsgVersion.setMinorVersion(MAX_SUPPORTED_RPC_VERSION.getMinor());
		}
		if (languageDesired == null) {
			languageDesired = Language.EN_US;
		}
		if (hmiDisplayLanguageDesired == null) {
			hmiDisplayLanguageDesired = Language.EN_US;
		}
		RegisterAppInterface msg = new RegisterAppInterface(sdlMsgVersion, appName, isMediaApp, languageDesired, hmiDisplayLanguageDesired, appID);
		if (correlationID != null) {
			msg.setCorrelationID(correlationID);
		}
		msg.setDeviceInfo(deviceInfo);
		msg.setTtsName(ttsName);
		if (ngnMediaScreenAppName == null) {
			ngnMediaScreenAppName = appName;
		}
		msg.setNgnMediaScreenAppName(ngnMediaScreenAppName);
		if (vrSynonyms == null) {
			vrSynonyms = new Vector<String>();
			vrSynonyms.add(appName);
		}
		msg.setVrSynonyms(vrSynonyms);
		msg.setAppHMIType(appType);
		msg.setDayColorScheme(dayColorScheme);
		msg.setNightColorScheme(nightColorScheme);
		// When app resumption is enabled, send the hash from the previous session so the
		// module can restore the app's persisted state.
		if (_bAppResumeEnabled)
		{
			if (_lastHashID != null)
				msg.setHashID(_lastHashID);
		}
		// Mirror the outgoing request to a broadcast intent (debug/monitor hook) before sending.
		Intent sendIntent = createBroadcastIntent();
		updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.REGISTER_APP_INTERFACE.toString());
		updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_REQUEST);
		updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
		updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
		sendBroadcastIntent(sendIntent);
		// Private send path: bypasses the public request queue used by application-level RPCs.
		sendRPCRequestPrivate(msg);
	}
/*Begin V1 Enhanced helper function*/
/**
* Sends a SetGlobalProperties RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param helpPrompt that will be used for the VR screen
* @param timeoutPrompt string to be displayed after timeout
* @param vrHelpTitle string that may be displayed on VR prompt dialog
* @param vrHelp a list of VR synonyms that may be displayed to user
* @param correlationID to be attached to the request
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void setGlobalProperties(
String helpPrompt, String timeoutPrompt, String vrHelpTitle, Vector<VrHelpItem> vrHelp, Integer correlationID)
throws SdlException {
SetGlobalProperties req = new SetGlobalProperties();
req.setCorrelationID(correlationID);
req.setHelpPrompt(TTSChunkFactory.createSimpleTTSChunks(helpPrompt));
req.setTimeoutPrompt(TTSChunkFactory.createSimpleTTSChunks(timeoutPrompt));
req.setVrHelpTitle(vrHelpTitle);
req.setVrHelp(vrHelp);
sendRPCRequest(req);
}
/**
* Sends a SetGlobalProperties RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param helpChunks tts chunks that should be used when prompting the user
* @param timeoutChunks tts chunks that will be used when a timeout occurs
* @param vrHelpTitle string that may be displayed on VR prompt dialog
* @param vrHelp a list of VR synonyms that may be displayed to user
* @param correlationID ID to be attached to the RPCRequest that correlates the RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void setGlobalProperties(
Vector<TTSChunk> helpChunks, Vector<TTSChunk> timeoutChunks, String vrHelpTitle, Vector<VrHelpItem> vrHelp,
Integer correlationID) throws SdlException {
SetGlobalProperties req = new SetGlobalProperties();
req.setCorrelationID(correlationID);
req.setHelpPrompt(helpChunks);
req.setTimeoutPrompt(timeoutChunks);
req.setVrHelpTitle(vrHelpTitle);
req.setVrHelp(vrHelp);
sendRPCRequest(req);
}
/*End V1 Enhanced helper function*/
/**
* Sends a SetGlobalProperties RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param helpPrompt that will be used for the VR screen
* @param timeoutPrompt string to be displayed after timeout
* @param correlationID ID to be attached to the RPCRequest that correlates the RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void setGlobalProperties(
String helpPrompt, String timeoutPrompt, Integer correlationID)
throws SdlException {
SetGlobalProperties req = new SetGlobalProperties();
req.setCorrelationID(correlationID);
req.setHelpPrompt(TTSChunkFactory.createSimpleTTSChunks(helpPrompt));
req.setTimeoutPrompt(TTSChunkFactory.createSimpleTTSChunks(timeoutPrompt));
sendRPCRequest(req);
}
/**
* Sends a SetGlobalProperties RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param helpChunks tts chunks that should be used when prompting the user
* @param timeoutChunks tts chunks that will be used when a timeout occurs
* @param correlationID ID to be attached to the RPCRequest that correlates the RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void setGlobalProperties(
Vector<TTSChunk> helpChunks, Vector<TTSChunk> timeoutChunks,
Integer correlationID) throws SdlException {
SetGlobalProperties req = new SetGlobalProperties();
req.setCorrelationID(correlationID);
req.setHelpPrompt(helpChunks);
req.setTimeoutPrompt(timeoutChunks);
sendRPCRequest(req);
}
@SuppressWarnings("unused")
public void resetGlobalProperties(Vector<GlobalProperty> properties,
Integer correlationID) throws SdlException {
ResetGlobalProperties req = new ResetGlobalProperties();
req.setCorrelationID(correlationID);
req.setProperties(properties);
sendRPCRequest(req);
}
/**
* Sends a SetMediaClockTimer RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param hours integer for hours
* @param minutes integer for minutes
* @param seconds integer for seconds
* @param updateMode mode in which the media clock timer should be updated
* @param correlationID ID to be attached to the RPCRequest that correlates the RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void setMediaClockTimer(Integer hours,
Integer minutes, Integer seconds, @NonNull UpdateMode updateMode,
Integer correlationID) throws SdlException {
SetMediaClockTimer msg = new SetMediaClockTimer(updateMode);
if (hours != null || minutes != null || seconds != null) {
StartTime startTime = new StartTime(hours, minutes, seconds);
msg.setStartTime(startTime);
}
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Pauses the media clock. Responses are captured through callback on IProxyListener.
*
* @param correlationID ID to be attached to the RPCRequest that correlates the RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void pauseMediaClockTimer(Integer correlationID)
throws SdlException {
SetMediaClockTimer msg = new SetMediaClockTimer(UpdateMode.PAUSE);
StartTime startTime = new StartTime(0, 0, 0);
msg.setStartTime(startTime);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Resumes the media clock. Responses are captured through callback on IProxyListener.
*
* @param correlationID ID to be attached to the RPCRequest that correlates the RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void resumeMediaClockTimer(Integer correlationID)
throws SdlException {
SetMediaClockTimer msg = new SetMediaClockTimer(UpdateMode.RESUME);
StartTime startTime = new StartTime(0, 0, 0);
msg.setStartTime(startTime);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Clears the media clock. Responses are captured through callback on IProxyListener.
*
* @param correlationID ID to be attached to the RPCRequest that correlates the RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void clearMediaClockTimer(Integer correlationID)
throws SdlException {
Show msg = new Show();
msg.setCorrelationID(correlationID);
msg.setMediaClock(" ");
sendRPCRequest(msg);
}
/*Begin V1 Enhanced helper*/
/**
* Sends a Show RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param mainText1 text displayed in a single or upper display line.
* @param mainText2 text displayed on the second display line.
* @param mainText3 text displayed on the second "page" first display line.
* @param mainText4 text displayed on the second "page" second display line.
* @param statusBar text is placed in the status bar area (Only valid for NAVIGATION apps)
* @param mediaClock text value for MediaClock field.
* @param mediaTrack text displayed in the track field.
* @param graphic image struct determining whether static or dynamic image to display in app.
* @param softButtons app defined SoftButtons.
* @param customPresets app labeled on-screen presets.
* @param alignment specifies how mainText1 and mainText2s texts should be aligned on display.
* @param correlationID ID to be attached to the RPCRequest that correlates the RPCResponse -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("SameParameterValue")
public void show(String mainText1, String mainText2, String mainText3, String mainText4,
String statusBar, String mediaClock, String mediaTrack,
Image graphic, Vector<SoftButton> softButtons, Vector <String> customPresets,
TextAlignment alignment, Integer correlationID)
throws SdlException {
Show msg = new Show();
msg.setCorrelationID(correlationID);
msg.setMainField1(mainText1);
msg.setMainField2(mainText2);
msg.setStatusBar(statusBar);
msg.setMediaClock(mediaClock);
msg.setMediaTrack(mediaTrack);
msg.setAlignment(alignment);
msg.setMainField3(mainText3);
msg.setMainField4(mainText4);
msg.setGraphic(graphic);
msg.setSoftButtons(softButtons);
msg.setCustomPresets(customPresets);
sendRPCRequest(msg);
}
	/**
	 * Sends a Show RPCRequest to SDL. Responses are captured through callback on IProxyListener.
	 *
	 * @param mainText1 -Text displayed in a single or upper display line.
	 * @param mainText2 -Text displayed on the second display line.
	 * @param mainText3 -Text displayed on the second "page" first display line.
	 * @param mainText4 -Text displayed on the second "page" second display line.
	 * @param graphic -Image struct determining whether static or dynamic image to display in app.
	 * @param softButtons -App defined SoftButtons.
	 * @param customPresets -App labeled on-screen presets.
	 * @param alignment -Specifies how mainText1 and mainText2s texts should be aligned on display.
	 * @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
	 * @throws SdlException if an unrecoverable error is encountered
	 */
	@SuppressWarnings("unused")
	public void show(String mainText1, String mainText2, String mainText3, String mainText4,
			Image graphic, Vector<SoftButton> softButtons, Vector <String> customPresets,
			TextAlignment alignment, Integer correlationID)
			throws SdlException {
		// Delegate to the full overload, omitting statusBar, mediaClock, and mediaTrack.
		show(mainText1, mainText2, mainText3, mainText4, null, null, null, graphic, softButtons, customPresets, alignment, correlationID);
	}
/*End V1 Enhanced helper*/
/**
* Sends a Show RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param mainText1 text displayed in a single or upper display line.
* @param mainText2 text displayed on the second display line.
* @param statusBar text is placed in the status bar area (Only valid for NAVIGATION apps)
* @param mediaClock text value for MediaClock field.
* @param mediaTrack text displayed in the track field.
* @param alignment specifies how mainText1 and mainText2s texts should be aligned on display.
* @param correlationID unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("SameParameterValue")
public void show(String mainText1, String mainText2,
String statusBar, String mediaClock, String mediaTrack,
TextAlignment alignment, Integer correlationID)
throws SdlException {
Show msg = new Show();
msg.setCorrelationID(correlationID);
msg.setMainField1(mainText1);
msg.setMainField2(mainText2);
msg.setStatusBar(statusBar);
msg.setMediaClock(mediaClock);
msg.setMediaTrack(mediaTrack);
msg.setAlignment(alignment);
sendRPCRequest(msg);
}
	/**
	 * Sends a Show RPCRequest to SDL. Responses are captured through callback on IProxyListener.
	 *
	 * @param mainText1 -Text displayed in a single or upper display line.
	 * @param mainText2 -Text displayed on the second display line.
	 * @param alignment -Specifies how mainText1 and mainText2s texts should be aligned on display.
	 * @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
	 * @throws SdlException if an unrecoverable error is encountered
	 */
	@SuppressWarnings("unused")
	public void show(String mainText1, String mainText2,
			TextAlignment alignment, Integer correlationID)
			throws SdlException {
		// Delegate to the fuller overload, omitting statusBar, mediaClock, and mediaTrack.
		show(mainText1, mainText2, null, null, null, alignment, correlationID);
	}
/**
* Sends a Speak RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsText -The text to speech message in the form of a string.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void speak(@NonNull String ttsText, Integer correlationID)
throws SdlException {
Speak msg = new Speak(TTSChunkFactory.createSimpleTTSChunks(ttsText));
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Sends a Speak RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsChunks -Text/phonemes to speak in the form of ttsChunks.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void speak(@NonNull Vector<TTSChunk> ttsChunks,
Integer correlationID) throws SdlException {
Speak msg = new Speak(ttsChunks);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Sends a SubscribeButton RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param buttonName -Name of the button to subscribe.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void subscribeButton(@NonNull ButtonName buttonName,
Integer correlationID) throws SdlException {
SubscribeButton msg = new SubscribeButton(buttonName);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
	// Protected unregisterAppInterface used to ensure no non-ALM app calls
	// unregisterAppInterface.
	/**
	 * Builds and sends an UnregisterAppInterface RPC via the private send path, mirroring
	 * the outgoing request to a broadcast intent (debug/monitor hook) before sending.
	 *
	 * @param correlationID correlation ID to attach to the request.
	 * @throws SdlException if an unrecoverable error is encountered
	 */
	protected void unregisterAppInterfacePrivate(Integer correlationID)
			throws SdlException {
		UnregisterAppInterface msg = new UnregisterAppInterface();
		msg.setCorrelationID(correlationID);
		// Broadcast the request details before handing it to the private send path.
		Intent sendIntent = createBroadcastIntent();
		updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.UNREGISTER_APP_INTERFACE.toString());
		updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_REQUEST);
		updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
		updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
		sendBroadcastIntent(sendIntent);
		// Private send path: bypasses the public request queue used by application-level RPCs.
		sendRPCRequestPrivate(msg);
	}
/**
* Sends an UnsubscribeButton RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param buttonName -Name of the button to unsubscribe.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void unsubscribeButton(@NonNull ButtonName buttonName,
Integer correlationID) throws SdlException {
UnsubscribeButton msg = new UnsubscribeButton(buttonName);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Creates a choice to be added to a choiceset. Choice has both a voice and a visual menu component.
*
* @param choiceID -Unique ID used to identify this choice (returned in callback).
* @param choiceMenuName -Text name displayed for this choice.
* @param choiceVrCommands -Vector of vrCommands used to select this choice by voice. Must contain
* at least one non-empty element.
* @return Choice created.
*/
@SuppressWarnings("unused")
public Choice createChoiceSetChoice(Integer choiceID, String choiceMenuName,
Vector<String> choiceVrCommands) {
Choice returnChoice = new Choice();
returnChoice.setChoiceID(choiceID);
returnChoice.setMenuName(choiceMenuName);
returnChoice.setVrCommands(choiceVrCommands);
return returnChoice;
}
/**
* Starts audio pass thru session. Responses are captured through callback on IProxyListener.
*
* @param initialPrompt -SDL will speak this prompt before opening the audio pass thru session.
* @param audioPassThruDisplayText1 -First line of text displayed during audio capture.
* @param audioPassThruDisplayText2 -Second line of text displayed during audio capture.
* @param samplingRate -Allowable values of 8 khz or 16 or 22 or 44 khz.
* @param maxDuration -The maximum duration of audio recording in milliseconds.
* @param bitsPerSample -Specifies the quality the audio is recorded. Currently 8 bit or 16 bit.
* @param audioType -Specifies the type of audio data being requested.
* @param muteAudio -Defines if the current audio source should be muted during the APT session.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void performaudiopassthru(String initialPrompt, String audioPassThruDisplayText1, String audioPassThruDisplayText2,
@NonNull SamplingRate samplingRate, @NonNull Integer maxDuration, @NonNull BitsPerSample bitsPerSample,
@NonNull AudioType audioType, Boolean muteAudio, Integer correlationID) throws SdlException {
Vector<TTSChunk> chunks = TTSChunkFactory.createSimpleTTSChunks(initialPrompt);
PerformAudioPassThru msg = new PerformAudioPassThru(samplingRate, maxDuration, bitsPerSample, audioType);
msg.setCorrelationID(correlationID);
msg.setInitialPrompt(chunks);
msg.setAudioPassThruDisplayText1(audioPassThruDisplayText1);
msg.setAudioPassThruDisplayText2(audioPassThruDisplayText2);
msg.setMuteAudio(muteAudio);
sendRPCRequest(msg);
}
/**
* Ends audio pass thru session. Responses are captured through callback on IProxyListener.
*
* @param correlationID ID to be attached to the RPCRequest that correlates the RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void endaudiopassthru(Integer correlationID) throws SdlException
{
EndAudioPassThru msg = new EndAudioPassThru();
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Subscribes for specific published data items. The data will be only sent if it has changed.
* Responses are captured through callback on IProxyListener.
*
* @param gps -Subscribes to GPS data.
* @param speed -Subscribes to vehicle speed data in kilometers per hour.
* @param rpm -Subscribes to number of revolutions per minute of the engine.
* @param fuelLevel -Subscribes to fuel level in the tank (percentage).
* @param fuelLevel_State -Subscribes to fuel level state.
* @param instantFuelConsumption -Subscribes to instantaneous fuel consumption in microlitres.
* @param externalTemperature -Subscribes to the external temperature in degrees celsius.
* @param prndl -Subscribes to PRNDL data that houses the selected gear.
* @param tirePressure -Subscribes to the TireStatus data containing status and pressure of tires.
* @param odometer -Subscribes to Odometer data in km.
* @param beltStatus -Subscribes to status of the seat belts.
* @param bodyInformation -Subscribes to body information including power modes.
* @param deviceStatus -Subscribes to device status including signal and battery strength.
* @param driverBraking -Subscribes to the status of the brake pedal.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
@Deprecated
public void subscribevehicledata(boolean gps, boolean speed, boolean rpm, boolean fuelLevel, boolean fuelLevel_State,
boolean instantFuelConsumption, boolean externalTemperature, boolean prndl, boolean tirePressure,
boolean odometer, boolean beltStatus, boolean bodyInformation, boolean deviceStatus,
boolean driverBraking, Integer correlationID) throws SdlException
{
SubscribeVehicleData msg = new SubscribeVehicleData();
msg.setGps(gps);
msg.setSpeed(speed);
msg.setRpm(rpm);
msg.setFuelLevel(fuelLevel);
msg.setFuelLevel_State(fuelLevel_State);
msg.setInstantFuelConsumption(instantFuelConsumption);
msg.setExternalTemperature(externalTemperature);
msg.setPrndl(prndl);
msg.setTirePressure(tirePressure);
msg.setOdometer(odometer);
msg.setBeltStatus(beltStatus);
msg.setBodyInformation(bodyInformation);
msg.setDeviceStatus(deviceStatus);
msg.setDriverBraking(driverBraking);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Subscribes for specific published data items. The data will be only sent if it has changed.
* Responses are captured through callback on IProxyListener.
*
* @param gps -Subscribes to GPS data.
* @param speed -Subscribes to vehicle speed data in kilometers per hour.
* @param rpm -Subscribes to number of revolutions per minute of the engine.
* @param fuelLevel -Subscribes to fuel level in the tank (percentage).
* @param fuelLevel_State -Subscribes to fuel level state.
* @param instantFuelConsumption -Subscribes to instantaneous fuel consumption in microlitres.
* @param externalTemperature -Subscribes to the external temperature in degrees celsius.
* @param prndl -Subscribes to PRNDL data that houses the selected gear.
* @param tirePressure -Subscribes to the TireStatus data containing status and pressure of tires.
* @param engineOilLife -Subscribes to Engine Oil Life data.
* @param odometer -Subscribes to Odometer data in km.
* @param beltStatus -Subscribes to status of the seat belts.
* @param bodyInformation -Subscribes to body information including power modes.
* @param deviceStatus -Subscribes to device status including signal and battery strength.
* @param driverBraking -Subscribes to the status of the brake pedal.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void subscribevehicledata(boolean gps, boolean speed, boolean rpm, boolean fuelLevel, boolean fuelLevel_State,
boolean instantFuelConsumption, boolean externalTemperature, boolean prndl, boolean tirePressure,
boolean engineOilLife, boolean odometer, boolean beltStatus, boolean bodyInformation, boolean deviceStatus,
boolean driverBraking, Integer correlationID) throws SdlException
{
SubscribeVehicleData msg = new SubscribeVehicleData();
msg.setGps(gps);
msg.setSpeed(speed);
msg.setRpm(rpm);
msg.setFuelLevel(fuelLevel);
msg.setFuelLevel_State(fuelLevel_State);
msg.setInstantFuelConsumption(instantFuelConsumption);
msg.setExternalTemperature(externalTemperature);
msg.setPrndl(prndl);
msg.setTirePressure(tirePressure);
msg.setEngineOilLife(engineOilLife);
msg.setOdometer(odometer);
msg.setBeltStatus(beltStatus);
msg.setBodyInformation(bodyInformation);
msg.setDeviceStatus(deviceStatus);
msg.setDriverBraking(driverBraking);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Unsubscribes for specific published data items.
* Responses are captured through callback on IProxyListener.
*
* @param gps -Unsubscribes to GPS data.
* @param speed -Unsubscribes to vehicle speed data in kilometers per hour.
* @param rpm -Unsubscribes to number of revolutions per minute of the engine.
* @param fuelLevel -Unsubscribes to fuel level in the tank (percentage).
* @param fuelLevel_State -Unsubscribes to fuel level state.
* @param instantFuelConsumption -Unsubscribes to instantaneous fuel consumption in microlitres.
* @param externalTemperature -Unsubscribes to the external temperature in degrees celsius.
* @param prndl -Unsubscribes to PRNDL data that houses the selected gear.
* @param tirePressure -Unsubscribes to the TireStatus data containing status and pressure of tires.
* @param odometer -Unsubscribes to Odometer data in km.
* @param beltStatus -Unsubscribes to status of the seat belts.
* @param bodyInformation -Unsubscribes to body information including power modes.
* @param deviceStatus -Unsubscribes to device status including signal and battery strength.
* @param driverBraking -Unsubscribes to the status of the brake pedal.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
@Deprecated
public void unsubscribevehicledata(boolean gps, boolean speed, boolean rpm, boolean fuelLevel, boolean fuelLevel_State,
boolean instantFuelConsumption, boolean externalTemperature, boolean prndl, boolean tirePressure,
boolean odometer, boolean beltStatus, boolean bodyInformation, boolean deviceStatus,
boolean driverBraking, Integer correlationID) throws SdlException
{
UnsubscribeVehicleData msg = new UnsubscribeVehicleData();
msg.setGps(gps);
msg.setSpeed(speed);
msg.setRpm(rpm);
msg.setFuelLevel(fuelLevel);
msg.setFuelLevel_State(fuelLevel_State);
msg.setInstantFuelConsumption(instantFuelConsumption);
msg.setExternalTemperature(externalTemperature);
msg.setPrndl(prndl);
msg.setTirePressure(tirePressure);
msg.setOdometer(odometer);
msg.setBeltStatus(beltStatus);
msg.setBodyInformation(bodyInformation);
msg.setDeviceStatus(deviceStatus);
msg.setDriverBraking(driverBraking);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Unsubscribes for specific published data items.
* Responses are captured through callback on IProxyListener.
*
* @param gps -Unsubscribes to GPS data.
* @param speed -Unsubscribes to vehicle speed data in kilometers per hour.
* @param rpm -Unsubscribes to number of revolutions per minute of the engine.
* @param fuelLevel -Unsubscribes to fuel level in the tank (percentage).
* @param fuelLevel_State -Unsubscribes to fuel level state.
* @param instantFuelConsumption -Unsubscribes to instantaneous fuel consumption in microlitres.
* @param externalTemperature -Unsubscribes to the external temperature in degrees celsius.
* @param prndl -Unsubscribes to PRNDL data that houses the selected gear.
* @param tirePressure -Unsubscribes to the TireStatus data containing status and pressure of tires.
* @param engineOilLife -Unsubscribes to Engine Oil Life data.
* @param odometer -Unsubscribes to Odometer data in km.
* @param beltStatus -Unsubscribes to status of the seat belts.
* @param bodyInformation -Unsubscribes to body information including power modes.
* @param deviceStatus -Unsubscribes to device status including signal and battery strength.
* @param driverBraking -Unsubscribes to the status of the brake pedal.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void unsubscribevehicledata(boolean gps, boolean speed, boolean rpm, boolean fuelLevel, boolean fuelLevel_State,
boolean instantFuelConsumption, boolean externalTemperature, boolean prndl, boolean tirePressure,
boolean engineOilLife, boolean odometer, boolean beltStatus, boolean bodyInformation, boolean deviceStatus,
boolean driverBraking, Integer correlationID) throws SdlException
{
UnsubscribeVehicleData msg = new UnsubscribeVehicleData();
msg.setGps(gps);
msg.setSpeed(speed);
msg.setRpm(rpm);
msg.setFuelLevel(fuelLevel);
msg.setFuelLevel_State(fuelLevel_State);
msg.setInstantFuelConsumption(instantFuelConsumption);
msg.setExternalTemperature(externalTemperature);
msg.setPrndl(prndl);
msg.setTirePressure(tirePressure);
msg.setEngineOilLife(engineOilLife);
msg.setOdometer(odometer);
msg.setBeltStatus(beltStatus);
msg.setBodyInformation(bodyInformation);
msg.setDeviceStatus(deviceStatus);
msg.setDriverBraking(driverBraking);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Performs a Non periodic vehicle data read request.
* Responses are captured through callback on IProxyListener.
*
* @param gps -Performs an ad-hoc request for GPS data.
* @param speed -Performs an ad-hoc request for vehicle speed data in kilometers per hour.
* @param rpm -Performs an ad-hoc request for number of revolutions per minute of the engine.
* @param fuelLevel -Performs an ad-hoc request for fuel level in the tank (percentage).
* @param fuelLevel_State -Performs an ad-hoc request for fuel level state.
* @param instantFuelConsumption -Performs an ad-hoc request for instantaneous fuel consumption in microlitres.
* @param externalTemperature -Performs an ad-hoc request for the external temperature in degrees celsius.
* @param vin -Performs an ad-hoc request for the Vehicle identification number
* @param prndl -Performs an ad-hoc request for PRNDL data that houses the selected gear.
* @param tirePressure -Performs an ad-hoc request for the TireStatus data containing status and pressure of tires.
* @param odometer -Performs an ad-hoc request for Odometer data in km.
* @param beltStatus -Performs an ad-hoc request for status of the seat belts.
* @param bodyInformation -Performs an ad-hoc request for body information including power modes.
* @param deviceStatus -Performs an ad-hoc request for device status including signal and battery strength.
* @param driverBraking -Performs an ad-hoc request for the status of the brake pedal.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
@Deprecated
public void getvehicledata(boolean gps, boolean speed, boolean rpm, boolean fuelLevel, boolean fuelLevel_State,
boolean instantFuelConsumption, boolean externalTemperature, boolean vin, boolean prndl, boolean tirePressure,
boolean odometer, boolean beltStatus, boolean bodyInformation, boolean deviceStatus,
boolean driverBraking, Integer correlationID) throws SdlException
{
GetVehicleData msg = new GetVehicleData();
msg.setGps(gps);
msg.setSpeed(speed);
msg.setRpm(rpm);
msg.setFuelLevel(fuelLevel);
msg.setFuelLevel_State(fuelLevel_State);
msg.setInstantFuelConsumption(instantFuelConsumption);
msg.setExternalTemperature(externalTemperature);
msg.setVin(vin);
msg.setPrndl(prndl);
msg.setTirePressure(tirePressure);
msg.setOdometer(odometer);
msg.setBeltStatus(beltStatus);
msg.setBodyInformation(bodyInformation);
msg.setDeviceStatus(deviceStatus);
msg.setDriverBraking(driverBraking);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Performs a Non periodic vehicle data read request.
* Responses are captured through callback on IProxyListener.
*
* @param gps -Performs an ad-hoc request for GPS data.
* @param speed -Performs an ad-hoc request for vehicle speed data in kilometers per hour.
* @param rpm -Performs an ad-hoc request for number of revolutions per minute of the engine.
* @param fuelLevel -Performs an ad-hoc request for fuel level in the tank (percentage).
* @param fuelLevel_State -Performs an ad-hoc request for fuel level state.
* @param instantFuelConsumption -Performs an ad-hoc request for instantaneous fuel consumption in microlitres.
* @param externalTemperature -Performs an ad-hoc request for the external temperature in degrees celsius.
* @param vin -Performs an ad-hoc request for the Vehicle identification number
* @param prndl -Performs an ad-hoc request for PRNDL data that houses the selected gear.
* @param tirePressure -Performs an ad-hoc request for the TireStatus data containing status and pressure of tires.
* @param engineOilLife -Performs an ad-hoc request for Engine Oil Life data.
* @param odometer -Performs an ad-hoc request for Odometer data in km.
* @param beltStatus -Performs an ad-hoc request for status of the seat belts.
* @param bodyInformation -Performs an ad-hoc request for body information including power modes.
* @param deviceStatus -Performs an ad-hoc request for device status including signal and battery strength.
* @param driverBraking -Performs an ad-hoc request for the status of the brake pedal.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void getvehicledata(boolean gps, boolean speed, boolean rpm, boolean fuelLevel, boolean fuelLevel_State,
boolean instantFuelConsumption, boolean externalTemperature, boolean vin, boolean prndl, boolean tirePressure,
boolean engineOilLife, boolean odometer, boolean beltStatus, boolean bodyInformation, boolean deviceStatus,
boolean driverBraking, Integer correlationID) throws SdlException
{
GetVehicleData msg = new GetVehicleData();
msg.setGps(gps);
msg.setSpeed(speed);
msg.setRpm(rpm);
msg.setFuelLevel(fuelLevel);
msg.setFuelLevel_State(fuelLevel_State);
msg.setInstantFuelConsumption(instantFuelConsumption);
msg.setExternalTemperature(externalTemperature);
msg.setVin(vin);
msg.setPrndl(prndl);
msg.setTirePressure(tirePressure);
msg.setEngineOilLife(engineOilLife);
msg.setOdometer(odometer);
msg.setBeltStatus(beltStatus);
msg.setBodyInformation(bodyInformation);
msg.setDeviceStatus(deviceStatus);
msg.setDriverBraking(driverBraking);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Creates a full screen overlay containing a large block of formatted text that can be scrolled with up to 8 SoftButtons defined.
* Responses are captured through callback on IProxyListener.
*
* @param scrollableMessageBody -Body of text that can include newlines and tabs.
* @param timeout -App defined timeout. Indicates how long of a timeout from the last action.
* @param softButtons -App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void scrollablemessage(@NonNull String scrollableMessageBody, Integer timeout, Vector<SoftButton> softButtons, Integer correlationID) throws SdlException
{
ScrollableMessage msg = new ScrollableMessage(scrollableMessageBody);
msg.setCorrelationID(correlationID);
msg.setTimeout(timeout);
msg.setSoftButtons(softButtons);
sendRPCRequest(msg);
}
/**
* Creates a full screen or pop-up overlay (depending on platform) with a single user controlled slider.
* Responses are captured through callback on IProxyListener.
*
* @param numTicks -Number of selectable items on a horizontal axis.
* @param position -Initial position of slider control (cannot exceed numTicks).
* @param sliderHeader -Text header to display.
* @param sliderFooter - Text footer to display (meant to display min/max threshold descriptors).
* @param timeout -App defined timeout. Indicates how long of a timeout from the last action.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void slider(@NonNull Integer numTicks, @NonNull Integer position, @NonNull String sliderHeader, Vector<String> sliderFooter, Integer timeout, Integer correlationID) throws SdlException
{
Slider msg = new Slider(numTicks, position, sliderHeader);
msg.setCorrelationID(correlationID);
msg.setSliderFooter(sliderFooter);
msg.setTimeout(timeout);
sendRPCRequest(msg);
}
/**
* Responses are captured through callback on IProxyListener.
*
* @param language requested SDL voice engine (VR+TTS) language registration
* @param hmiDisplayLanguage request display language registration.
* @param correlationID ID to be attached to the RPCRequest that correlates the RPCResponse
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void changeregistration(@NonNull Language language, @NonNull Language hmiDisplayLanguage, Integer correlationID) throws SdlException
{
ChangeRegistration msg = new ChangeRegistration(language, hmiDisplayLanguage);
msg.setCorrelationID(correlationID);
sendRPCRequest(msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile device.
* Responses are captured through callback on IProxyListener.
*
* @param is - The input stream of byte data that putFileStream will read from
* @param sdlFileName - The file reference name used by the putFile RPC.
* @param iOffset - The data offset in bytes, a value of zero is used to indicate data starting from the beginging of the file.
* A value greater than zero is used for resuming partial data chunks.
* @param iLength - The total length of the file being sent.
* @throws SdlException if an unrecoverable error is encountered
* @see #putFileStream(InputStream, String, Long, Long)
*/
@SuppressWarnings("unused")
@Deprecated
public void putFileStream(InputStream is, @NonNull String sdlFileName, Integer iOffset, Integer iLength) throws SdlException
{
PutFile msg = new PutFile(sdlFileName, FileType.BINARY);
msg.setCorrelationID(10000);
msg.setSystemFile(true);
msg.setOffset(iOffset);
msg.setLength(iLength);
startRPCStream(is, msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile
* device. Responses are captured through callback on IProxyListener.
*
* @param inputStream The input stream of byte data that will be read from.
* @param fileName The SDL file reference name used by the RPC.
* @param offset The data offset in bytes. A value of zero is used to
* indicate data starting from the beginning of the file and a value greater
* than zero is used for resuming partial data chunks.
* @param length The total length of the file being sent.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void putFileStream(InputStream inputStream, @NonNull String fileName, Long offset, Long length) throws SdlException {
PutFile msg = new PutFile(fileName, FileType.BINARY);
msg.setCorrelationID(10000);
msg.setSystemFile(true);
msg.setOffset(offset);
msg.setLength(length);
startRPCStream(inputStream, msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile device.
* Responses are captured through callback on IProxyListener.
*
* @param sdlFileName - The file reference name used by the putFile RPC.
* @param iOffset - The data offset in bytes, a value of zero is used to indicate data starting from the beginging of a file.
* A value greater than zero is used for resuming partial data chunks.
* @param iLength - The total length of the file being sent.
*
* @return OutputStream - The output stream of byte data that is written to by the app developer
* @throws SdlException if an unrecoverable error is encountered
* @see #putFileStream(String, Long, Long)
*/
@SuppressWarnings("unused")
@Deprecated
public OutputStream putFileStream(@NonNull String sdlFileName, Integer iOffset, Integer iLength) throws SdlException
{
PutFile msg = new PutFile(sdlFileName, FileType.BINARY);
msg.setCorrelationID(10000);
msg.setSystemFile(true);
msg.setOffset(iOffset);
msg.setLength(iLength);
return startRPCStream(msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile
* device. Responses are captured through callback on IProxyListener.
*
* @param fileName The SDL file reference name used by the RPC.
* @param offset The data offset in bytes. A value of zero is used to
* indicate data starting from the beginning of the file and a value greater
* than zero is used for resuming partial data chunks.
* @param length The total length of the file being sent.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public OutputStream putFileStream(@NonNull String fileName, Long offset, Long length) throws SdlException {
PutFile msg = new PutFile(fileName, FileType.BINARY);
msg.setCorrelationID(10000);
msg.setSystemFile(true);
msg.setOffset(offset);
msg.setLength(length);
return startRPCStream(msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile device.
* Responses are captured through callback on IProxyListener.
*
* @param is - The input stream of byte data that PutFileStream will read from
* @param sdlFileName - The file reference name used by the putFile RPC.
* @param iOffset - The data offset in bytes, a value of zero is used to indicate data starting from the beginging of the file.
* A value greater than zero is used for resuming partial data chunks.
* @param iLength - The total length of the file being sent.
* @param fileType - The selected file type -- see the FileType enumeration for details
* @param bPersistentFile - Indicates if the file is meant to persist between sessions / ignition cycles.
* @param bSystemFile - Indicates if the file is meant to be passed thru core to elsewhere on the system.
* @throws SdlException if an unrecoverable error is encountered
* @see #putFileStream(InputStream, String, Long, Long, FileType, Boolean, Boolean, OnPutFileUpdateListener)
*/
@SuppressWarnings("unused")
@Deprecated
public void putFileStream(InputStream is, @NonNull String sdlFileName, Integer iOffset, Integer iLength, @NonNull FileType fileType, Boolean bPersistentFile, Boolean bSystemFile) throws SdlException
{
PutFile msg = new PutFile(sdlFileName, fileType);
msg.setCorrelationID(10000);
msg.setPersistentFile(bPersistentFile);
msg.setSystemFile(bSystemFile);
msg.setOffset(iOffset);
msg.setLength(iLength);
startRPCStream(is, msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile
* device. Responses are captured through callback on IProxyListener.
*
* @param inputStream The input stream of byte data that will be read from.
* @param fileName The SDL file reference name used by the RPC.
* @param offset The data offset in bytes. A value of zero is used to
* indicate data starting from the beginning of the file and a value greater
* than zero is used for resuming partial data chunks.
* @param length The total length of the file being sent.
* @param fileType The selected file type. See the {@link FileType} enum for
* details.
* @param isPersistentFile Indicates if the file is meant to persist between
* sessions / ignition cycles.
* @param isSystemFile Indicates if the file is meant to be passed through
* core to elsewhere in the system.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public void putFileStream(InputStream inputStream, @NonNull String fileName, Long offset, Long length, FileType fileType, Boolean isPersistentFile, Boolean isSystemFile, OnPutFileUpdateListener cb) throws SdlException {
PutFile msg = new PutFile(fileName, FileType.BINARY);
msg.setCorrelationID(10000);
msg.setSystemFile(true);
msg.setOffset(offset);
msg.setLength(length);
msg.setOnPutFileUpdateListener(cb);
startRPCStream(inputStream, msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile device.
* Responses are captured through callback on IProxyListener.
*
* @param sdlFileName - The file reference name used by the putFile RPC.
* @param iOffset - The data offset in bytes, a value of zero is used to indicate data starting from the beginging of a file.
* A value greater than zero is used for resuming partial data chunks.
* @param iLength - The total length of the file being sent.
* @param fileType - The selected file type -- see the FileType enumeration for details
* @param bPersistentFile - Indicates if the file is meant to persist between sessions / ignition cycles.
* @param bSystemFile - Indicates if the file is meant to be passed thru core to elsewhere on the system.
* @return OutputStream - The output stream of byte data that is written to by the app developer
* @throws SdlException if an unrecoverable error is encountered
* @see #putFileStream(String, Long, Long, FileType, Boolean, Boolean, OnPutFileUpdateListener)
*/
@SuppressWarnings("unused")
@Deprecated
public OutputStream putFileStream(@NonNull String sdlFileName, Integer iOffset, Integer iLength, @NonNull FileType fileType, Boolean bPersistentFile, Boolean bSystemFile) throws SdlException
{
PutFile msg = new PutFile(sdlFileName, fileType);
msg.setCorrelationID(10000);
msg.setPersistentFile(bPersistentFile);
msg.setSystemFile(bSystemFile);
msg.setOffset(iOffset);
msg.setLength(iLength);
return startRPCStream(msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile
* device. Responses are captured through callback on IProxyListener.
*
* @param fileName The SDL file reference name used by the RPC.
* @param offset The data offset in bytes. A value of zero is used to
* indicate data starting from the beginning of the file and a value greater
* than zero is used for resuming partial data chunks.
* @param length The total length of the file being sent.
* @param fileType The selected file type. See the {@link FileType} enum for
* details.
* @param isPersistentFile Indicates if the file is meant to persist between
* sessions / ignition cycles.
* @param isSystemFile Indicates if the file is meant to be passed through
* core to elsewhere in the system.
* @throws SdlException if an unrecoverable error is encountered
*/
@SuppressWarnings("unused")
public OutputStream putFileStream(@NonNull String fileName, Long offset, Long length, FileType fileType, Boolean isPersistentFile, Boolean isSystemFile, OnPutFileUpdateListener cb) throws SdlException {
PutFile msg = new PutFile(fileName, FileType.BINARY);
msg.setCorrelationID(10000);
msg.setSystemFile(true);
msg.setOffset(offset);
msg.setLength(length);
msg.setOnPutFileUpdateListener(cb);
return startRPCStream(msg);
}
/**
 * Streams binary data, read from a file on the mobile device, to the module as a
 * sequence of PutFile RPCs. Responses are captured through callback on IProxyListener.
 *
 * @param sPath the physical file path on the mobile device
 * @param sdlFileName the file reference name used by the PutFile RPC
 * @param iOffset the data offset in bytes; zero means data starting from the
 * beginning of the file, a value greater than zero resumes partial data chunks
 * @param fileType the selected file type -- see the FileType enumeration for details
 * @param bPersistentFile indicates if the file is meant to persist between sessions / ignition cycles
 * @param bSystemFile indicates if the file is meant to be passed through core to elsewhere in the system
 * @param iCorrelationID a unique ID that correlates each RPCRequest and RPCResponse
 * @return RPCStreamController - null if the putFileStream could not be started, otherwise a valid object reference
 * @throws SdlException if an unrecoverable error is encountered
 * @see #putFileStream(String, String, Long, FileType, Boolean, Boolean, Boolean, Integer, OnPutFileUpdateListener)
 */
@SuppressWarnings("unused")
@Deprecated
public RPCStreamController putFileStream(String sPath, @NonNull String sdlFileName, Integer iOffset, @NonNull FileType fileType, Boolean bPersistentFile, Boolean bSystemFile, Integer iCorrelationID) throws SdlException
{
	PutFile request = new PutFile(sdlFileName, fileType);
	request.setOffset(iOffset);
	request.setLength(0);
	request.setPersistentFile(bPersistentFile);
	request.setSystemFile(bSystemFile);
	request.setCorrelationID(iCorrelationID);
	return startPutFileStream(sPath, request);
}
/**
 * Pushes a binary stream of file data, read from a path on the mobile device,
 * onto the module. Responses are captured through callback on IProxyListener.
 *
 * @param path the physical file path on the mobile device
 * @param fileName the SDL file reference name used by the RPC
 * @param offset the data offset in bytes; zero means data starting from the
 * beginning of the file, a value greater than zero resumes partial data chunks
 * @param fileType the selected file type; see the {@link FileType} enum for details
 * @param isPersistentFile indicates if the file is meant to persist between sessions / ignition cycles
 * @param isSystemFile indicates if the file is meant to be passed through core to elsewhere in the system
 * @param isPayloadProtected whether the payload should be sent over an encrypted service
 * @param correlationId a unique id that correlates each RPCRequest and RPCResponse
 * @param cb listener notified of PutFile progress updates
 * @return RPCStreamController - null if the putFileStream could not be started, otherwise a valid object reference
 * @throws SdlException if an unrecoverable error is encountered
 */
@SuppressWarnings("unused")
public RPCStreamController putFileStream(String path, @NonNull String fileName, Long offset, @NonNull FileType fileType, Boolean isPersistentFile, Boolean isSystemFile, Boolean isPayloadProtected, Integer correlationId, OnPutFileUpdateListener cb ) throws SdlException {
	PutFile request = new PutFile(fileName, fileType);
	request.setOffset(offset);
	request.setLength(0L);
	request.setPersistentFile(isPersistentFile);
	request.setSystemFile(isSystemFile);
	request.setPayloadProtected(isPayloadProtected);
	request.setCorrelationID(correlationId);
	request.setOnPutFileUpdateListener(cb);
	return startPutFileStream(path,request);
}
/**
 * Streams binary data, read from the supplied InputStream, to the module as a
 * sequence of PutFile RPCs. Responses are captured through callback on IProxyListener.
 *
 * @param is the input stream of byte data that putFileStream will read from
 * @param sdlFileName the file reference name used by the PutFile RPC
 * @param iOffset the data offset in bytes; zero means data starting from the
 * beginning of the file, a value greater than zero resumes partial data chunks
 * @param iLength the total length of the file being sent
 * @param fileType the selected file type -- see the FileType enumeration for details
 * @param bPersistentFile indicates if the file is meant to persist between sessions / ignition cycles
 * @param bSystemFile indicates if the file is meant to be passed through core to elsewhere in the system
 * @param iCorrelationID a unique ID that correlates each RPCRequest and RPCResponse
 * @return RPCStreamController - null if the putFileStream could not be started, otherwise a valid object reference
 * @throws SdlException if an unrecoverable error is encountered
 * @see #putFileStream(InputStream, String, Long, Long, FileType, Boolean, Boolean, Boolean, Integer)
 */
@SuppressWarnings("unused")
@Deprecated
public RPCStreamController putFileStream(InputStream is, @NonNull String sdlFileName, Integer iOffset, Integer iLength, @NonNull FileType fileType, Boolean bPersistentFile, Boolean bSystemFile, Integer iCorrelationID) throws SdlException
{
	PutFile request = new PutFile(sdlFileName, fileType);
	request.setOffset(iOffset);
	request.setLength(iLength);
	request.setPersistentFile(bPersistentFile);
	request.setSystemFile(bSystemFile);
	request.setCorrelationID(iCorrelationID);
	return startPutFileStream(is, request);
}
/**
 * Pushes a binary stream of file data, read from the supplied InputStream,
 * onto the module. Responses are captured through callback on IProxyListener.
 *
 * @param inputStream the input stream of byte data that will be read from
 * @param fileName the SDL file reference name used by the RPC
 * @param offset the data offset in bytes; zero means data starting from the
 * beginning of the file, a value greater than zero resumes partial data chunks
 * @param length the total length of the file being sent
 * @param fileType the selected file type; see the {@link FileType} enum for details
 * @param isPersistentFile indicates if the file is meant to persist between sessions / ignition cycles
 * @param isSystemFile indicates if the file is meant to be passed through core to elsewhere in the system
 * @param isPayloadProtected whether the payload should be sent over an encrypted service
 * @param correlationId a unique id that correlates each RPCRequest and RPCResponse
 * @return RPCStreamController - null if the putFileStream could not be started, otherwise a valid object reference
 * @throws SdlException if an unrecoverable error is encountered
 */
@SuppressWarnings("unused")
public RPCStreamController putFileStream(InputStream inputStream, @NonNull String fileName, Long offset, Long length, @NonNull FileType fileType, Boolean isPersistentFile, Boolean isSystemFile, Boolean isPayloadProtected, Integer correlationId) throws SdlException {
	PutFile request = new PutFile(fileName, fileType);
	request.setOffset(offset);
	request.setLength(length);
	request.setPersistentFile(isPersistentFile);
	request.setSystemFile(isSystemFile);
	request.setPayloadProtected(isPayloadProtected);
	request.setCorrelationID(correlationId);
	return startPutFileStream(inputStream, request);
}
/**
 * Ends an existing putFileStream that was previously initiated with any
 * putFileStream method. Simply delegates to endRPCStream() to close the
 * underlying RPC stream.
 */
@SuppressWarnings("unused")
public void endPutFileStream()
{
	endRPCStream();
}
/**
 * Pushes binary data (such as icons and album art) onto the SDL module from a
 * mobile device. Not supported on first generation SDL vehicles. Responses are
 * captured through callback on IProxyListener.
 *
 * @param sdlFileName file reference name
 * @param fileType selected file type
 * @param persistentFile indicates if the file is meant to persist between sessions / ignition cycles
 * @param fileData byte array of data of the file that is to be sent
 * @param correlationID a unique ID that correlates each RPCRequest and RPCResponse
 * @throws SdlException if an unrecoverable error is encountered
 */
@SuppressWarnings("unused")
public void putfile(@NonNull String sdlFileName, @NonNull FileType fileType, Boolean persistentFile, byte[] fileData, Integer correlationID) throws SdlException
{
	PutFile request = new PutFile(sdlFileName, fileType);
	request.setPersistentFile(persistentFile);
	request.setBulkData(fileData);
	request.setCorrelationID(correlationID);
	sendRPCRequest(request);
}
/**
 * Deletes a file resident on the SDL module in the app's local cache. Not
 * supported on first generation SDL vehicles. Responses are captured through
 * callback on IProxyListener.
 *
 * @param sdlFileName file reference name
 * @param correlationID a unique ID that correlates each RPCRequest and RPCResponse
 * @throws SdlException if an unrecoverable error is encountered
 */
@SuppressWarnings("unused")
public void deletefile(@NonNull String sdlFileName, Integer correlationID) throws SdlException
{
	DeleteFile request = new DeleteFile(sdlFileName);
	request.setCorrelationID(correlationID);
	sendRPCRequest(request);
}
/**
 * Requests the current list of resident filenames for the registered app. Not
 * supported on first generation SDL vehicles. Responses are captured through
 * callback on IProxyListener.
 *
 * @param correlationID a unique ID that correlates each RPCRequest and RPCResponse
 * @throws SdlException if an unrecoverable error is encountered
 */
@SuppressWarnings("unused")
public void listfiles(Integer correlationID) throws SdlException
{
	ListFiles request = new ListFiles();
	request.setCorrelationID(correlationID);
	sendRPCRequest(request);
}
/**
 * Sets an existing local file on SDL as the app's icon. Not supported on first
 * generation SDL vehicles. Responses are captured through callback on
 * IProxyListener.
 *
 * @param sdlFileName file reference name
 * @param correlationID a unique ID that correlates each RPCRequest and RPCResponse
 * @throws SdlException if an unrecoverable error is encountered
 */
@SuppressWarnings("unused")
public void setappicon(@NonNull String sdlFileName, Integer correlationID) throws SdlException
{
	SetAppIcon request = new SetAppIcon(sdlFileName);
	request.setCorrelationID(correlationID);
	sendRPCRequest(request);
}
/**
 * Sets an alternate display layout. If not sent, the default screen for the
 * given platform will be shown. Responses are captured through callback on
 * IProxyListener.
 *
 * @param displayLayout predefined or dynamically created screen layout
 * @param correlationID a unique ID that correlates each RPCRequest and RPCResponse
 * @throws SdlException if an unrecoverable error is encountered
 */
@SuppressWarnings("unused")
public void setdisplaylayout(@NonNull String displayLayout, Integer correlationID) throws SdlException
{
	SetDisplayLayout request = new SetDisplayLayout(displayLayout);
	request.setCorrelationID(correlationID);
	sendRPCRequest(request);
}
/**
 * Sets an alternate display layout along with day and night color schemes. If
 * not sent, the default screen for the given platform will be shown. Responses
 * are captured through callback on IProxyListener.
 *
 * @param displayLayout predefined or dynamically created screen layout
 * @param dayColorScheme a TemplateColorScheme object with the colors used for the day color scheme
 * @param nightColorScheme a TemplateColorScheme object with the colors used for the night color scheme
 * @param correlationID a unique ID that correlates each RPCRequest and RPCResponse
 * @throws SdlException if an unrecoverable error is encountered
 */
@SuppressWarnings("unused")
public void setdisplaylayout(String displayLayout, TemplateColorScheme dayColorScheme, TemplateColorScheme nightColorScheme, Integer correlationID) throws SdlException
{
	SetDisplayLayout request = new SetDisplayLayout(displayLayout);
	request.setDayColorScheme(dayColorScheme);
	request.setNightColorScheme(nightColorScheme);
	request.setCorrelationID(correlationID);
	sendRPCRequest(request);
}
/**
 * Gets the SystemCapabilityManager backing this proxy's capability queries. <br>
 * @return the SystemCapabilityManager instance (may be null if never initialized)
 */
public SystemCapabilityManager getSystemCapabilityManager() {
	return _systemCapabilityManager;
}
/**
 * Checks whether the module reports support for the given system capability.
 * @param systemCapabilityType the capability type to query
 * @return true if a capability manager exists and reports the capability as supported; false otherwise
 */
@SuppressWarnings("unused")
public boolean isCapabilitySupported(SystemCapabilityType systemCapabilityType) {
	if (_systemCapabilityManager == null) {
		return false;
	}
	return _systemCapabilityManager.isCapabilitySupported(systemCapabilityType);
}
/**
 * Asynchronously retrieves a system capability, delivering the result through
 * the supplied listener. No-op when no capability manager is available.
 * @param systemCapabilityType the capability type to retrieve
 * @param scListener callback invoked with the retrieved capability
 */
@SuppressWarnings("unused")
public void getCapability(SystemCapabilityType systemCapabilityType, OnSystemCapabilityListener scListener){
	if (_systemCapabilityManager == null) {
		return;
	}
	_systemCapabilityManager.getCapability(systemCapabilityType, scListener);
}
/**
 * Synchronously retrieves a system capability.
 * @param systemCapabilityType the capability type to retrieve
 * @return the capability object, or null when no capability manager is available
 */
@SuppressWarnings("unused")
public Object getCapability(SystemCapabilityType systemCapabilityType){
	return _systemCapabilityManager == null
			? null
			: _systemCapabilityManager.getCapability(systemCapabilityType);
}
/**
 * Adds a listener to be called whenever a new capability is retrieved.
 * No-op when no capability manager is available.
 * @param systemCapabilityType type of capability desired
 * @param listener callback to execute upon retrieving the capability
 */
public void addOnSystemCapabilityListener(final SystemCapabilityType systemCapabilityType, final OnSystemCapabilityListener listener) {
	if (_systemCapabilityManager == null) {
		return;
	}
	_systemCapabilityManager.addOnSystemCapabilityListener(systemCapabilityType, listener);
}
/**
 * Removes an OnSystemCapabilityListener that was previously added.
 * @param systemCapabilityType type of capability
 * @param listener the listener that should be removed
 * @return true if the listener was removed; false when no capability manager
 * exists or the manager did not remove it
 */
public boolean removeOnSystemCapabilityListener(final SystemCapabilityType systemCapabilityType, final OnSystemCapabilityListener listener){
	if (_systemCapabilityManager == null) {
		return false;
	}
	return _systemCapabilityManager.removeOnSystemCapabilityListener(systemCapabilityType, listener);
}
/* ******************* END Public Helper Methods *************************/
/**
 * Gets the type of transport currently used by this SdlProxy.
 *
 * @return one of the TransportType enumeration values
 * @throws IllegalStateException if called before a connection has been initialized
 * @see TransportType
 */
@SuppressWarnings("unused")
public TransportType getCurrentTransportType() throws IllegalStateException {
	if (sdlSession != null) {
		return sdlSession.getCurrentTransportType();
	}
	throw new IllegalStateException("Incorrect state of SdlProxyBase: Calling for getCurrentTransportType() while connection is not initialized");
}
// Stores the list of security library classes available for this proxy.
public void setSdlSecurityClassList(List<Class<? extends SdlSecurityBase>> list) {
	_secList = list;
}
// Installs the given security library instance on the active session, if one exists.
private void setSdlSecurity(SdlSecurityBase sec) {
	if (sdlSession == null) {
		return;
	}
	sdlSession.setSdlSecurity(sec);
}
/**
 * Checks whether the given service type is running protected on the current session.
 * @param sType the service/session type to query
 * @return true when a session exists and reports the service as protected; false otherwise
 */
@SuppressWarnings("unused")
public boolean isServiceTypeProtected(SessionType sType) {
	if (sdlSession == null) {
		return false;
	}
	return sdlSession.isServiceProtected(sType);
}
/**
 * Registers a listener for lifecycle events of the given service type.
 * No-op when any argument is null or no session exists.
 */
public void addServiceListener(SessionType serviceType, ISdlServiceListener sdlServiceListener){
	if (serviceType == null || sdlSession == null || sdlServiceListener == null) {
		return;
	}
	sdlSession.addServiceListener(serviceType, sdlServiceListener);
}
/**
 * Unregisters a previously added service listener.
 * No-op when any argument is null or no session exists.
 */
public void removeServiceListener(SessionType serviceType, ISdlServiceListener sdlServiceListener){
	if (serviceType == null || sdlSession == null || sdlServiceListener == null) {
		return;
	}
	sdlSession.removeServiceListener(serviceType, sdlServiceListener);
}
/**
 * Gets the video streaming parameters accepted by the module for the current session.
 * @return the accepted VideoStreamingParameters, or null when no session exists
 */
@SuppressWarnings("unused")
public VideoStreamingParameters getAcceptedVideoParams(){
	// Fix: guard against a null session instead of throwing an NPE; the
	// surrounding session accessors (isServiceTypeProtected, addServiceListener,
	// getCurrentTransportType) all null-check sdlSession.
	if (sdlSession == null) {
		return null;
	}
	return sdlSession.getAcceptedVideoParams();
}
// Returns the proxy event listener this proxy reports callbacks to.
public IProxyListenerBase getProxyListener()
{
	return _proxyListener;
}
// Returns the application name held by this proxy.
@SuppressWarnings("unused")
public String getAppName()
{
	return _applicationName;
}
// Returns the NGN media screen app name held by this proxy.
@SuppressWarnings("unused")
public String getNgnAppName()
{
	return _ngnMediaScreenAppName;
}
// Returns the app ID held by this proxy.
@SuppressWarnings("unused")
public String getAppID()
{
	return _appID;
}
// Returns the stored device info (may be null if none was received).
@SuppressWarnings("unused")
public DeviceInfo getDeviceInfo()
{
	return deviceInfo;
}
// Returns instanceDateTime — presumably a creation timestamp for this proxy
// instance; its assignment is not visible in this chunk (TODO confirm).
@SuppressWarnings("unused")
public long getInstanceDT()
{
	return instanceDateTime;
}
// Stores a description of the current connection.
@SuppressWarnings("unused")
public void setConnectionDetails(String sDetails)
{
	sConnectionDetails = sDetails;
}
// Returns the stored connection description.
@SuppressWarnings("unused")
public String getConnectionDetails()
{
	return sConnectionDetails;
}
//for testing only: overrides the policies URL used by this proxy.
@SuppressWarnings("unused")
public void setPoliciesURL(String sText)
{
	sPoliciesURL = sText;
}
//for testing only: returns the policies URL override.
public String getPoliciesURL()
{
	return sPoliciesURL;
}
/**
 * Tells the developer whether or not their app icon has been resumed on core.
 * @return true if the icon was resumed, false if not
 * @throws SdlException if the proxy is disposed or the app is not registered
 */
public boolean getIconResumed() throws SdlException {
	// Disposed proxies must not execute anything — fail first.
	if (_proxyDisposed) {
		throw new SdlException("This object has been disposed, it is no long capable of executing methods.", SdlExceptionCause.SDL_PROXY_DISPOSED);
	}
	// The flag is only meaningful after the app interface has registered.
	if (!_appInterfaceRegisterd) {
		throw new SdlException("SDL is not connected. Unable to determine if app icon was resumed.", SdlExceptionCause.SDL_UNAVAILABLE);
	}
	// _iconResumed is presumably set during registration response handling — not visible in this chunk.
	return _iconResumed;
}
/**
 * VideoStreamingManager houses all the elements needed to create a scoped, streaming manager for video projection. It is only a private, instance
 * dependant class at the moment until it can become public. Once the class is public and API defined, it will be moved into the SdlSession class
 */
@TargetApi(19)
private class VideoStreamingManager implements ISdlServiceListener{
	Context context;
	ISdl internalInterface;
	volatile VirtualDisplayEncoder encoder;
	private Class<? extends SdlRemoteDisplay> remoteDisplayClass = null;
	SdlRemoteDisplay remoteDisplay;
	IVideoStreamListener streamListener;
	// Scale factors applied to incoming touch coordinates to map module
	// resolution onto the virtual display's pixel size.
	float[] touchScalar = {1.0f,1.0f}; //x, y
	private HapticInterfaceManager hapticManager;
	// Tracks the in-flight multi-touch gesture; null when no gesture is active.
	SdlMotionEvent sdlMotionEvent = null;
	// Registers for NAV service lifecycle callbacks and module touch-event notifications.
	public VideoStreamingManager(Context context,ISdl iSdl){
		this.context = context;
		this.internalInterface = iSdl;
		encoder = new VirtualDisplayEncoder();
		internalInterface.addServiceListener(SessionType.NAV,this);
		//Take care of the touch events
		internalInterface.addOnRPCNotificationListener(FunctionID.ON_TOUCH_EVENT, new OnRPCNotificationListener() {
			@Override
			public void onNotified(RPCNotification notification) {
				// Only forward touches once a remote display exists to receive them.
				if(notification !=null && remoteDisplay != null){
					MotionEvent event = convertTouchEvent((OnTouchEvent)notification);
					if(event!=null){
						remoteDisplay.handleMotionEvent(event);
					}
				}
			}
		});
	}
	// Opens the video service, starts the encoder, and brings up the remote display.
	public void startVideoStreaming(Class<? extends SdlRemoteDisplay> remoteDisplayClass, VideoStreamingParameters parameters, boolean encrypted){
		streamListener = startVideoStream(encrypted,parameters);
		if(streamListener == null){
			Log.e(TAG, "Error starting video service");
			return;
		}
		// Haptic spatial data is only wired up when the module advertises support for it.
		VideoStreamingCapability capability = (VideoStreamingCapability)_systemCapabilityManager.getCapability(SystemCapabilityType.VIDEO_STREAMING);
		if(capability != null && capability.getIsHapticSpatialDataSupported()){
			hapticManager = new HapticInterfaceManager(internalInterface);
		}
		this.remoteDisplayClass = remoteDisplayClass;
		try {
			encoder.init(context,streamListener,parameters);
			//We are all set so we can start streaming at this point
			encoder.start();
			//Encoder should be up and running
			createRemoteDisplay(encoder.getVirtualDisplay());
		} catch (Exception e) {
			e.printStackTrace();
		}
		Log.d(TAG, parameters.toString());
	}
	// Tears down the presentation, the encoder, and the video service, in that order.
	public void stopStreaming(){
		if(remoteDisplay!=null){
			remoteDisplay.stop();
			remoteDisplay = null;
		}
		if(encoder!=null){
			encoder.shutDown();
		}
		if(internalInterface!=null){
			internalInterface.stopVideoService();
		}
	}
	// Stops streaming and deregisters the NAV service listener.
	public void dispose(){
		stopStreaming();
		internalInterface.removeServiceListener(SessionType.NAV,this);
	}
	// Asynchronously instantiates the app-provided SdlRemoteDisplay on the virtual display.
	private void createRemoteDisplay(final Display disp){
		try{
			if (disp == null){
				return;
			}
			// Dismiss the current presentation if the display has changed.
			if (remoteDisplay != null && remoteDisplay.getDisplay() != disp) {
				remoteDisplay.dismissPresentation();
			}
			FutureTask<Boolean> fTask = new FutureTask<Boolean>( new SdlRemoteDisplay.Creator(context, disp, remoteDisplay, remoteDisplayClass, new SdlRemoteDisplay.Callback(){
				@Override
				public void onCreated(final SdlRemoteDisplay remoteDisplay) {
					//Remote display has been created.
					//Now is a good time to do parsing for spatial data
					SdlProxyBase.VideoStreamingManager.this.remoteDisplay = remoteDisplay;
					if(hapticManager != null) {
						// Must run on the view's thread once layout is available.
						remoteDisplay.getMainView().post(new Runnable() {
							@Override
							public void run() {
								hapticManager.refreshHapticData(remoteDisplay.getMainView());
							}
						});
					}
					//Get touch scalars
					ImageResolution resolution = null;
					if(protocolVersion!= null && protocolVersion.getMajor()>=5){ //At this point we should already have the capability
						VideoStreamingCapability capability = (VideoStreamingCapability)_systemCapabilityManager.getCapability(SystemCapabilityType.VIDEO_STREAMING);
						if (capability != null) {
							resolution = capability.getPreferredResolution();
						}
					}
					if(resolution == null){ //Either the protocol version is too low to access video streaming caps, or they were null
						DisplayCapabilities dispCap = (DisplayCapabilities) internalInterface.getCapability(SystemCapabilityType.DISPLAY);
						if (dispCap != null) {
							resolution = (dispCap.getScreenParams().getImageResolution());
						}
					}
					if(resolution != null){
						// Scale module touch coordinates into virtual-display pixels.
						DisplayMetrics displayMetrics = new DisplayMetrics();
						disp.getMetrics(displayMetrics);
						touchScalar[0] = ((float)displayMetrics.widthPixels) / resolution.getResolutionWidth();
						touchScalar[1] = ((float)displayMetrics.heightPixels) / resolution.getResolutionHeight();
					}
				}
				@Override
				public void onInvalidated(final SdlRemoteDisplay remoteDisplay) {
					//Our view has been invalidated
					//A good time to refresh spatial data
					if(hapticManager != null) {
						remoteDisplay.getMainView().post(new Runnable() {
							@Override
							public void run() {
								hapticManager.refreshHapticData(remoteDisplay.getMainView());
							}
						});
					}
				}
			} ));
			Thread showPresentation = new Thread(fTask);
			showPresentation.start();
		} catch (Exception ex) {
			Log.e(TAG, "Unable to create Virtual Display.");
		}
	}
	@Override
	public void onServiceStarted(SdlSession session, SessionType type, boolean isEncrypted) {
	}
	@Override
	public void onServiceEnded(SdlSession session, SessionType type) {
		// When the NAV service ends, shut down any active stream.
		if(SessionType.NAV.equals(type)){
			if(remoteDisplay!=null){
				stopStreaming();
			}
		}
	}
	@Override
	public void onServiceError(SdlSession session, SessionType type, String reason) {
	}
	// Translates an SDL OnTouchEvent RPC into an Android MotionEvent targeting
	// the remote display; returns null when the notification carries no usable data.
	private MotionEvent convertTouchEvent(OnTouchEvent touchEvent){
		List<TouchEvent> eventList = touchEvent.getEvent();
		if (eventList == null || eventList.size() == 0) return null;
		TouchType touchType = touchEvent.getType();
		if (touchType == null){ return null;}
		int eventListSize = eventList.size();
		MotionEvent.PointerProperties[] pointerProperties = new MotionEvent.PointerProperties[eventListSize];
		MotionEvent.PointerCoords[] pointerCoords = new MotionEvent.PointerCoords[eventListSize];
		TouchEvent event;
		MotionEvent.PointerProperties properties;
		MotionEvent.PointerCoords coords;
		TouchCoord touchCoord;
		for(int i = 0; i < eventListSize; i++){
			event = eventList.get(i);
			if(event == null || event.getId() == null || event.getTouchCoordinates() == null){
				continue;
			}
			properties = new MotionEvent.PointerProperties();
			properties.id = event.getId();
			properties.toolType = MotionEvent.TOOL_TYPE_FINGER;
			// Only the most recent coordinate of each pointer is forwarded.
			List<TouchCoord> coordList = event.getTouchCoordinates();
			if (coordList == null || coordList.size() == 0){ continue; }
			touchCoord = coordList.get(coordList.size() -1);
			if(touchCoord == null){ continue; }
			coords = new MotionEvent.PointerCoords();
			coords.x = touchCoord.getX() * touchScalar[0];
			coords.y = touchCoord.getY() * touchScalar[1];
			coords.orientation = 0;
			coords.pressure = 1.0f;
			coords.size = 1;
			//Add the info to lists only after we are sure we have all available info
			pointerProperties[i] = properties;
			pointerCoords[i] = coords;
		}
		// A gesture can only start with a BEGIN event; anything else without an
		// active gesture is dropped.
		if(sdlMotionEvent == null) {
			if (touchType == TouchType.BEGIN) {
				sdlMotionEvent = new SdlMotionEvent();
			}else{
				return null;
			}
		}
		int eventAction = sdlMotionEvent.getMotionEvent(touchType, pointerProperties);
		long startTime = sdlMotionEvent.startOfEvent;
		//If the motion event should be finished we should clear our reference
		if(eventAction == MotionEvent.ACTION_UP || eventAction == MotionEvent.ACTION_CANCEL){
			sdlMotionEvent = null;
		}
		return MotionEvent.obtain(startTime, SystemClock.uptimeMillis(), eventAction, eventListSize, pointerProperties, pointerCoords, 0, 0,1,1,0,0, InputDevice.SOURCE_TOUCHSCREEN,0);
	}
}
/**
 * Keeps track of the current motion event for VPM: records when the gesture
 * started and which pointer ids are currently down, so that SDL touch types
 * can be mapped onto the correct Android MotionEvent actions.
 */
private static class SdlMotionEvent{
	// Timestamp (uptimeMillis) when this gesture began; used as MotionEvent downTime.
	long startOfEvent;
	// Maps pointer id -> last MotionEvent action recorded for that pointer.
	SparseIntArray pointerStatuses = new SparseIntArray();
	SdlMotionEvent(){
		startOfEvent = SystemClock.uptimeMillis();
	}
	/**
	 * Handles the SDL Touch Event to keep track of pointer status and returns the appropriate
	 * Android MotionEvent according to this event's status
	 * @param touchType The SDL TouchType that was received from the module
	 * @param pointerProperties the parsed pointer properties built from the OnTouchEvent RPC
	 * @return the correct native Android MotionEvent action to dispatch
	 */
	synchronized int getMotionEvent(TouchType touchType, MotionEvent.PointerProperties[] pointerProperties){
		int motionEvent = MotionEvent.ACTION_DOWN;
		switch (touchType){
			case BEGIN:
				if(pointerStatuses.size() == 0){
					//The motion event has just begun
					motionEvent = MotionEvent.ACTION_DOWN;
				}else{
					// Additional finger while a gesture is already active.
					motionEvent = MotionEvent.ACTION_POINTER_DOWN;
				}
				setPointerStatuses(motionEvent, pointerProperties);
				break;
			case MOVE:
				motionEvent = MotionEvent.ACTION_MOVE;
				setPointerStatuses(motionEvent, pointerProperties);
				break;
			case END:
				//Clears out pointers that have ended
				setPointerStatuses(MotionEvent.ACTION_UP, pointerProperties);
				if(pointerStatuses.size() == 0){
					//The motion event has just ended
					motionEvent = MotionEvent.ACTION_UP;
				}else{
					// At least one finger is still down.
					motionEvent = MotionEvent.ACTION_POINTER_UP;
				}
				break;
			case CANCEL:
				//Assuming this cancels the entire event
				motionEvent = MotionEvent.ACTION_CANCEL;
				pointerStatuses.clear();
				break;
			default:
				break;
		}
		return motionEvent;
	}
	// Records or removes the status of each pointer id for the given action.
	private void setPointerStatuses(int motionEvent, MotionEvent.PointerProperties[] pointerProperties){
		for(int i = 0; i < pointerProperties.length; i ++){
			MotionEvent.PointerProperties properties = pointerProperties[i];
			if(properties != null){
				if(motionEvent == MotionEvent.ACTION_UP || motionEvent == MotionEvent.ACTION_POINTER_UP){
					pointerStatuses.delete(properties.id);
				}else if(motionEvent == MotionEvent.ACTION_DOWN && properties.id == 0){
					pointerStatuses.put(properties.id, MotionEvent.ACTION_DOWN);
				}else{
					pointerStatuses.put(properties.id, motionEvent);
				}
			}
		}
	}
}
} // end-class
|
package com.xll.upms.admin.service.impl;
import com.baomidou.mybatisplus.service.impl.ServiceImpl;
import com.xll.upms.admin.mapper.SysDeptRelationMapper;
import com.xll.upms.admin.model.entity.SysDeptRelation;
import com.xll.upms.admin.service.SysDeptRelationService;
import org.springframework.stereotype.Service;
/**
 * @Author Xu Liangliang (徐亮亮)
 * @Description: MyBatis-Plus service implementation for {@link SysDeptRelation}
 *               entities; all CRUD behavior is inherited from {@link ServiceImpl}
 *               backed by {@link SysDeptRelationMapper}.
 * @Date 2019/1/18 21:51
 */
@Service
public class SysDeptRelationServiceImpl extends ServiceImpl<SysDeptRelationMapper, SysDeptRelation> implements SysDeptRelationService {
}
|
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.device.gamepad;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThan;
import android.os.Build;
import android.view.KeyEvent;
import android.view.MotionEvent;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;
import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.base.test.util.Feature;
import java.util.Arrays;
import java.util.BitSet;
/**
* Verify no regressions in gamepad mappings.
*/
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class GamepadMappingsTest {
// Allowed floating-point error when comparing mapped values against raw values.
private static final float ERROR_TOLERANCE = 0.000001f;
/**
 * Product ID for Xbox One S gamepads with updated firmware connected over Bluetooth.
 * Microsoft released a firmware update for this gamepad that changes the button and axis
 * assignments. We distinguish between them by comparing the product ID.
 */
private static final int XBOX_ONE_S_PRODUCT_ID = 0x02fd;
/**
 * The device ID string for Xbox One S gamepads connected over Bluetooth.
 */
private static final String XBOX_WIRELESS_DEVICE_NAME = "Xbox Wireless Controller";
/**
 * Set bits indicate that we don't expect the button at mMappedButtons[index] to be mapped.
 */
private BitSet mUnmappedButtons = new BitSet(CanonicalButtonIndex.COUNT);
/**
 * Set bits indicate that we don't expect the axis at mMappedAxes[index] to be mapped.
 */
private BitSet mUnmappedAxes = new BitSet(CanonicalAxisIndex.COUNT);
// Output arrays filled by mapToStandardGamepad(); indexed by canonical button/axis index.
private float[] mMappedButtons = new float[CanonicalButtonIndex.COUNT];
private float[] mMappedAxes = new float[CanonicalAxisIndex.COUNT];
// Raw input arrays; setUp() seeds each slot with a unique value so the
// assertions can verify exactly which raw slot fed each canonical slot.
private float[] mRawButtons = new float[GamepadDevice.MAX_RAW_BUTTON_VALUES];
private float[] mRawAxes = new float[GamepadDevice.MAX_RAW_AXIS_VALUES];
@Before
public void setUp() {
	// By default, we expect every button and axis to be mapped.
	mUnmappedButtons.clear();
	mUnmappedAxes.clear();
	// Start with all the mapped values as unmapped.
	Arrays.fill(mMappedButtons, Float.NaN);
	Arrays.fill(mMappedAxes, Float.NaN);
	// Set each raw value to something unique: axes get distinct negative
	// values, buttons get distinct positive values.
	for (int i = 0; i < GamepadDevice.MAX_RAW_AXIS_VALUES; i++) {
		mRawAxes[i] = -i - 1.0f;
	}
	for (int i = 0; i < GamepadDevice.MAX_RAW_BUTTON_VALUES; i++) {
		mRawButtons[i] = i + 1.0f;
	}
}
/** Verifies the NVIDIA Shield device-name prefix resolves to the expected standard mapping. */
@Test
@Feature({"Gamepad"})
public void testShieldGamepadMappings() {
	GamepadMappings shieldMappings =
			GamepadMappings.getMappings(GamepadMappings.NVIDIA_SHIELD_DEVICE_NAME_PREFIX);
	shieldMappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
	assertShieldGamepadMappings(shieldMappings);
}
// The Xbox 360 pad shares the Shield layout, so the same shared assertions apply.
@Test
@Feature({"Gamepad"})
public void testXBox360GamepadMappings() {
	GamepadMappings mappings =
			GamepadMappings.getMappings(GamepadMappings.MICROSOFT_XBOX_PAD_DEVICE_NAME);
	mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
	assertShieldGamepadMappings(mappings);
}
// DualShock 3 / SIXAXIS face-button mapping depends on the Android version,
// because the OS-level mapping changed in Android 9 (P).
@Test
@Feature({"Gamepad"})
public void testPS3SixAxisGamepadMappings() {
	GamepadMappings mappings =
			GamepadMappings.getMappings(GamepadMappings.PS_DUALSHOCK_3_SIXAXIS_DEVICE_NAME);
	mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
	if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
		// Changes in Android 9 caused the DualShock 3 and SIXAXIS mappings to change.
		// On P+ the raw A/B/X/Y keycodes map straight through to the canonical slots.
		Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.PRIMARY],
				mRawButtons[KeyEvent.KEYCODE_BUTTON_A], ERROR_TOLERANCE);
		Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.SECONDARY],
				mRawButtons[KeyEvent.KEYCODE_BUTTON_B], ERROR_TOLERANCE);
		Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.TERTIARY],
				mRawButtons[KeyEvent.KEYCODE_BUTTON_X], ERROR_TOLERANCE);
		Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.QUATERNARY],
				mRawButtons[KeyEvent.KEYCODE_BUTTON_Y], ERROR_TOLERANCE);
	} else {
		// Pre-P the raw face-button keycodes arrive shuffled and the mapping swaps them back.
		Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.PRIMARY],
				mRawButtons[KeyEvent.KEYCODE_BUTTON_X], ERROR_TOLERANCE);
		Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.SECONDARY],
				mRawButtons[KeyEvent.KEYCODE_BUTTON_Y], ERROR_TOLERANCE);
		Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.TERTIARY],
				mRawButtons[KeyEvent.KEYCODE_BUTTON_A], ERROR_TOLERANCE);
		Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.QUATERNARY],
				mRawButtons[KeyEvent.KEYCODE_BUTTON_B], ERROR_TOLERANCE);
	}
	assertMappedTriggerButtonsToTopShoulder();
	assertMappedCommonThumbstickButtons();
	assertMappedCommonDpadButtons();
	assertMappedCommonStartSelectMetaButtons();
	assertMappedTriggerAxesToBottomShoulder();
	assertMappedXYAxes();
	assertMappedZAndRZAxesToRightStick();
	assertMapping(mappings);
}
// The Samsung EI-GP20 has no shoulder buttons, so those indices stay unmapped.
@Test
@Feature({"Gamepad"})
public void testSamsungEIGP20GamepadMappings() {
	GamepadMappings mappings =
			GamepadMappings.getMappings(GamepadMappings.SAMSUNG_EI_GP20_DEVICE_NAME);
	mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
	assertMappedCommonXYABButtons();
	assertMappedUpperTriggerButtonsToBottomShoulder();
	assertMappedCommonThumbstickButtons();
	assertMappedCommonStartSelectMetaButtons();
	assertMappedHatAxisToDpadButtons();
	assertMappedXYAxes();
	assertMappedRXAndRYAxesToRightStick();
	expectNoShoulderButtons();
	assertMapping(mappings);
}
// Amazon Fire gamepad: bottom shoulders come from pedal axes rather than trigger axes.
@Test
@Feature({"Gamepad"})
public void testAmazonFireGamepadMappings() {
	GamepadMappings mappings =
			GamepadMappings.getMappings(GamepadMappings.AMAZON_FIRE_DEVICE_NAME);
	mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
	assertMappedCommonXYABButtons();
	assertMappedPedalAxesToBottomShoulder();
	assertMappedCommonThumbstickButtons();
	assertMappedCommonStartSelectMetaButtons();
	assertMappedTriggerButtonsToTopShoulder();
	assertMappedHatAxisToDpadButtons();
	assertMappedXYAxes();
	assertMappedZAndRZAxesToRightStick();
	assertMapping(mappings);
}
/**
 * Checks the heuristic (unknown-device) mapping for a gamepad that exposes an
 * Xbox-360-style set of axes (Z/RZ right stick, LTRIGGER/RTRIGGER) and buttons.
 */
@Test
@Feature({"Gamepad"})
public void testUnknownXBox360GamepadMappings() {
	int[] axes = new int[] {
			MotionEvent.AXIS_X,
			MotionEvent.AXIS_Y,
			MotionEvent.AXIS_Z,
			MotionEvent.AXIS_RZ,
			MotionEvent.AXIS_LTRIGGER,
			MotionEvent.AXIS_RTRIGGER,
			MotionEvent.AXIS_HAT_X,
			MotionEvent.AXIS_HAT_Y
	};
	int[] buttonKeyCodes = new int[] {
			KeyEvent.KEYCODE_BUTTON_A,
			KeyEvent.KEYCODE_BUTTON_B,
			KeyEvent.KEYCODE_BUTTON_X,
			KeyEvent.KEYCODE_BUTTON_Y,
			KeyEvent.KEYCODE_BUTTON_L1,
			KeyEvent.KEYCODE_BUTTON_R1,
			KeyEvent.KEYCODE_BUTTON_THUMBL,
			KeyEvent.KEYCODE_BUTTON_THUMBR,
			KeyEvent.KEYCODE_BUTTON_START,
			KeyEvent.KEYCODE_BUTTON_SELECT,
			KeyEvent.KEYCODE_BUTTON_MODE
	};
	BitSet buttons = new BitSet();
	for (int keyCode : buttonKeyCodes) {
		buttons.set(keyCode);
	}
	GamepadMappings mappings = GamepadMappings.getUnknownGamepadMappings(axes, buttons);
	mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
	assertMappedCommonXYABButtons();
	assertMappedTriggerButtonsToTopShoulder();
	assertMappedCommonThumbstickButtons();
	assertMappedCommonStartSelectMetaButtons();
	assertMappedTriggerAxesToBottomShoulder();
	assertMappedHatAxisToDpadButtons();
	assertMappedXYAxes();
	assertMappedZAndRZAxesToRightStick();
	assertMapping(mappings);
}
@Test
@Feature({"Gamepad"})
public void testUnknownMogaProGamepadMappings() {
    // Moga Pro-style layout: bottom shoulder values arrive on the BRAKE/GAS
    // pedal axes instead of the trigger axes.
    int[] axes = {MotionEvent.AXIS_X, MotionEvent.AXIS_Y, MotionEvent.AXIS_Z,
            MotionEvent.AXIS_RZ, MotionEvent.AXIS_BRAKE, MotionEvent.AXIS_GAS,
            MotionEvent.AXIS_HAT_X, MotionEvent.AXIS_HAT_Y};
    int[] buttonKeyCodes = {KeyEvent.KEYCODE_BUTTON_A, KeyEvent.KEYCODE_BUTTON_B,
            KeyEvent.KEYCODE_BUTTON_X, KeyEvent.KEYCODE_BUTTON_Y, KeyEvent.KEYCODE_BUTTON_L1,
            KeyEvent.KEYCODE_BUTTON_R1, KeyEvent.KEYCODE_BUTTON_THUMBL,
            KeyEvent.KEYCODE_BUTTON_THUMBR, KeyEvent.KEYCODE_BUTTON_START,
            KeyEvent.KEYCODE_BUTTON_SELECT, KeyEvent.KEYCODE_BUTTON_MODE};
    BitSet buttons = new BitSet();
    for (int keyCode : buttonKeyCodes) {
        buttons.set(keyCode);
    }
    GamepadMappings mappings = GamepadMappings.getUnknownGamepadMappings(axes, buttons);
    mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
    assertMappedCommonXYABButtons();
    assertMappedTriggerButtonsToTopShoulder();
    assertMappedCommonThumbstickButtons();
    assertMappedCommonStartSelectMetaButtons();
    assertMappedPedalAxesToBottomShoulder();
    assertMappedHatAxisToDpadButtons();
    assertMappedXYAxes();
    assertMappedZAndRZAxesToRightStick();
    assertMapping(mappings);
}
@Test
@Feature({"Gamepad"})
public void testUnknownXiaomiGamepadMappings() {
    // Xiaomi-style layout: right stick on RX/RY and bottom shoulder values on
    // the BRAKE/THROTTLE pedal axes.
    int[] axes = {MotionEvent.AXIS_X, MotionEvent.AXIS_Y, MotionEvent.AXIS_RX,
            MotionEvent.AXIS_RY, MotionEvent.AXIS_BRAKE, MotionEvent.AXIS_THROTTLE,
            MotionEvent.AXIS_HAT_X, MotionEvent.AXIS_HAT_Y};
    int[] buttonKeyCodes = {KeyEvent.KEYCODE_BUTTON_A, KeyEvent.KEYCODE_BUTTON_B,
            KeyEvent.KEYCODE_BUTTON_X, KeyEvent.KEYCODE_BUTTON_Y, KeyEvent.KEYCODE_BUTTON_L1,
            KeyEvent.KEYCODE_BUTTON_R1, KeyEvent.KEYCODE_BUTTON_THUMBL,
            KeyEvent.KEYCODE_BUTTON_THUMBR, KeyEvent.KEYCODE_BUTTON_START,
            KeyEvent.KEYCODE_BUTTON_SELECT, KeyEvent.KEYCODE_BUTTON_MODE};
    BitSet buttons = new BitSet();
    for (int keyCode : buttonKeyCodes) {
        buttons.set(keyCode);
    }
    GamepadMappings mappings = GamepadMappings.getUnknownGamepadMappings(axes, buttons);
    mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
    assertMappedCommonXYABButtons();
    assertMappedTriggerButtonsToTopShoulder();
    assertMappedCommonThumbstickButtons();
    assertMappedCommonStartSelectMetaButtons();
    assertMappedAltPedalAxesToBottomShoulder();
    assertMappedHatAxisToDpadButtons();
    assertMappedXYAxes();
    assertMappedRXAndRYAxesToRightStick();
    assertMapping(mappings);
}
@Test
@Feature({"Gamepad"})
public void testUnknownGpdXdGamepadMappings() {
    // GPD XD-style layout: no hat axes (d-pad is reported as key events) and
    // the bottom shoulder pair arrives as the L2/R2 buttons.
    int[] axes = {MotionEvent.AXIS_X, MotionEvent.AXIS_Y, MotionEvent.AXIS_Z,
            MotionEvent.AXIS_RZ};
    int[] buttonKeyCodes = {KeyEvent.KEYCODE_BUTTON_A, KeyEvent.KEYCODE_BUTTON_B,
            KeyEvent.KEYCODE_BUTTON_X, KeyEvent.KEYCODE_BUTTON_Y, KeyEvent.KEYCODE_BUTTON_L1,
            KeyEvent.KEYCODE_BUTTON_R1, KeyEvent.KEYCODE_BUTTON_L2,
            KeyEvent.KEYCODE_BUTTON_R2, KeyEvent.KEYCODE_BUTTON_THUMBL,
            KeyEvent.KEYCODE_BUTTON_THUMBR, KeyEvent.KEYCODE_BUTTON_START,
            KeyEvent.KEYCODE_BUTTON_SELECT, KeyEvent.KEYCODE_BUTTON_MODE};
    BitSet buttons = new BitSet();
    for (int keyCode : buttonKeyCodes) {
        buttons.set(keyCode);
    }
    GamepadMappings mappings = GamepadMappings.getUnknownGamepadMappings(axes, buttons);
    mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
    assertMappedCommonXYABButtons();
    assertMappedTriggerButtonsToTopShoulder();
    assertMappedCommonThumbstickButtons();
    assertMappedCommonStartSelectMetaButtons();
    assertMappedLowerTriggerButtonsToBottomShoulder();
    assertMappedCommonDpadButtons();
    assertMappedXYAxes();
    assertMappedZAndRZAxesToRightStick();
    assertMapping(mappings);
}
@Test
@Feature({"Gamepad"})
public void testUnknownGamepadMappingsNoMeta() {
    // Same as the generic unknown-device case, but the device reports no MODE
    // (meta) button, so the mapping must omit it.
    int[] axes = {
            MotionEvent.AXIS_X, MotionEvent.AXIS_Y, MotionEvent.AXIS_Z, MotionEvent.AXIS_RZ};
    int[] buttonKeyCodes = {KeyEvent.KEYCODE_BUTTON_A, KeyEvent.KEYCODE_BUTTON_B,
            KeyEvent.KEYCODE_BUTTON_X, KeyEvent.KEYCODE_BUTTON_Y, KeyEvent.KEYCODE_BUTTON_L1,
            KeyEvent.KEYCODE_BUTTON_R1, KeyEvent.KEYCODE_BUTTON_THUMBL,
            KeyEvent.KEYCODE_BUTTON_THUMBR, KeyEvent.KEYCODE_BUTTON_START,
            KeyEvent.KEYCODE_BUTTON_SELECT};
    BitSet buttons = new BitSet();
    for (int keyCode : buttonKeyCodes) {
        buttons.set(keyCode);
    }
    mRawButtons[KeyEvent.KEYCODE_BUTTON_MODE] = 0.0f;
    GamepadMappings mappings = GamepadMappings.getUnknownGamepadMappings(axes, buttons);
    mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
    expectNoMetaButton(mappings);
    assertMapping(mappings);
}
@Test
@Feature({"Gamepad"})
public void testPS4GamepadMappings() {
    // The DualShock 4 reports a different axis set depending on the Android
    // release: P and later use LTRIGGER/RTRIGGER for the triggers, earlier
    // releases use RX/RY.
    int[] axes;
    if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
        axes = new int[] {MotionEvent.AXIS_X, MotionEvent.AXIS_Y, MotionEvent.AXIS_Z,
                MotionEvent.AXIS_RZ, MotionEvent.AXIS_LTRIGGER, MotionEvent.AXIS_RTRIGGER,
                MotionEvent.AXIS_HAT_X, MotionEvent.AXIS_HAT_Y};
    } else {
        axes = new int[] {MotionEvent.AXIS_X, MotionEvent.AXIS_Y, MotionEvent.AXIS_Z,
                MotionEvent.AXIS_RX, MotionEvent.AXIS_RY, MotionEvent.AXIS_RZ,
                MotionEvent.AXIS_HAT_X, MotionEvent.AXIS_HAT_Y};
    }
    GamepadMappings mappings =
            GamepadMappings.getMappings(GamepadMappings.PS_DUALSHOCK_4_PRODUCT_ID,
                    GamepadMappings.PS_DUALSHOCK_4_VENDOR_ID, axes);
    mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
    if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
        // Changes in Android 9 caused the DualShock 4 mapping to change.
        // On P+ the face buttons line up with the common A/B/X/Y layout.
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.PRIMARY],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_A], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.SECONDARY],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_B], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.TERTIARY],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_X], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.QUATERNARY],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_Y], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_SHOULDER],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_L1], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_SHOULDER],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_R1], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_TRIGGER],
                mRawAxes[MotionEvent.AXIS_LTRIGGER], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_TRIGGER],
                mRawAxes[MotionEvent.AXIS_RTRIGGER], ERROR_TOLERANCE);
        assertMappedCommonThumbstickButtons();
    } else {
        // Pre-P firmware maps the face buttons and shoulders to different key
        // codes (B/C/A/X, Z/Y) and the triggers to the RX/RY axes.
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.PRIMARY],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_B], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.SECONDARY],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_C], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.TERTIARY],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_A], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.QUATERNARY],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_X], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_SHOULDER],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_Z], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_SHOULDER],
                mRawButtons[KeyEvent.KEYCODE_BUTTON_Y], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_TRIGGER],
                mRawAxes[MotionEvent.AXIS_RX], ERROR_TOLERANCE);
        Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_TRIGGER],
                mRawAxes[MotionEvent.AXIS_RY], ERROR_TOLERANCE);
        expectNoThumbstickButtons();
    }
    assertMappedCommonStartSelectMetaButtons();
    assertMappedXYAxes();
    assertMappedHatAxisToDpadButtons();
    assertMappedZAndRZAxesToRightStick();
    assertMapping(mappings);
}
@Test
@Feature({"Gamepad"})
public void testXboxOneSBluetooth2016FirmwareMappings() {
    // Xbox One S over Bluetooth with the original 2016 firmware reports a
    // shifted button layout (e.g. C/X for tertiary/quaternary, L1/R1 for
    // select/start) and triggers on the Z/RZ axes.
    int[] axes = new int[] {MotionEvent.AXIS_X, MotionEvent.AXIS_Y, MotionEvent.AXIS_Z,
            MotionEvent.AXIS_RZ, MotionEvent.AXIS_LTRIGGER, MotionEvent.AXIS_RTRIGGER,
            MotionEvent.AXIS_HAT_X, MotionEvent.AXIS_HAT_Y};
    GamepadMappings mappings =
            GamepadMappings.getMappings(GamepadMappings.XBOX_ONE_S_2016_FIRMWARE_PRODUCT_ID,
                    GamepadMappings.XBOX_ONE_S_2016_FIRMWARE_VENDOR_ID, axes);
    mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.PRIMARY],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_A], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.SECONDARY],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_B], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.TERTIARY],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_C], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.QUATERNARY],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_X], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_SHOULDER],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_Y], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_SHOULDER],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_Z], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_THUMBSTICK],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_L2], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_THUMBSTICK],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_R2], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.BACK_SELECT],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_L1], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.START],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_R1], ERROR_TOLERANCE);
    // The triggers range from -1 to 1 with -1 as the idle value.
    // The mapping is expected to renormalize them to the standard [0, 1].
    float leftTriggerValue = (mRawAxes[MotionEvent.AXIS_Z] + 1.0f) / 2.0f;
    float rightTriggerValue = (mRawAxes[MotionEvent.AXIS_RZ] + 1.0f) / 2.0f;
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_TRIGGER], leftTriggerValue,
            ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_TRIGGER], rightTriggerValue,
            ERROR_TOLERANCE);
    assertMappedHatAxisToDpadButtons();
    assertMappedXYAxes();
    assertMappedRXAndRYAxesToRightStick();
    expectNoMetaButton(mappings);
    assertMapping(mappings);
}
@Test
@Feature({"Gamepad"})
public void testXboxOneSBluetoothUsesDefaultMappings() {
    // Xbox One S gamepads with updated firmware connected over Bluetooth must
    // fall through to the default mapping: neither the product/vendor id
    // lookup nor the device-name lookup may return a special mapping.
    int[] axes = {MotionEvent.AXIS_X, MotionEvent.AXIS_Y, MotionEvent.AXIS_Z,
            MotionEvent.AXIS_RZ, MotionEvent.AXIS_LTRIGGER, MotionEvent.AXIS_RTRIGGER,
            MotionEvent.AXIS_HAT_X, MotionEvent.AXIS_HAT_Y};
    GamepadMappings byDeviceId = GamepadMappings.getMappings(
            XBOX_ONE_S_PRODUCT_ID, GamepadMappings.XBOX_ONE_S_2016_FIRMWARE_VENDOR_ID, axes);
    Assert.assertNull(byDeviceId);
    GamepadMappings byDeviceName = GamepadMappings.getMappings(XBOX_WIRELESS_DEVICE_NAME);
    Assert.assertNull(byDeviceName);
}
@Test
@Feature({"Gamepad"})
public void testIDroidConGamepadMappingsDigital() {
    // Snakebyte iDroid:con in "digital" mode: triggers are buttons (L2/R2)
    // and the right stick is on Z/RZ. No meta button is reported.
    int[] axes = {
            MotionEvent.AXIS_X,
            MotionEvent.AXIS_Y,
            MotionEvent.AXIS_Z,
            MotionEvent.AXIS_RZ,
            MotionEvent.AXIS_HAT_X,
            MotionEvent.AXIS_HAT_Y,
    };
    GamepadMappings mappings =
            GamepadMappings.getMappings(GamepadMappings.SNAKEBYTE_IDROIDCON_PRODUCT_ID,
                    GamepadMappings.BROADCOM_VENDOR_ID, axes);
    mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
    expectNoMetaButton(mappings);
    assertMappedCommonXYABButtons();
    assertMappedTriggerButtonsToTopShoulder();
    assertMappedCommonThumbstickButtons();
    assertMappedLowerTriggerButtonsToBottomShoulder();
    assertMappedHatAxisToDpadButtons();
    assertMappedXYAxes();
    assertMappedZAndRZAxesToRightStick();
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.START],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_START], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.BACK_SELECT],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_SELECT], ERROR_TOLERANCE);
    assertMapping(mappings);
}
@Test
@Feature({"Gamepad"})
public void testIDroidConGamepadMappingsAnalog() {
    // Snakebyte iDroid:con in "analog" mode: the right stick moves to RX/RY
    // and the Z axis carries the (shared) analog trigger value.
    int[] axes = {
            MotionEvent.AXIS_X,
            MotionEvent.AXIS_Y,
            MotionEvent.AXIS_Z,
            MotionEvent.AXIS_RX,
            MotionEvent.AXIS_RY,
            MotionEvent.AXIS_HAT_X,
            MotionEvent.AXIS_HAT_Y,
    };
    GamepadMappings mappings =
            GamepadMappings.getMappings(GamepadMappings.SNAKEBYTE_IDROIDCON_PRODUCT_ID,
                    GamepadMappings.BROADCOM_VENDOR_ID, axes);
    mappings.mapToStandardGamepad(mMappedAxes, mMappedButtons, mRawAxes, mRawButtons);
    expectNoMetaButton(mappings);
    assertMappedCommonXYABButtons();
    assertMappedTriggerButtonsToTopShoulder();
    assertMappedCommonThumbstickButtons();
    assertMappedHatAxisToDpadButtons();
    assertMappedXYAxes();
    assertMappedRXAndRYAxesToRightStick();
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.START],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_START], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.BACK_SELECT],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_SELECT], ERROR_TOLERANCE);
    // In analog mode the left trigger has no source, and the right trigger is
    // the negated Z axis value.
    Assert.assertEquals(
            mMappedButtons[CanonicalButtonIndex.LEFT_TRIGGER], 0.0, ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_TRIGGER],
            -mRawAxes[MotionEvent.AXIS_Z], ERROR_TOLERANCE);
    assertMapping(mappings);
}
/**
 * Asserts that the current gamepad mapping being tested matches the shield mappings:
 * common X/Y/A/B face buttons, triggers on the LTRIGGER/RTRIGGER axes, hat-axis d-pad
 * and the right stick on Z/RZ.
 */
public void assertShieldGamepadMappings(GamepadMappings mappings) {
    assertMappedCommonXYABButtons();
    assertMappedTriggerButtonsToTopShoulder();
    assertMappedCommonThumbstickButtons();
    assertMappedCommonStartSelectMetaButtons();
    assertMappedTriggerAxesToBottomShoulder();
    assertMappedHatAxisToDpadButtons();
    assertMappedXYAxes();
    assertMappedZAndRZAxesToRightStick();
    assertMapping(mappings);
}
/** Marks the shoulder buttons as expected-unmapped for the final {@link #assertMapping} check. */
public void expectNoShoulderButtons() {
    mUnmappedButtons.set(CanonicalButtonIndex.LEFT_SHOULDER);
    mUnmappedButtons.set(CanonicalButtonIndex.RIGHT_SHOULDER);
}
/**
 * Marks the meta button as expected-unmapped and verifies the mapping exposes fewer
 * buttons than the full canonical set.
 */
public void expectNoMetaButton(GamepadMappings mappings) {
    mUnmappedButtons.set(CanonicalButtonIndex.META);
    assertThat(mappings.getButtonsLength(), lessThan(CanonicalButtonIndex.COUNT));
}
/** Marks both thumbstick click buttons as expected-unmapped for {@link #assertMapping}. */
public void expectNoThumbstickButtons() {
    mUnmappedButtons.set(CanonicalButtonIndex.LEFT_THUMBSTICK);
    mUnmappedButtons.set(CanonicalButtonIndex.RIGHT_THUMBSTICK);
}
/**
 * Verifies mapping completeness: every canonical axis/button must either hold a real
 * value, or be NaN if it was explicitly expected to stay unmapped (via the
 * {@code expectNo*} helpers).
 */
public void assertMapping(GamepadMappings mappings) {
    for (int i = 0; i < mMappedAxes.length; i++) {
        boolean axisMapped = !Float.isNaN(mMappedAxes[i]);
        if (mUnmappedAxes.get(i)) {
            Assert.assertFalse("An unexpected axis was mapped at index " + i, axisMapped);
        } else {
            Assert.assertTrue("An axis was not mapped at index " + i, axisMapped);
        }
    }
    assertThat(mMappedButtons.length, greaterThanOrEqualTo(mappings.getButtonsLength()));
    for (int i = 0; i < mappings.getButtonsLength(); i++) {
        boolean buttonMapped = !Float.isNaN(mMappedButtons[i]);
        if (mUnmappedButtons.get(i)) {
            Assert.assertFalse(
                    "An unexpected button was mapped at index " + i, buttonMapped);
        } else {
            Assert.assertTrue("A button was not mapped at index " + i, buttonMapped);
        }
    }
}
/** Asserts the L1/R1 buttons were mapped to the bottom shoulder (trigger) positions. */
private void assertMappedUpperTriggerButtonsToBottomShoulder() {
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_TRIGGER],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_L1], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_TRIGGER],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_R1], ERROR_TOLERANCE);
}
/** Asserts the L2/R2 buttons were mapped to the bottom shoulder (trigger) positions. */
private void assertMappedLowerTriggerButtonsToBottomShoulder() {
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_TRIGGER],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_L2], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_TRIGGER],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_R2], ERROR_TOLERANCE);
}
/** Asserts the d-pad was mapped from the DPAD_* key events (not from hat axes). */
private void assertMappedCommonDpadButtons() {
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.DPAD_DOWN],
            mRawButtons[KeyEvent.KEYCODE_DPAD_DOWN], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.DPAD_UP],
            mRawButtons[KeyEvent.KEYCODE_DPAD_UP], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.DPAD_LEFT],
            mRawButtons[KeyEvent.KEYCODE_DPAD_LEFT], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.DPAD_RIGHT],
            mRawButtons[KeyEvent.KEYCODE_DPAD_RIGHT], ERROR_TOLERANCE);
}
/** Asserts the L1/R1 buttons were mapped to the top shoulder positions. */
private void assertMappedTriggerButtonsToTopShoulder() {
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_SHOULDER],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_L1], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_SHOULDER],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_R1], ERROR_TOLERANCE);
}
/** Asserts the standard face-button layout: A/B/X/Y to primary..quaternary. */
private void assertMappedCommonXYABButtons() {
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.PRIMARY],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_A], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.SECONDARY],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_B], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.TERTIARY],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_X], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.QUATERNARY],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_Y], ERROR_TOLERANCE);
}
/** Asserts the THUMBL/THUMBR buttons were mapped to the thumbstick clicks. */
private void assertMappedCommonThumbstickButtons() {
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_THUMBSTICK],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_THUMBL], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_THUMBSTICK],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_THUMBR], ERROR_TOLERANCE);
}
/** Asserts START/SELECT/MODE were mapped to start, back-select and meta. */
private void assertMappedCommonStartSelectMetaButtons() {
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.START],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_START], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.BACK_SELECT],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_SELECT], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.META],
            mRawButtons[KeyEvent.KEYCODE_BUTTON_MODE], ERROR_TOLERANCE);
}
/** Asserts the BRAKE/GAS pedal axes were mapped to the bottom shoulder triggers. */
private void assertMappedPedalAxesToBottomShoulder() {
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_TRIGGER],
            mRawAxes[MotionEvent.AXIS_BRAKE], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_TRIGGER],
            mRawAxes[MotionEvent.AXIS_GAS], ERROR_TOLERANCE);
}
/** Asserts the BRAKE/THROTTLE axis pair (alternate pedal layout) maps to the triggers. */
private void assertMappedAltPedalAxesToBottomShoulder() {
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_TRIGGER],
            mRawAxes[MotionEvent.AXIS_BRAKE], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_TRIGGER],
            mRawAxes[MotionEvent.AXIS_THROTTLE], ERROR_TOLERANCE);
}
/** Asserts the LTRIGGER/RTRIGGER axes were mapped to the bottom shoulder triggers. */
private void assertMappedTriggerAxesToBottomShoulder() {
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.LEFT_TRIGGER],
            mRawAxes[MotionEvent.AXIS_LTRIGGER], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.RIGHT_TRIGGER],
            mRawAxes[MotionEvent.AXIS_RTRIGGER], ERROR_TOLERANCE);
}
/**
 * Asserts the hat axes were converted into the four d-pad buttons: the negative half
 * of each axis maps to left/up, the positive half to right/down.
 */
private void assertMappedHatAxisToDpadButtons() {
    float hatX = mRawAxes[MotionEvent.AXIS_HAT_X];
    float hatY = mRawAxes[MotionEvent.AXIS_HAT_Y];
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.DPAD_LEFT],
            GamepadMappings.negativeAxisValueAsButton(hatX), ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.DPAD_RIGHT],
            GamepadMappings.positiveAxisValueAsButton(hatX), ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.DPAD_UP],
            GamepadMappings.negativeAxisValueAsButton(hatY), ERROR_TOLERANCE);
    Assert.assertEquals(mMappedButtons[CanonicalButtonIndex.DPAD_DOWN],
            GamepadMappings.positiveAxisValueAsButton(hatY), ERROR_TOLERANCE);
}
/** Asserts the X/Y axes were mapped to the left stick. */
private void assertMappedXYAxes() {
    Assert.assertEquals(mMappedAxes[CanonicalAxisIndex.LEFT_STICK_X],
            mRawAxes[MotionEvent.AXIS_X], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedAxes[CanonicalAxisIndex.LEFT_STICK_Y],
            mRawAxes[MotionEvent.AXIS_Y], ERROR_TOLERANCE);
}
/** Asserts the RX/RY axes were mapped to the right stick. */
private void assertMappedRXAndRYAxesToRightStick() {
    Assert.assertEquals(mMappedAxes[CanonicalAxisIndex.RIGHT_STICK_X],
            mRawAxes[MotionEvent.AXIS_RX], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedAxes[CanonicalAxisIndex.RIGHT_STICK_Y],
            mRawAxes[MotionEvent.AXIS_RY], ERROR_TOLERANCE);
}
/** Asserts the Z/RZ axes were mapped to the right stick. */
private void assertMappedZAndRZAxesToRightStick() {
    Assert.assertEquals(mMappedAxes[CanonicalAxisIndex.RIGHT_STICK_X],
            mRawAxes[MotionEvent.AXIS_Z], ERROR_TOLERANCE);
    Assert.assertEquals(mMappedAxes[CanonicalAxisIndex.RIGHT_STICK_Y],
            mRawAxes[MotionEvent.AXIS_RZ], ERROR_TOLERANCE);
}
}
|
package multiton;
import multiton.entity.TrainingRoom;
/**
 * Allocates training rooms to callers; consumer side of the {@code TrainingRoom}
 * multiton, which manages a fixed pool of rooms.
 */
public class RoomDistributor {

    /** Total number of training rooms managed by the multiton pool. */
    private static final int ROOM_COUNT = 3;

    /**
     * Applies for a training room.
     *
     * @return the allocated {@code TrainingRoom} instance, or {@code null} if every
     *         room is already in use or none could be acquired
     */
    public TrainingRoom applyRoom() {
        // Fast path: all rooms are already occupied, nothing to hand out.
        if (TrainingRoom.usingCount >= ROOM_COUNT) {
            System.out.println("Sorry, no empty room is left.");
            return null;
        }
        // Probe each room id in order until one can be acquired.
        for (int i = 1; i <= ROOM_COUNT; i++) {
            TrainingRoom trainingRoom = TrainingRoom.getRoom(i);
            // getRoom returned an instance -- the room was successfully acquired.
            if (trainingRoom != null) {
                System.out.println("Success.");
                return trainingRoom;
            }
        }
        // NOTE(review): reachable if usingCount changed between the check and the
        // probes (e.g. concurrent callers) -- TODO confirm single-threaded usage.
        return null;
    }
}
|
package rs.math.oop1.z090402.solid.z06.dobarOPovrsine;
import java.util.Scanner;
import static java.lang.System.in;
import static java.lang.System.out;
/**
 * Console demo: reads shape dimensions from standard input, prints each shape's area,
 * then prints the areas again polymorphically through the shared {@code Mera} interface.
 */
public class PokreniPovrsine {

    public static void main(String[] argumenti) {
        Scanner sc = new Scanner(in);

        // Circle: read the radius and report its area.
        out.printf("Unesite poluprecnik kruga: ");
        Krug k = new Krug(sc.nextDouble());
        out.printf("Povrsina kruga je: %f\n", k.povrsina());

        // Rectangle: read width and height, then report its area.
        out.printf("Unesite sirinu i visinu pravougaonika: ");
        double sirina = sc.nextDouble();
        double visina = sc.nextDouble();
        Pravougaonik p = new Pravougaonik(sirina, visina);
        out.printf("Povrsina pravougaonika je: %f\n\n", p.povrsina());
        sc.close();

        // Same objects accessed through the common interface.
        Mera[] oblici = {k, p};
        for (Mera oblik : oblici) {
            out.printf("Povrsina oblika je: %f\n", oblik.povrsina());
        }
    }
}
|
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package git4idea.history;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsActions;
import com.intellij.openapi.vcs.VcsConfiguration;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.annotate.ShowAllAffectedGenericAction;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.history.*;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Processor;
import com.intellij.util.ui.ColumnInfo;
import com.intellij.vcs.history.VcsHistoryProviderEx;
import com.intellij.vcs.log.Hash;
import com.intellij.vcsUtil.VcsUtil;
import git4idea.GitRevisionNumber;
import git4idea.GitUtil;
import git4idea.GitVcs;
import git4idea.changes.GitChangeUtils;
import git4idea.log.GitShowCommitInLogAction;
import git4idea.repo.GitRepository;
import git4idea.repo.GitRepositoryManager;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.Collections;
import java.util.List;
import static com.intellij.util.containers.ContainerUtil.getFirstItem;
/**
 * Git implementation of the VCS file-history provider.
 * <p>
 * History is collected by delegating to {@link GitFileHistory} (which runs {@code git log});
 * sessions can be cached, streamed incrementally via {@link #reportAppendableHistory} and
 * requested for directories as well as files.
 */
public class GitHistoryProvider implements VcsHistoryProviderEx,
                                           VcsCacheableHistorySessionFactory<Boolean, VcsAbstractHistorySession>,
                                           VcsBaseRevisionAdviser {
  private static final Logger LOG = Logger.getInstance(GitHistoryProvider.class.getName());

  @NotNull private final Project myProject;

  public GitHistoryProvider(@NotNull Project project) {
    myProject = project;
  }

  @Override
  public VcsDependentHistoryComponents getUICustomization(final VcsHistorySession session, JComponent forShortcutRegistration) {
    // No additional UI components; only the default column set is shown.
    return VcsDependentHistoryComponents.createOnlyColumns(ColumnInfo.EMPTY_ARRAY);
  }

  @Override
  public AnAction[] getAdditionalActions(Runnable refresher) {
    // Extra toolbar actions for the file-history view.
    return new AnAction[]{
      ShowAllAffectedGenericAction.getInstance(),
      ActionManager.getInstance().getAction(VcsActions.ACTION_COPY_REVISION_NUMBER),
      new GitShowCommitInLogAction()};
  }

  @Override
  public boolean isDateOmittable() {
    return false;
  }

  @Override
  @Nullable
  public String getHelpId() {
    return null;
  }

  // VcsCacheableHistorySessionFactory support: no alternative path is stored with the cache.
  @Override
  public FilePath getUsedFilePath(VcsAbstractHistorySession session) {
    return null;
  }

  // NOTE(review): the misspelling ("Addinionally") comes from the platform interface;
  // the name must stay as-is to satisfy @Override. No extra data is cached.
  @Override
  public Boolean getAddinionallyCachedData(VcsAbstractHistorySession session) {
    return null;
  }

  @Override
  public VcsAbstractHistorySession createFromCachedData(Boolean aBoolean,
                                                        @NotNull List<VcsFileRevision> revisions,
                                                        @NotNull FilePath filePath,
                                                        VcsRevisionNumber currentRevision) {
    return createSession(filePath, revisions, currentRevision);
  }

  /**
   * Collects the full history for the given path synchronously and wraps it in a session.
   * The newest revision (first item) is used as the session's current revision.
   */
  @Override
  @Nullable
  public VcsAbstractHistorySession createSessionFor(final FilePath filePath) throws VcsException {
    List<VcsFileRevision> revisions = GitFileHistory.collectHistory(myProject, filePath);
    return createSession(filePath, revisions, revisions.isEmpty() ? null : getFirstItem(revisions).getRevisionNumber());
  }

  private VcsAbstractHistorySession createSession(final FilePath filePath, final List<VcsFileRevision> revisions,
                                                  @Nullable final VcsRevisionNumber number) {
    return new VcsAbstractHistorySession(revisions, number) {
      @Override
      @Nullable
      protected VcsRevisionNumber calcCurrentRevisionNumber() {
        try {
          return GitHistoryUtils.getCurrentRevision(myProject, filePath, "HEAD");
        }
        catch (VcsException e) {
          // likely the file is not under VCS anymore.
          if (LOG.isDebugEnabled()) {
            LOG.debug("Unable to retrieve the current revision number", e);
          }
          return null;
        }
      }

      @Override
      public HistoryAsTreeProvider getHistoryAsTreeProvider() {
        // Git history is presented as a flat list, not as a tree.
        return null;
      }

      @Override
      public VcsHistorySession copy() {
        return createSession(filePath, getRevisionList(), getCurrentRevisionNumber());
      }
    };
  }

  /** Returns the most recent revision of the path, or {@code null} if there is no history. */
  @Nullable
  @Override
  public VcsFileRevision getLastRevision(FilePath filePath) throws VcsException {
    List<VcsFileRevision> history = GitFileHistory.collectHistory(myProject, filePath, "--max-count=1");
    if (history.isEmpty()) return null;
    return history.get(0);
  }

  /**
   * Feeds the file content at {@code beforeVersionId} to {@code processor}, used as the
   * base version when applying a patch.
   *
   * @return {@code true} if the processor consumed the content and stopped further handling
   * @throws VcsException if the revision cannot be found or its content cannot be loaded
   */
  @Override
  public boolean getBaseVersionContent(FilePath filePath, Processor<String> processor, String beforeVersionId) throws VcsException {
    if (StringUtil.isEmptyOrSpaces(beforeVersionId) || filePath.getVirtualFile() == null) return false;
    // apply if base revision id matches revision
    final VirtualFile root = GitUtil.getGitRoot(filePath);
    if (root == null) return false;

    Hash hash = GitChangeUtils.commitExists(myProject, root, beforeVersionId, null, "HEAD");
    if (hash == null) {
      throw new VcsException("Can not apply patch to " + filePath.getPath() + ".\nCan not find revision '" + beforeVersionId + "'.");
    }
    final ContentRevision content = GitVcs.getInstance(myProject).getDiffProvider()
      .createFileContent(new GitRevisionNumber(hash.asString()), filePath.getVirtualFile());
    if (content == null) {
      throw new VcsException("Can not load content of '" + filePath.getPath() + "' for revision '" + hash.asString() + "'");
    }
    // Processor returns true to continue; invert so "consumed" maps to true here.
    return !processor.process(content.getContent());
  }

  @Override
  public void reportAppendableHistory(FilePath path, VcsAppendableHistorySessionPartner partner) {
    reportAppendableHistory(path, null, partner);
  }

  /**
   * Streams history to {@code partner} revision by revision, optionally starting from
   * {@code startingRevision}, honoring the user's history-row limit setting.
   */
  @Override
  public void reportAppendableHistory(@NotNull FilePath path,
                                      @Nullable VcsRevisionNumber startingRevision,
                                      @NotNull final VcsAppendableHistorySessionPartner partner) {
    final VcsAbstractHistorySession emptySession = createSession(path, Collections.emptyList(), null);
    partner.reportCreatedEmptySession(emptySession);

    VcsConfiguration vcsConfiguration = VcsConfiguration.getInstance(myProject);
    String[] additionalArgs = vcsConfiguration.LIMIT_HISTORY ?
                              new String[]{"--max-count=" + vcsConfiguration.MAXIMUM_HISTORY_ROWS} :
                              ArrayUtil.EMPTY_STRING_ARRAY;
    GitFileHistory.loadHistory(myProject, refreshPath(path), null, startingRevision,
                               fileRevision -> partner.acceptRevision(fileRevision),
                               exception -> partner.reportException(exception),
                               additionalArgs);
  }

  /**
   * Refreshes the IO File inside this FilePath to let it survive moves.
   */
  @NotNull
  private static FilePath refreshPath(@NotNull FilePath path) {
    VirtualFile virtualFile = path.getVirtualFile();
    if (virtualFile == null) {
      return path;
    }
    return VcsUtil.getFilePath(virtualFile);
  }

  @Override
  public boolean supportsHistoryForDirectories() {
    return true;
  }

  @Override
  public DiffFromHistoryHandler getHistoryDiffHandler() {
    return new GitDiffFromHistoryHandler(myProject);
  }

  @Override
  public boolean canShowHistoryFor(@NotNull VirtualFile file) {
    // History is available only for files inside an initialized (non-fresh) repository.
    GitRepositoryManager manager = GitUtil.getRepositoryManager(myProject);
    GitRepository repository = manager.getRepositoryForFileQuick(file);
    return repository != null && !repository.isFresh();
  }
}
|
/*******************************************************************************
* Copyright (c) 2004 - 2006 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.birt.report.designer.util;
import java.util.Comparator;
import org.eclipse.birt.report.model.api.DesignElementHandle;
import com.ibm.icu.text.Collator;
/**
 * Comparator for design elements: orders two {@link DesignElementHandle}s by their
 * definition name using locale-sensitive collation, falling back to
 * {@code AlphabeticallyComparator} when the definition names are equal.
 * Non-handle arguments compare as equal.
 */
public class DesignElementComparator implements Comparator
{

	private boolean ascending = true;

	/**
	 * Compare the two objects.
	 *
	 * @param o1 object1
	 * @param o2 object2
	 * @return the compare result
	 */
	public int compare( Object o1, Object o2 )
	{
		// Only design element handles can be ordered; everything else is "equal".
		if ( !( o1 instanceof DesignElementHandle )
				|| !( o2 instanceof DesignElementHandle ) )
		{
			return 0;
		}

		String name1 = ( (DesignElementHandle) o1 ).getDefn( ).getName( );
		String name2 = ( (DesignElementHandle) o2 ).getDefn( ).getName( );

		// Locale-sensitive comparison; swap the operands for a descending sort.
		// The result is kept in a local (the original stored it in an instance
		// field, which made the comparator stateful and not thread-safe).
		int result = ascending
				? Collator.getInstance( ).compare( name1, name2 )
				: Collator.getInstance( ).compare( name2, name1 );
		if ( result != 0 )
		{
			return result;
		}

		// Definition names are equal -- fall back to alphabetical ordering.
		return new AlphabeticallyComparator( ).compare( o1, o2 );
	}
}
|
package frc.team2412.robot.commands.intake;
import edu.wpi.first.wpilibj2.command.CommandBase;
import frc.team2412.robot.subsystem.IntakeSubsystem;
/**
 * One-shot command that stops the intake. Scheduling it interrupts any other command
 * using the intake subsystem, issues a single stop, and finishes immediately.
 */
public class IntakeStopCommand extends CommandBase {

    private final IntakeSubsystem subsystem;

    /**
     * @param subsystem the intake subsystem to stop; declared as a requirement so the
     *        scheduler cancels conflicting commands
     */
    public IntakeStopCommand(IntakeSubsystem subsystem) {
        this.subsystem = subsystem;
        addRequirements(subsystem);
    }

    @Override
    public void execute() {
        subsystem.intakeStop();
    }

    // Returning true makes this command end after a single execute() call.
    @Override
    public boolean isFinished() {
        return true;
    }
}
|
/* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0 */
package net.sf.mmm.orient.impl.property;
import javax.inject.Named;
import com.orientechnologies.orient.core.metadata.schema.OProperty;
import com.orientechnologies.orient.core.metadata.schema.OType;
import net.sf.mmm.property.api.WritableProperty;
import net.sf.mmm.property.api.lang.ReadableStringProperty;
import net.sf.mmm.property.api.lang.StringProperty;
import net.sf.mmm.util.reflect.api.GenericType;
/**
 * The implementation of {@link SinglePropertyBuilder} for {@link OType#STRING}: maps
 * OrientDB string properties to {@link StringProperty}.
 *
 * @author hohwille
 * @since 1.0.0
 */
@Named
public class SinglePropertyBuilderString implements SinglePropertyBuilder<String> {

  /**
   * The constructor.
   */
  public SinglePropertyBuilderString() {
    super();
  }

  /** @return the OrientDB schema type handled by this builder. */
  @Override
  public OType getType() {
    return OType.STRING;
  }

  /** @return the Java value type corresponding to {@link OType#STRING}. */
  @Override
  public Class<String> getValueClass() {
    return String.class;
  }

  /** @return the property implementation used for string properties (ignores {@code oProperty}). */
  @Override
  public Class<? extends WritableProperty<String>> getPropertyType(OProperty oProperty) {
    return StringProperty.class;
  }

  /** @return the generic value type for string properties (ignores {@code oProperty}). */
  @Override
  public GenericType<String> getValueType(OProperty oProperty) {
    return ReadableStringProperty.TYPE;
  }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.datayoo.moql.operand.expression.relation;
import org.datayoo.moql.EntityMap;
import org.datayoo.moql.Operand;
import org.datayoo.moql.metadata.OperatorType;
import org.datayoo.moql.operand.constant.NullConstant;
/**
 * Relation expression implementing the {@code IS} (null test) operator: it
 * evaluates the left operand and yields {@code true} exactly when the result
 * is {@code null}. The right operand is always the shared {@link NullConstant}.
 *
 * @author Tang Tadin
 */
public class IsExpression extends AbstractRelationExpression {

    protected static NullConstant NULL = new NullConstant();

    /**
     * @param operand the operand whose value is tested against null
     */
    public IsExpression(Operand operand) {
        super(OperatorType.BINARY, RelationOperator.IS, operand, NULL);
    }

    /* (non-Javadoc)
     * @see org.moql.operand.Operand#operate(org.moql.data.EntityMap)
     */
    @Override
    public Object operate(EntityMap entityMap) {
        return booleanOperate(entityMap);
    }

    @Override
    public boolean booleanOperate(EntityMap entityMap) {
        // IS <null> holds exactly when the left operand evaluates to null.
        return lOperand.operate(entityMap) == null;
    }
}
|
package mage.cards.c;
import java.util.UUID;
import mage.MageInt;
import mage.abilities.common.DealsCombatDamageToAPlayerTriggeredAbility;
import mage.abilities.common.SimpleActivatedAbility;
import mage.abilities.costs.mana.ManaCostsImpl;
import mage.abilities.effects.common.RegenerateSourceEffect;
import mage.abilities.effects.common.discard.DiscardTargetEffect;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.SubType;
import mage.constants.Zone;
/**
 * Chilling Apparition -- {2}{B} creature, Spirit, 1/1.
 *
 * @author LoneFox
 */
public final class ChillingApparition extends CardImpl {

    public ChillingApparition(UUID ownerId, CardSetInfo setInfo) {
        super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{2}{B}");

        this.subtype.add(SubType.SPIRIT);
        this.power = new MageInt(1);
        this.toughness = new MageInt(1);

        // {B}: Regenerate Chilling Apparition.
        this.addAbility(new SimpleActivatedAbility(Zone.BATTLEFIELD, new RegenerateSourceEffect(), new ManaCostsImpl<>("{B}")));

        // Whenever Chilling Apparition deals combat damage to a player, that player discards a card.
        this.addAbility(new DealsCombatDamageToAPlayerTriggeredAbility(new DiscardTargetEffect(1), false, true));
    }

    private ChillingApparition(final ChillingApparition card) {
        super(card);
    }

    @Override
    public ChillingApparition copy() {
        return new ChillingApparition(this);
    }
}
|
/*
* BSD 3-Clause License
*
* Copyright (c) 2021, Yusuf Arfan Ismail
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
*
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.yusuf.bot.slash_commands.github_commands;
import net.dv8tion.jda.api.events.interaction.SlashCommandEvent;
import net.dv8tion.jda.api.interactions.commands.build.CommandData;
import net.yusuf.bot.slash_commands.Command;
/**
 * Slash command that replies with the Minecraft Forge GitHub organisation link.
 */
public class ForgeGithub extends Command {

    /** Command name registered with Discord. */
    @Override
    public String getName() {
        return "forge_github";
    }

    /** Short description shown in the Discord command picker. */
    @Override
    public String getDescription() {
        return "Shows Github link";
    }

    @Override
    public CommandData getCommandData() {
        return new CommandData(getName(), getDescription());
    }

    @Override
    public void onSlashCommand(SlashCommandEvent event) {
        // The link is static, so reply immediately without deferring.
        event.reply("https://github.com/MinecraftForge").queue();
    }
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Servlet stub for returning a file. Page generation is currently commented
 * out in {@link #processRequest}.
 *
 * @author agentmilindu
 */
@WebServlet(name="GetFile", urlPatterns={"/GetFile"})
public class GetFile extends HttpServlet {

    /**
     * Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods.
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    protected void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        response.setContentType("text/html;charset=UTF-8");
        // try-with-resources closes the writer even if output fails,
        // replacing the previous manual try/finally { out.close(); }.
        try (PrintWriter out = response.getWriter()) {
            /* TODO output your page here
            out.println("<html>");
            out.println("<head>");
            out.println("<title>Servlet GetFile</title>");
            out.println("</head>");
            out.println("<body>");
            out.println("<h1>Servlet GetFile at " + request.getContextPath () + "</h1>");
            out.println("</body>");
            out.println("</html>");
            */
        }
    }

    // <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
    /**
     * Handles the HTTP <code>GET</code> method.
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Handles the HTTP <code>POST</code> method.
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Returns a short description of the servlet.
     * @return a String containing servlet description
     */
    @Override
    public String getServletInfo() {
        return "Short description";
    }// </editor-fold>
}
|
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.systemtest;
import io.fabric8.kubernetes.api.model.Container;
import io.fabric8.kubernetes.api.model.EnvVar;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.api.model.Quantity;
import io.fabric8.kubernetes.api.model.rbac.ClusterRoleBinding;
import io.strimzi.api.kafka.Crds;
import io.strimzi.api.kafka.model.Kafka;
import io.strimzi.api.kafka.model.KafkaResources;
import io.strimzi.api.kafka.model.status.Condition;
import io.strimzi.operator.common.model.Labels;
import io.strimzi.systemtest.interfaces.IndicativeSentences;
import io.strimzi.systemtest.logs.TestExecutionWatcher;
import io.strimzi.systemtest.resources.kubernetes.ClusterRoleBindingResource;
import io.strimzi.systemtest.resources.kubernetes.NetworkPolicyResource;
import io.strimzi.systemtest.resources.kubernetes.RoleBindingResource;
import io.strimzi.systemtest.resources.operator.BundleResource;
import io.strimzi.systemtest.resources.specific.HelmResource;
import io.strimzi.systemtest.resources.specific.OlmResource;
import io.strimzi.systemtest.resources.ResourceManager;
import io.strimzi.systemtest.templates.kubernetes.ClusterRoleBindingTemplates;
import io.strimzi.systemtest.utils.StUtils;
import io.strimzi.systemtest.utils.kafkaUtils.KafkaTopicUtils;
import io.strimzi.systemtest.utils.kafkaUtils.KafkaUserUtils;
import io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils;
import io.strimzi.test.TestUtils;
import io.strimzi.test.executor.Exec;
import io.strimzi.test.interfaces.TestSeparator;
import io.strimzi.test.k8s.KubeClusterResource;
import io.strimzi.test.k8s.cluster.Minishift;
import io.strimzi.test.k8s.cluster.OpenShift;
import io.strimzi.test.timemeasuring.TimeMeasuringSystem;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayNameGeneration;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.ExtensionContext;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.Stack;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import static io.strimzi.systemtest.matchers.Matchers.logHasNoUnexpectedErrors;
import static io.strimzi.test.k8s.KubeClusterResource.cmdKubeClient;
import static io.strimzi.test.k8s.KubeClusterResource.kubeClient;
import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assumptions.assumeTrue;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@ExtendWith(TestExecutionWatcher.class)
@DisplayNameGeneration(IndicativeSentences.class)
public abstract class AbstractST implements TestSeparator {
// Register Strimzi custom resource kinds with the kubernetes client before anything else runs.
static {
Crds.registerCustomKinds();
}
// Finalizers added to load balancer services; null when the feature is disabled via Environment.
public static final List<String> LB_FINALIZERS;
static {
LB_FINALIZERS = Environment.LB_FINALIZERS ? List.of(Constants.LOAD_BALANCER_CLEANUP) : null;
}
// Handles for deploying the cluster operator via the different install methods (bundle/Helm/OLM).
protected final ResourceManager resourceManager = ResourceManager.getInstance();
protected final HelmResource helmResource = new HelmResource();
protected OlmResource olmResource;
protected KubeClusterResource cluster;
protected static TimeMeasuringSystem timeMeasuringSystem = TimeMeasuringSystem.getInstance();
private static final Logger LOGGER = LogManager.getLogger(AbstractST.class);
private final Object lock = new Object();
private final Object lockForTimeMeasuringSystem = new Object();
// maps for local variables {thread safe}
protected static Map<String, String> mapWithClusterNames = new HashMap<>();
protected static Map<String, String> mapWithTestTopics = new HashMap<>();
protected static Map<String, String> mapWithTestUsers = new HashMap<>();
protected static Map<String, String> mapWithKafkaClientNames = new HashMap<>();
private AtomicInteger counterOfNamespaces = new AtomicInteger(0);
protected static final String CLUSTER_NAME_PREFIX = "my-cluster-";
// Names of environment variables used to look up component images on the CO deployment.
protected static final String KAFKA_IMAGE_MAP = "STRIMZI_KAFKA_IMAGES";
protected static final String KAFKA_CONNECT_IMAGE_MAP = "STRIMZI_KAFKA_CONNECT_IMAGES";
protected static final String KAFKA_MIRROR_MAKER_2_IMAGE_MAP = "STRIMZI_KAFKA_MIRROR_MAKER_2_IMAGES";
protected static final String TO_IMAGE = "STRIMZI_DEFAULT_TOPIC_OPERATOR_IMAGE";
protected static final String UO_IMAGE = "STRIMZI_DEFAULT_USER_OPERATOR_IMAGE";
protected static final String KAFKA_INIT_IMAGE = "STRIMZI_DEFAULT_KAFKA_INIT_IMAGE";
protected static final String TLS_SIDECAR_EO_IMAGE = "STRIMZI_DEFAULT_TLS_SIDECAR_ENTITY_OPERATOR_IMAGE";
protected static final String TEST_TOPIC_NAME = "test-topic";
// Paths of install files applied by applyClusterOperatorInstallFiles(); popped again on cleanup.
private Stack<String> clusterOperatorConfigs = new Stack<>();
public static final String CO_INSTALL_DIR = TestUtils.USER_PATH + "/../packaging/install/cluster-operator";
public static Random rng = new Random();
public static final int MESSAGE_COUNT = 100;
public static final String USER_NAME = KafkaUserUtils.generateRandomNameOfKafkaUser();
public static final String TOPIC_NAME = KafkaTopicUtils.generateRandomNameOfTopic();
/**
 * Installs the Strimzi Cluster Operator based on environment variable configuration.
 * The operator can be installed the classic way (applying the bundle yamls), via Helm,
 * or via OLM; for OLM all other OLM env variables need to be set as well.
 * Don't use this method in tests where a specific configuration of the CO is needed.
 *
 * @param extensionContext JUnit extension context used for resource tracking
 * @param clusterOperatorName name of the CO deployment (used for the yaml bundle install only)
 * @param namespace namespace where CO should be installed into
 * @param bindingsNamespaces namespaces the (Cluster)RoleBindings are applied to (yaml bundle install only)
 * @param operationTimeout CO operation timeout passed to the deployment
 * @param reconciliationInterval CO reconciliation interval passed to the deployment
 */
protected void installClusterOperator(ExtensionContext extensionContext, String clusterOperatorName, String namespace, List<String> bindingsNamespaces, long operationTimeout, long reconciliationInterval) {
if (Environment.isOlmInstall()) {
LOGGER.info("Going to install ClusterOperator via OLM");
cluster.setNamespace(namespace);
cluster.createNamespace(namespace);
olmResource = new OlmResource(namespace);
olmResource.create(extensionContext, namespace, operationTimeout, reconciliationInterval);
} else if (Environment.isHelmInstall()) {
LOGGER.info("Going to install ClusterOperator via Helm");
cluster.setNamespace(namespace);
cluster.createNamespace(namespace);
helmResource.create(extensionContext, operationTimeout, reconciliationInterval);
} else {
// Default path: apply the yaml bundle (namespaces/CRDs, bindings, then the Deployment).
LOGGER.info("Going to install ClusterOperator via Yaml bundle");
prepareEnvForOperator(extensionContext, namespace, bindingsNamespaces);
if (Environment.isNamespaceRbacScope()) {
// if roles only, only deploy the rolebindings
applyRoleBindings(extensionContext, namespace, namespace);
} else {
applyBindings(extensionContext, namespace, bindingsNamespaces);
}
// 060-Deployment
ResourceManager.setCoDeploymentName(clusterOperatorName);
ResourceManager.getInstance().createResource(extensionContext, BundleResource.clusterOperator(clusterOperatorName, namespace, namespace, operationTimeout, reconciliationInterval).build());
}
}
/** Overload: bindings are applied only to the installation namespace itself. */
protected void installClusterOperator(ExtensionContext extensionContext, String clusterOperatorName, String namespace, long operationTimeout, long reconciliationInterval) {
installClusterOperator(extensionContext, clusterOperatorName, namespace, Collections.singletonList(namespace), operationTimeout, reconciliationInterval);
}
/** Overload: uses the default CO deployment name. */
protected void installClusterOperator(ExtensionContext extensionContext, String namespace, long operationTimeout, long reconciliationInterval) {
installClusterOperator(extensionContext, Constants.STRIMZI_DEPLOYMENT_NAME, namespace, operationTimeout, reconciliationInterval);
}
/** Overload: uses the default operation timeout and reconciliation interval. */
protected void installClusterOperator(ExtensionContext extensionContext, String name, String namespace) {
installClusterOperator(extensionContext, name, namespace, Constants.CO_OPERATION_TIMEOUT_DEFAULT, Constants.RECONCILIATION_INTERVAL);
}
/** Overload: default deployment name and reconciliation interval, custom operation timeout. */
protected void installClusterOperator(ExtensionContext extensionContext, String namespace, long operationTimeout) {
installClusterOperator(extensionContext, Constants.STRIMZI_DEPLOYMENT_NAME, namespace, operationTimeout, Constants.RECONCILIATION_INTERVAL);
}
/** Overload: all defaults -- deployment name, operation timeout and reconciliation interval. */
protected void installClusterOperator(ExtensionContext extensionContext, String namespace) {
installClusterOperator(extensionContext, Constants.STRIMZI_DEPLOYMENT_NAME, namespace, Constants.CO_OPERATION_TIMEOUT_DEFAULT, Constants.RECONCILIATION_INTERVAL);
}
/**
 * Installs the cluster operator with a cluster-wide watch: RoleBindings are applied in
 * the CO namespace and ClusterRoleBindings grant access to all namespaces ("*" watch).
 *
 * @param extensionContext JUnit extension context used for resource tracking
 * @param namespace namespace the CO deployment itself is created in
 * @param operationTimeout CO operation timeout passed to the deployment
 * @param reconciliationInterval CO reconciliation interval passed to the deployment
 */
public synchronized void installClusterWideClusterOperator(ExtensionContext extensionContext, String namespace, long operationTimeout, long reconciliationInterval) {
prepareEnvForOperator(extensionContext, namespace);
// Apply role bindings in CO namespace
applyBindings(extensionContext, namespace);
// Create ClusterRoleBindings that grant cluster-wide access to all OpenShift projects
List<ClusterRoleBinding> clusterRoleBindingList = ClusterRoleBindingTemplates.clusterRoleBindingsForAllNamespaces(namespace);
clusterRoleBindingList.forEach(clusterRoleBinding ->
ClusterRoleBindingResource.clusterRoleBinding(extensionContext, clusterRoleBinding));
// 060-Deployment
resourceManager.createResource(extensionContext, BundleResource.clusterOperator(namespace, "*", operationTimeout, reconciliationInterval).build());
}
/**
 * Applies the ServiceAccount, Role and CRD install files needed for a proper cluster
 * operator deployment. Files are loaded from the packaging/install/cluster-operator
 * directory; Binding and Deployment files are skipped because they are applied separately.
 * Applied file paths are remembered so {@link #deleteClusterOperatorInstallFiles()}
 * can clean them up again.
 *
 * @param namespace namespace the configuration files are applied into
 */
public void applyClusterOperatorInstallFiles(String namespace) {
    clusterOperatorConfigs.clear();
    // listFiles() returns null when the directory does not exist or is not readable;
    // fail fast with a clear message instead of an anonymous NullPointerException.
    File[] installFiles = new File(CO_INSTALL_DIR).listFiles();
    if (installFiles == null) {
        throw new IllegalStateException("Cluster operator install dir not found or not readable: " + CO_INSTALL_DIR);
    }
    List<File> operatorFiles = Arrays.stream(installFiles).sorted()
        .filter(File::isFile)
        .filter(file ->
            !file.getName().matches(".*(Binding|Deployment)-.*"))
        .collect(Collectors.toList());
    for (File operatorFile : operatorFiles) {
        File createFile = operatorFile;
        // In namespace-scoped RBAC mode ClusterRoles have to be downgraded to Roles.
        if (operatorFile.getName().contains("ClusterRole-")) {
            createFile = switchClusterRolesToRolesIfNeeded(createFile);
        }
        LOGGER.info("Creating configuration file: {}", createFile.getAbsolutePath());
        cmdKubeClient().namespace(namespace).createOrReplace(createFile);
        clusterOperatorConfigs.push(createFile.getPath());
    }
}
/**
 * Replaces all references to ClusterRole with Role when the installation is
 * namespace-scoped. This covers the ClusterRoles themselves as well as the
 * RoleBindings that reference them.
 *
 * @param oldFile original install file
 * @return the original file unchanged, or a temporary copy with every
 *         occurrence of "ClusterRole" replaced by "Role"
 */
public static File switchClusterRolesToRolesIfNeeded(File oldFile) {
    if (!Environment.isNamespaceRbacScope()) {
        // Cluster-wide RBAC: the file can be applied as-is.
        return oldFile;
    }
    try {
        String baseName = oldFile.getName().replace(".yaml", "");
        File tmpFile = File.createTempFile("rbac-" + baseName, ".yaml");
        String patchedContent = TestUtils.readFile(oldFile).replace("ClusterRole", "Role");
        TestUtils.writeFile(tmpFile.getAbsolutePath(), patchedContent);
        LOGGER.info("Replaced ClusterRole for Role in {}", oldFile.getAbsolutePath());
        return tmpFile;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Deletes the ServiceAccount, Roles and CRDs previously applied by
 * {@link #applyClusterOperatorInstallFiles(String)} from the kubernetes cluster,
 * in reverse order of their creation.
 */
public void deleteClusterOperatorInstallFiles() {
    // Drain the stack so repeated teardown calls are harmless no-ops.
    while (!clusterOperatorConfigs.empty()) {
        String configPath = clusterOperatorConfigs.pop();
        LOGGER.info("Deleting configuration file: {}", configPath);
        cmdKubeClient().delete(configPath);
    }
}
/**
 * Prepares the environment for the cluster operator: creates namespaces and custom
 * resources and applies operator-specific config files (ServiceAccount, Roles, CRDs).
 * Skipped entirely for Helm/OLM installs, which bring their own resources.
 *
 * @param extensionContext JUnit extension context used for resource tracking
 * @param clientNamespace namespace which will be created and used as default by the kube client
 * @param namespaces list of namespaces which will be created
 * @param resources list of paths to yaml files with resource specifications
 */
protected void prepareEnvForOperator(ExtensionContext extensionContext, String clientNamespace, List<String> namespaces, String... resources) {
assumeTrue(!Environment.isHelmInstall() && !Environment.isOlmInstall());
cluster.createNamespaces(extensionContext, clientNamespace, namespaces);
cluster.createCustomResources(resources);
applyClusterOperatorInstallFiles(clientNamespace);
NetworkPolicyResource.applyDefaultNetworkPolicySettings(extensionContext, namespaces);
// On OpenShift-like clusters, additionally allow pulling images from the internal registry.
if (cluster.cluster() instanceof Minishift || cluster.cluster() instanceof OpenShift) {
// This is needed in case you are using internal kubernetes registry and you want to pull images from there
if (kubeClient().getNamespace(Environment.STRIMZI_ORG) != null) {
for (String namespace : namespaces) {
LOGGER.debug("Setting group policy for Openshift registry in namespace: " + namespace);
Exec.exec(null, Arrays.asList("oc", "policy", "add-role-to-group", "system:image-puller", "system:serviceaccounts:" + namespace, "-n", Environment.STRIMZI_ORG), 0, false, false);
}
}
}
}
/**
 * Overload: prepares the environment using only the client namespace itself.
 *
 * @param extensionContext JUnit extension context used for resource tracking
 * @param clientNamespace namespace which will be created and used as default by the kube client
 * @param resources list of paths to yaml files with resource specifications
 */
protected void prepareEnvForOperator(ExtensionContext extensionContext, String clientNamespace, String... resources) {
prepareEnvForOperator(extensionContext, clientNamespace, Collections.singletonList(clientNamespace), resources);
}
/**
 * Overload: prepares the environment with no extra custom resource files.
 *
 * @param extensionContext JUnit extension context used for resource tracking
 * @param clientNamespace namespace which will be created and used as default by the kube client
 */
protected void prepareEnvForOperator(ExtensionContext extensionContext, String clientNamespace) {
prepareEnvForOperator(extensionContext, clientNamespace, Collections.singletonList(clientNamespace));
}
/**
 * Clears the cluster of everything created for the cluster operator: install
 * config files, custom resources and the created namespaces.
 */
protected void teardownEnvForOperator() {
deleteClusterOperatorInstallFiles();
cluster.deleteCustomResources();
cluster.deleteNamespaces();
}
/**
 * Applies the Strimzi cluster operator specific RoleBindings and ClusterRoleBindings
 * for the given namespaces.
 *
 * @param extensionContext JUnit extension context used for resource tracking
 * @param namespace namespace where CO will be deployed to
 * @param bindingsNamespaces list of namespaces where Bindings should be deployed to
 */
public static void applyBindings(ExtensionContext extensionContext, String namespace, List<String> bindingsNamespaces) {
    // ClusterRoleBindings are cluster-scoped and do not depend on the bindings namespace,
    // so apply them once instead of once per namespace (the previous loop re-applied them).
    if (!bindingsNamespaces.isEmpty()) {
        applyClusterRoleBindings(extensionContext, namespace);
    }
    for (String bindingsNamespace : bindingsNamespaces) {
        applyRoleBindings(extensionContext, namespace, bindingsNamespace);
    }
}
/**
 * Overload: applies the Role and ClusterRole bindings to the CO namespace only.
 *
 * @param extensionContext JUnit extension context used for resource tracking
 * @param namespace namespace where CO will be deployed to
 */
public static void applyBindings(ExtensionContext extensionContext, String namespace) {
applyBindings(extensionContext, namespace, Collections.singletonList(namespace));
}
/**
 * Overload: applies the Role and ClusterRole bindings to the given namespaces.
 *
 * @param extensionContext JUnit extension context used for resource tracking
 * @param namespace namespace where CO will be deployed to
 * @param bindingsNamespaces array of namespaces where Bindings should be deployed to
 */
public static void applyBindings(ExtensionContext extensionContext, String namespace, String... bindingsNamespaces) {
applyBindings(extensionContext, namespace, Arrays.asList(bindingsNamespaces));
}
/**
 * Applies the three cluster operator ClusterRoleBindings shipped in the install
 * bundle (021, 030 kafka-broker-delegation, 033 kafka-client-delegation).
 */
private static void applyClusterRoleBindings(ExtensionContext extensionContext, String namespace) {
    List<String> clusterRoleBindingFiles = Arrays.asList(
        "/cluster-operator/021-ClusterRoleBinding-strimzi-cluster-operator.yaml",
        "/cluster-operator/030-ClusterRoleBinding-strimzi-cluster-operator-kafka-broker-delegation.yaml",
        "/cluster-operator/033-ClusterRoleBinding-strimzi-cluster-operator-kafka-client-delegation.yaml");
    for (String file : clusterRoleBindingFiles) {
        ClusterRoleBindingResource.clusterRoleBinding(extensionContext, Constants.PATH_TO_PACKAGING_INSTALL_FILES + file, namespace);
    }
}
/**
 * Applies the two cluster operator RoleBindings shipped in the install bundle
 * (020, 031 entity-operator-delegation) into the bindings namespace.
 */
protected static void applyRoleBindings(ExtensionContext extensionContext, String namespace, String bindingsNamespace) {
    List<String> roleBindingFiles = Arrays.asList(
        "/cluster-operator/020-RoleBinding-strimzi-cluster-operator.yaml",
        "/cluster-operator/031-RoleBinding-strimzi-cluster-operator-entity-operator-delegation.yaml");
    for (String file : roleBindingFiles) {
        RoleBindingResource.roleBinding(extensionContext, Constants.PATH_TO_PACKAGING_INSTALL_FILES + file, namespace, bindingsNamespace);
    }
}
/**
 * Asserts that the named container of the given pod declares exactly the expected
 * resource limits and requests for memory and cpu.
 */
protected void assertResources(String namespace, String podName, String containerName, String memoryLimit, String cpuLimit, String memoryRequest, String cpuRequest) {
    Pod po = kubeClient(namespace).getPod(namespace, podName);
    assertThat("Not found an expected pod " + podName + " in namespace " + namespace + " but found " +
        kubeClient(namespace).listPods(namespace).stream().map(p -> p.getMetadata().getName()).collect(Collectors.toList()), po, is(notNullValue()));
    // Typed Optional instead of the previous raw type, so no cast is needed below.
    Optional<Container> optional = po.getSpec().getContainers().stream().filter(c -> c.getName().equals(containerName)).findFirst();
    assertThat("Not found an expected container " + containerName, optional.isPresent(), is(true));
    Container container = optional.get();
    Map<String, Quantity> limits = container.getResources().getLimits();
    assertThat(limits.get("memory"), is(new Quantity(memoryLimit)));
    assertThat(limits.get("cpu"), is(new Quantity(cpuLimit)));
    Map<String, Quantity> requests = container.getResources().getRequests();
    assertThat(requests.get("memory"), is(new Quantity(memoryRequest)));
    assertThat(requests.get("cpu"), is(new Quantity(cpuRequest)));
}
/**
 * Fails the test unless the java command line contains the expected JVM option
 * (e.g. an -Xmx value) as an exact element.
 */
private void assertCmdOption(List<String> cmd, String expectedXmx) {
    if (!cmd.contains(expectedXmx)) {
        // String.join replaces the previous stream().collect(Collectors.joining(...)).
        fail("Failed to find argument matching " + expectedXmx + " in java command line " + String.join("\n", cmd));
    }
}
/**
 * Collects the full command line (argv, NUL-separated in /proc) of every java
 * process running inside the given container, one inner list per process.
 *
 * NOTE(review): the 'cmd' parameter is currently unused -- the shell command is
 * hard-coded to scan java processes ("ps -C java"); confirm whether it should be
 * interpolated into the command or removed (the visible caller passes "java").
 */
private List<List<String>> commandLines(String namespaceName, String podName, String containerName, String cmd) {
List<List<String>> result = new ArrayList<>();
String output = cmdKubeClient().namespace(namespaceName).execInPodContainer(podName, containerName, "/bin/bash", "-c",
"for pid in $(ps -C java -o pid h); do cat /proc/$pid/cmdline; done"
).out();
// /proc/<pid>/cmdline separates arguments with NUL bytes; one output line per process.
for (String cmdLine : output.split("\n")) {
result.add(asList(cmdLine.split("\0")));
}
return result;
}
/** Overload: checks the JVM options in the kube client's current default namespace. */
protected void assertExpectedJavaOpts(String podName, String containerName, String expectedXmx, String expectedXms, String expectedXx) {
assertExpectedJavaOpts(kubeClient().getNamespace(), podName, containerName, expectedXmx, expectedXms, expectedXx);
}
/**
 * Asserts that exactly one java process runs in the container and that its JVM
 * arguments contain each non-null expected option (-Xmx, -Xms, -XX...).
 */
protected void assertExpectedJavaOpts(String namespaceName, String podName, String containerName, String expectedXmx, String expectedXms, String expectedXx) {
    List<List<String>> cmdLines = commandLines(namespaceName, podName, containerName, "java");
    assertThat("Expected exactly 1 java process to be running", cmdLines.size(), is(1));
    List<String> jvmArgs = cmdLines.get(0);
    int jarIndex = jvmArgs.indexOf("-jar");
    if (jarIndex != -1) {
        // Just consider arguments to the JVM, not the application running in it.
        // (Something similar would be needed for a main-class invocation, but
        // that's hard to do properly.)
        jvmArgs = jvmArgs.subList(0, jarIndex);
    }
    // Check the expected options in the same order as before: Xmx, Xms, Xx.
    for (String expected : Arrays.asList(expectedXmx, expectedXms, expectedXx)) {
        if (expected != null) {
            assertCmdOption(jvmArgs, expected);
        }
    }
}
/**
 * Collects every environment variable of every container of the cluster operator
 * deployment into a name-to-value map. As before, a variable defined in a later
 * container overwrites the value from an earlier one.
 */
public Map<String, String> getImagesFromConfig(String namespaceName) {
    Map<String, String> images = new HashMap<>();
    LOGGER.info(ResourceManager.getCoDeploymentName());
    kubeClient(namespaceName).getDeployment(namespaceName, ResourceManager.getCoDeploymentName())
        .getSpec().getTemplate().getSpec().getContainers()
        .forEach(container -> container.getEnv()
            .forEach(envVar -> images.put(envVar.getName(), envVar.getValue())));
    return images;
}
/**
 * Verifies container configuration for a specific component (kafka/zookeeper/bridge/mm)
 * by environment key: the properties parsed from the env var must contain all expected
 * configuration entries.
 *
 * @param namespaceName Namespace name where container is located
 * @param podNamePrefix Name prefix of the pod where the container is located
 * @param containerName The container to verify
 * @param configKey Expected configuration key (env var name holding the properties)
 * @param config Expected component configuration entries
 */
protected void checkComponentConfiguration(String namespaceName, String podNamePrefix, String containerName, String configKey, Map<String, Object> config) {
    LOGGER.info("Getting pods by prefix in name {}", podNamePrefix);
    List<Pod> pods = kubeClient(namespaceName).listPodsByPrefixInName(podNamePrefix);
    if (pods.isEmpty()) {
        fail("Pod with prefix " + podNamePrefix + " in name, not found");
    }
    LOGGER.info("Testing configuration for container {}", containerName);
    // findFirst() avoids collecting the whole stream just to take element 0, and a
    // missing env var now fails with a message instead of a bare IndexOutOfBoundsException.
    Map<String, Object> actual = pods.stream()
        .flatMap(p -> p.getSpec().getContainers().stream()) // get containers
        .filter(c -> c.getName().equals(containerName))
        .flatMap(c -> c.getEnv().stream().filter(envVar -> envVar.getName().equals(configKey)))
        .map(envVar -> StUtils.loadProperties(envVar.getValue()))
        .findFirst()
        .orElseThrow(() -> new AssertionError("Env var " + configKey + " not found in container " + containerName));
    assertThat(actual.entrySet().containsAll(config.entrySet()), is(true));
}
/** Overload: checks the component configuration in the kube client's current default namespace. */
protected void checkComponentConfiguration(String podNamePrefix, String containerName, String configKey, Map<String, Object> config) {
checkComponentConfiguration(kubeClient().getNamespace(), podNamePrefix, containerName, configKey, config);
}
/**
 * Verifies container environment variables passed as a map: the container's env vars
 * restricted to the expected keys must equal the expected map exactly.
 *
 * @param namespaceName Namespace name where container is located
 * @param podNamePrefix Name prefix of the pod where the container is located
 * @param containerName The container to verify
 * @param config Expected environment variables with values
 */
protected void checkSpecificVariablesInContainer(String namespaceName, String podNamePrefix, String containerName, Map<String, String> config) {
    LOGGER.info("Getting pods by prefix in name {}", podNamePrefix);
    List<Pod> pods = kubeClient(namespaceName).listPodsByPrefixInName(podNamePrefix);
    if (pods.isEmpty()) {
        fail("Pod with prefix " + podNamePrefix + " in name, not found");
    }
    LOGGER.info("Testing EnvVars configuration for container {}", containerName);
    // EnvVar values are Strings, so the collected map is now typed precisely
    // (was Map<String, Object>). On duplicate names the first occurrence wins, as before.
    Map<String, String> actual = pods.stream()
        .flatMap(p -> p.getSpec().getContainers().stream()) // get containers
        .filter(c -> c.getName().equals(containerName))
        .flatMap(c -> c.getEnv().stream().filter(envVar -> config.containsKey(envVar.getName())))
        .collect(Collectors.toMap(EnvVar::getName, EnvVar::getValue, (item, duplicatedItem) -> item));
    assertThat(actual, is(config));
}
/** Overload: checks the container env vars in the kube client's current default namespace. */
protected void checkSpecificVariablesInContainer(String podNamePrefix, String containerName, Map<String, String> config) {
checkSpecificVariablesInContainer(kubeClient().getNamespace(), podNamePrefix, containerName, config);
}
/**
 * Verifies the readinessProbe and livenessProbe properties of the expected container
 * in every pod whose name starts with the given prefix.
 *
 * @param namespaceName Namespace name where the container is located
 * @param podNamePrefix Prefix of pod name where container is located
 * @param containerName The container to verify
 * @param initialDelaySeconds expected value for property initialDelaySeconds
 * @param timeoutSeconds expected value for property timeoutSeconds
 * @param periodSeconds expected value for property periodSeconds
 * @param successThreshold expected value for property successThreshold
 * @param failureThreshold expected value for property failureThreshold
 */
protected void checkReadinessLivenessProbe(String namespaceName, String podNamePrefix, String containerName, int initialDelaySeconds, int timeoutSeconds,
                                           int periodSeconds, int successThreshold, int failureThreshold) {
    LOGGER.info("Getting pods by prefix {} in pod name", podNamePrefix);
    List<Pod> pods = kubeClient(namespaceName).listPodsByPrefixInName(podNamePrefix);
    if (pods.isEmpty()) {
        fail("Pod with prefix " + podNamePrefix + " in name, not found");
    }
    LOGGER.info("Testing Readiness and Liveness configuration for container {}", containerName);
    // Check both probes on every matching container of every matching pod.
    for (Pod pod : pods) {
        for (Container container : pod.getSpec().getContainers()) {
            if (!container.getName().equals(containerName)) {
                continue;
            }
            assertThat(container.getLivenessProbe().getInitialDelaySeconds(), is(initialDelaySeconds));
            assertThat(container.getReadinessProbe().getInitialDelaySeconds(), is(initialDelaySeconds));
            assertThat(container.getLivenessProbe().getTimeoutSeconds(), is(timeoutSeconds));
            assertThat(container.getReadinessProbe().getTimeoutSeconds(), is(timeoutSeconds));
            assertThat(container.getLivenessProbe().getPeriodSeconds(), is(periodSeconds));
            assertThat(container.getReadinessProbe().getPeriodSeconds(), is(periodSeconds));
            assertThat(container.getLivenessProbe().getSuccessThreshold(), is(successThreshold));
            assertThat(container.getReadinessProbe().getSuccessThreshold(), is(successThreshold));
            assertThat(container.getLivenessProbe().getFailureThreshold(), is(failureThreshold));
            assertThat(container.getReadinessProbe().getFailureThreshold(), is(failureThreshold));
        }
    }
}
protected void checkReadinessLivenessProbe(String podNamePrefix, String containerName, int initialDelaySeconds, int timeoutSeconds,
int periodSeconds, int successThreshold, int failureThreshold) {
checkReadinessLivenessProbe(kubeClient().getNamespace(), podNamePrefix, containerName, initialDelaySeconds,
timeoutSeconds, periodSeconds, successThreshold, failureThreshold);
}
/**
 * Verifies labels of all Kafka cluster resources, using the current namespace for both the
 * Cluster Operator and the cluster components.
 */
protected void verifyLabelsForKafkaCluster(String clusterName, String appName) {
    verifyLabelsForKafkaCluster(kubeClient().getNamespace(), kubeClient().getNamespace(), clusterName, appName);
}

/**
 * Verifies Strimzi labels across all resources belonging to a Kafka cluster: component pods
 * (zookeeper, kafka, entity-operator), the Cluster Operator pod, CRDs, services, secrets,
 * config maps, role bindings and service accounts.
 *
 * @param clusterOperatorNamespaceName namespace where the Cluster Operator runs
 * @param componentsNamespaceName      namespace where the Kafka cluster components run
 * @param clusterName                  name of the Kafka custom resource
 * @param appName                      expected value of the "app" label
 */
protected void verifyLabelsForKafkaCluster(String clusterOperatorNamespaceName, String componentsNamespaceName, String clusterName, String appName) {
    verifyLabelsOnPods(componentsNamespaceName, clusterName, "zookeeper", appName, Kafka.RESOURCE_KIND);
    verifyLabelsOnPods(componentsNamespaceName, clusterName, "kafka", appName, Kafka.RESOURCE_KIND);
    verifyLabelsOnCOPod(clusterOperatorNamespaceName);
    verifyLabelsOnPods(componentsNamespaceName, clusterName, "entity-operator", appName, Kafka.RESOURCE_KIND);
    verifyLabelsForCRDs(componentsNamespaceName);
    verifyLabelsForKafkaAndZKServices(componentsNamespaceName, clusterName, appName);
    verifyLabelsForSecrets(componentsNamespaceName, clusterName, appName);
    // Empty string: no additional cluster's config maps to verify in this flow.
    verifyLabelsForConfigMaps(componentsNamespaceName, clusterName, appName, "");
    verifyLabelsForRoleBindings(componentsNamespaceName, clusterName, appName);
    verifyLabelsForServiceAccounts(componentsNamespaceName, clusterName, appName);
}
/**
 * Verifies that the first Cluster Operator pod (matched by its "name" label) carries the
 * expected "name" label and the Strimzi kind label set to "cluster-operator".
 */
void verifyLabelsOnCOPod(String namespaceName) {
    LOGGER.info("Verifying labels for cluster-operator pod");

    // Assumes at least one CO pod exists; get(0) would throw otherwise.
    Map<String, String> coLabels = kubeClient(namespaceName).listPods("name", ResourceManager.getCoDeploymentName()).get(0).getMetadata().getLabels();
    assertThat(coLabels.get("name"), is(ResourceManager.getCoDeploymentName()));
    assertThat(coLabels.get(Labels.STRIMZI_KIND_LABEL), is("cluster-operator"));
}
/**
 * Overload using the default (current) namespace.
 */
protected void verifyLabelsOnPods(String clusterName, String podType, String appName, String kind) {
    verifyLabelsOnPods(kubeClient().getNamespace(), clusterName, podType, appName, kind);
}

/**
 * Verifies the Strimzi cluster/kind/name labels on every pod whose name starts with
 * {@code clusterName-podType}.
 *
 * @param namespaceName namespace holding the pods
 * @param clusterName   cluster name prefix
 * @param podType       pod type suffix, e.g. "kafka" or "zookeeper"
 * @param appName       expected "app" label value (currently unused here; kept for signature parity)
 * @param kind          expected STRIMZI_KIND_LABEL value
 */
protected void verifyLabelsOnPods(String namespaceName, String clusterName, String podType, String appName, String kind) {
    LOGGER.info("Verifying labels on pod type {}", podType);
    // Hoisted: the expected STRIMZI_NAME_LABEL value doubles as the pod-name prefix filter.
    String expectedName = clusterName.concat("-" + podType);
    kubeClient(namespaceName).listPods().stream()
        .filter(pod -> pod.getMetadata().getName().startsWith(expectedName))
        .forEach(pod -> {
            // Parameterized logging instead of string concatenation (matches the rest of the file).
            LOGGER.info("Verifying labels for pod: {}", pod.getMetadata().getName());
            assertThat(pod.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(clusterName));
            assertThat(pod.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is(kind));
            assertThat(pod.getMetadata().getLabels().get(Labels.STRIMZI_NAME_LABEL), is(expectedName));
        });
}
/**
 * Verifies that the number of installed Strimzi CRDs (selector app=strimzi) matches the
 * expected count reported by {@code Crds.getNumCrds()}.
 */
void verifyLabelsForCRDs(String namespaceName) {
    LOGGER.info("Verifying labels for CRDs");
    String crds = cmdKubeClient(namespaceName).exec("get", "crds", "--selector=app=strimzi", "-o", "jsonpath='{.items[*].metadata.name}'").out();
    // jsonpath output is space-separated; normalize to one CRD name per line and count them.
    String[] crdNames = crds.replace(" ", "\n").trim().split("\n");
    assertThat(crdNames.length, is(Crds.getNumCrds()));
}
/**
 * Verifies app/cluster/kind/name labels on the well-known Kafka and ZooKeeper services
 * (bootstrap, brokers, zookeeper nodes and client).
 *
 * @param namespaceName namespace holding the services
 * @param clusterName   Kafka cluster name
 * @param appName       expected "app" label value
 */
void verifyLabelsForKafkaAndZKServices(String namespaceName, String clusterName, String appName) {
    LOGGER.info("Verifying labels for Services");
    String kafkaServiceName = clusterName + "-kafka";
    String zookeeperServiceName = clusterName + "-zookeeper";

    // Maps service name -> expected STRIMZI_NAME_LABEL value.
    Map<String, String> servicesMap = new HashMap<>();
    servicesMap.put(kafkaServiceName + "-bootstrap", kafkaServiceName);
    servicesMap.put(kafkaServiceName + "-brokers", kafkaServiceName);
    servicesMap.put(zookeeperServiceName + "-nodes", zookeeperServiceName);
    servicesMap.put(zookeeperServiceName + "-client", zookeeperServiceName + "-client");

    // Iterate entries instead of keySet + get(): avoids a second map lookup per service.
    for (Map.Entry<String, String> expectedService : servicesMap.entrySet()) {
        String serviceName = expectedService.getKey();
        kubeClient(namespaceName).listServices(namespaceName).stream()
            .filter(service -> service.getMetadata().getName().equals(serviceName))
            .forEach(service -> {
                LOGGER.info("Verifying labels for service {}", serviceName);
                assertThat(service.getMetadata().getLabels().get("app"), is(appName));
                assertThat(service.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(clusterName));
                assertThat(service.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is("Kafka"));
                assertThat(service.getMetadata().getLabels().get(Labels.STRIMZI_NAME_LABEL), is(expectedService.getValue()));
            });
    }
}
/**
 * Overload using the default (current) namespace.
 *
 * Bug fix: this overload previously delegated to {@code verifyLabelsForConfigMaps(...)} — an
 * apparent copy-paste mistake — so service labels were never actually verified when calling it.
 */
protected void verifyLabelsForService(String clusterName, String serviceToTest, String kind) {
    verifyLabelsForService(kubeClient().getNamespace(), clusterName, serviceToTest, kind);
}

/**
 * Verifies cluster/kind/name labels on the service named {@code clusterName-serviceToTest}.
 *
 * @param namespaceName namespace holding the service
 * @param clusterName   cluster name prefix
 * @param serviceToTest service name suffix
 * @param kind          expected STRIMZI_KIND_LABEL value
 */
protected void verifyLabelsForService(String namespaceName, String clusterName, String serviceToTest, String kind) {
    LOGGER.info("Verifying labels for Kafka Connect Services");

    String serviceName = clusterName.concat("-").concat(serviceToTest);
    kubeClient(namespaceName).listServices().stream()
        .filter(service -> service.getMetadata().getName().equals(serviceName))
        .forEach(service -> {
            LOGGER.info("Verifying labels for service {}", service.getMetadata().getName());
            assertThat(service.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(clusterName));
            assertThat(service.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is(kind));
            assertThat(service.getMetadata().getLabels().get(Labels.STRIMZI_NAME_LABEL), is(serviceName));
        });
}
/**
 * Verifies app/kind/cluster labels on the cluster's CA/operator/certs secrets, matched by a
 * well-known name pattern.
 *
 * @param namespaceName namespace holding the secrets
 * @param clusterName   Kafka cluster name
 * @param appName       expected "app" label value
 */
void verifyLabelsForSecrets(String namespaceName, String clusterName, String appName) {
    LOGGER.info("Verifying labels for secrets");
    // Compile the secret-name regex once instead of re-compiling it via String.matches()
    // for every secret in the namespace. Fully-qualified to avoid touching the import block.
    java.util.regex.Pattern secretNamePattern = java.util.regex.Pattern.compile(
        "(" + clusterName + ")-(clients|cluster|(entity))(-operator)?(-ca)?(-certs?)?");

    kubeClient(namespaceName).listSecrets(namespaceName).stream()
        .filter(p -> secretNamePattern.matcher(p.getMetadata().getName()).matches())
        .forEach(p -> {
            LOGGER.info("Verifying secret {}", p.getMetadata().getName());
            assertThat(p.getMetadata().getLabels().get("app"), is(appName));
            assertThat(p.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is("Kafka"));
            assertThat(p.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(clusterName));
        });
}
/**
 * Overload using the default (current) namespace.
 */
protected void verifyLabelsForConfigMaps(String clusterName, String appName, String additionalClusterName) {
    verifyLabelsForConfigMaps(kubeClient().getNamespace(), clusterName, appName, additionalClusterName);
}

/**
 * Verifies Strimzi labels on config maps belonging to the given cluster(s). Config maps are
 * matched by well-known name suffixes; anything else in the namespace is logged and skipped.
 *
 * @param namespaceName         namespace holding the config maps
 * @param clusterName           primary cluster name
 * @param appName               expected "app" label value for Kafka config maps
 * @param additionalClusterName optional second cluster whose kafka-config map is also checked
 *                              (pass "" when there is none)
 */
protected void verifyLabelsForConfigMaps(String namespaceName, String clusterName, String appName, String additionalClusterName) {
    LOGGER.info("Verifying labels for Config maps");

    kubeClient(namespaceName).listConfigMaps()
        .forEach(cm -> {
            LOGGER.info("Verifying labels for CM {}", cm.getMetadata().getName());
            // Connect / MirrorMaker config maps carry a kind label but no "app" label.
            if (cm.getMetadata().getName().equals(clusterName.concat("-connect-config"))) {
                assertThat(cm.getMetadata().getLabels().get("app"), is(nullValue()));
                assertThat(cm.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is("KafkaConnect"));
            } else if (cm.getMetadata().getName().contains("-mirror-maker-config")) {
                assertThat(cm.getMetadata().getLabels().get("app"), is(nullValue()));
                assertThat(cm.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is("KafkaMirrorMaker"));
            } else if (cm.getMetadata().getName().contains("-mirrormaker2-config")) {
                assertThat(cm.getMetadata().getLabels().get("app"), is(nullValue()));
                assertThat(cm.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is("KafkaMirrorMaker2"));
            } else if (cm.getMetadata().getName().equals(clusterName.concat("-kafka-config"))) {
                // Kafka config maps carry the full app/kind/cluster label set.
                assertThat(cm.getMetadata().getLabels().get("app"), is(appName));
                assertThat(cm.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is("Kafka"));
                assertThat(cm.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(clusterName));
            } else if (cm.getMetadata().getName().equals(additionalClusterName.concat("-kafka-config"))) {
                assertThat(cm.getMetadata().getLabels().get("app"), is(appName));
                assertThat(cm.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is("Kafka"));
                assertThat(cm.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(additionalClusterName));
            } else {
                LOGGER.info("CM {} is not related to current test", cm.getMetadata().getName());
            }
        }
        );
}
/**
 * Overload using the default (current) namespace.
 */
protected void verifyLabelsForServiceAccounts(String clusterName, String appName) {
    verifyLabelsForServiceAccounts(kubeClient().getNamespace(), clusterName, appName);
}

/**
 * Verifies labels on the Cluster Operator service account and on all service accounts whose
 * names start with {@code clusterName}. Connect and MirrorMaker SAs carry a kind label but no
 * "app" label; all other cluster SAs are expected to be Kafka-kind with the given app label.
 *
 * @param namespaceName namespace holding the service accounts
 * @param clusterName   cluster name prefix of the SAs to check
 * @param appName       expected "app" label value for Kafka SAs
 */
protected void verifyLabelsForServiceAccounts(String namespaceName, String clusterName, String appName) {
    LOGGER.info("Verifying labels for Service Accounts");

    // The CO's own service account only carries app=strimzi.
    kubeClient(namespaceName).listServiceAccounts(namespaceName).stream()
        .filter(sa -> sa.getMetadata().getName().equals("strimzi-cluster-operator"))
        .forEach(sa -> {
            LOGGER.info("Verifying labels for service account {}", sa.getMetadata().getName());
            assertThat(sa.getMetadata().getLabels().get("app"), is("strimzi"));
        }
        );

    kubeClient(namespaceName).listServiceAccounts(namespaceName).stream()
        .filter(sa -> sa.getMetadata().getName().startsWith(clusterName))
        .forEach(sa -> {
            LOGGER.info("Verifying labels for service account {}", sa.getMetadata().getName());
            if (sa.getMetadata().getName().equals(clusterName.concat("-connect"))) {
                assertThat(sa.getMetadata().getLabels().get("app"), is(nullValue()));
                assertThat(sa.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is("KafkaConnect"));
            } else if (sa.getMetadata().getName().equals(clusterName.concat("-mirror-maker"))) {
                assertThat(sa.getMetadata().getLabels().get("app"), is(nullValue()));
                assertThat(sa.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is("KafkaMirrorMaker"));
            } else {
                assertThat(sa.getMetadata().getLabels().get("app"), is(appName));
                assertThat(sa.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is("Kafka"));
            }
            // All cluster-prefixed SAs must carry the cluster label regardless of kind.
            assertThat(sa.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(clusterName));
        }
        );
}
/**
 * Verifies labels on role bindings: the Cluster Operator's own bindings (prefixed
 * "strimzi-cluster-operator") must carry app=strimzi, while cluster-specific bindings
 * (prefixed "strimzi-&lt;clusterName&gt;") must carry the app/cluster/kind label set.
 *
 * @param namespaceName namespace holding the role bindings
 * @param clusterName   Kafka cluster name
 * @param appName       expected "app" label value for cluster-specific bindings
 */
void verifyLabelsForRoleBindings(String namespaceName, String clusterName, String appName) {
    LOGGER.info("Verifying labels for Cluster Role bindings");

    kubeClient(namespaceName).listRoleBindings(namespaceName).stream()
        .filter(rb -> rb.getMetadata().getName().startsWith("strimzi-cluster-operator"))
        .forEach(rb -> {
            LOGGER.info("Verifying labels for cluster role {}", rb.getMetadata().getName());
            assertThat(rb.getMetadata().getLabels().get("app"), is("strimzi"));
        });

    kubeClient(namespaceName).listRoleBindings(namespaceName).stream()
        .filter(rb -> rb.getMetadata().getName().startsWith("strimzi-".concat(clusterName)))
        .forEach(rb -> {
            LOGGER.info("Verifying labels for cluster role {}", rb.getMetadata().getName());
            assertThat(rb.getMetadata().getLabels().get("app"), is(appName));
            assertThat(rb.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(clusterName));
            assertThat(rb.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is("Kafka"));
        }
        );
}
/**
 * Verifies status and type of a custom-resource condition, skipping message/reason checks.
 */
protected void verifyCRStatusCondition(Condition condition, String status, Enum<?> type) {
    verifyCRStatusCondition(condition, null, null, status, type);
}

/**
 * Verifies a custom-resource status condition.
 *
 * Bug fix: the message/reason assertions were previously gated on the *actual* condition's
 * fields being non-null, so (a) an expected message was silently skipped when the condition
 * lacked one, and (b) the 3-arg overload could NPE via containsString(null) when the condition
 * did carry message+reason. Gate on the caller-supplied expected values instead.
 *
 * @param condition condition taken from the CR status
 * @param message   expected substring of the condition message, or null to skip the check
 * @param reason    expected condition reason, or null to skip the check
 * @param status    expected condition status ("True"/"False")
 * @param type      expected condition type
 */
protected void verifyCRStatusCondition(Condition condition, String message, String reason, String status, Enum<?> type) {
    assertThat(condition.getStatus(), is(status));
    assertThat(condition.getType(), is(type.toString()));

    if (message != null) {
        assertThat(condition.getMessage(), containsString(message));
    }
    if (reason != null) {
        assertThat(condition.getReason(), is(reason));
    }
}
/**
 * Asserts that the Cluster Operator log contains no unexpected Exception/Error/Throwable
 * entries within the last {@code sinceSeconds} seconds.
 *
 * @param namespaceName namespace where the CO deployment runs
 * @param sinceSeconds  how far back in the log to search
 */
protected void assertNoCoErrorsLogged(String namespaceName, long sinceSeconds) {
    LOGGER.info("Search in strimzi-cluster-operator log for errors in last {} seconds", sinceSeconds);
    // Bug fix: namespaceName was accepted but ignored — the log was always searched via the
    // default-namespace client. Pass it through so the right CO deployment is inspected.
    String clusterOperatorLog = cmdKubeClient(namespaceName).searchInLog("deploy", ResourceManager.getCoDeploymentName(), sinceSeconds, "Exception", "Error", "Throwable");
    assertThat(clusterOperatorLog, logHasNoUnexpectedErrors());
}

/**
 * Overload using the default (current) namespace.
 */
protected void assertNoCoErrorsLogged(long sinceSeconds) {
    assertNoCoErrorsLogged(kubeClient().getNamespace(), sinceSeconds);
}
/**
 * Verifies that every component of a deployed Kafka cluster (zookeeper, kafka, rack-awareness
 * init container, entity-operator containers) runs the docker image configured in the Cluster
 * Operator's deployment config for the cluster's Kafka version.
 *
 * @param clusterName                  name of the Kafka custom resource
 * @param clusterOperatorNamespaceName namespace of the Cluster Operator (source of image config)
 * @param kafkaNamespaceName           namespace of the Kafka cluster components
 * @param kafkaPods                    number of kafka broker pods to check
 * @param zkPods                       number of zookeeper pods to check
 * @param rackAwareEnabled             whether to also verify the kafka init-container image
 */
protected void testDockerImagesForKafkaCluster(String clusterName, String clusterOperatorNamespaceName, String kafkaNamespaceName,
                                               int kafkaPods, int zkPods, boolean rackAwareEnabled) {
    LOGGER.info("Verifying docker image names");
    //Verifying docker image for cluster-operator
    Map<String, String> imgFromDeplConf = getImagesFromConfig(clusterOperatorNamespaceName);

    // Fall back to the default test Kafka version when the CR doesn't pin one explicitly.
    String kafkaVersion = Crds.kafkaOperation(kubeClient(kafkaNamespaceName).getClient()).inNamespace(kafkaNamespaceName).withName(clusterName).get().getSpec().getKafka().getVersion();
    if (kafkaVersion == null) {
        kafkaVersion = Environment.ST_KAFKA_VERSION;
    }

    //Verifying docker image for zookeeper pods
    // NOTE(review): the zookeeper image is looked up in KAFKA_IMAGE_MAP — presumably ZooKeeper
    // runs from the Kafka image here; confirm against the operator's image configuration.
    for (int i = 0; i < zkPods; i++) {
        String imgFromPod = PodUtils.getContainerImageNameFromPod(kafkaNamespaceName, KafkaResources.zookeeperPodName(clusterName, i), "zookeeper");
        assertThat("Zookeeper pod " + i + " uses wrong image", imgFromPod, containsString(TestUtils.parseImageMap(imgFromDeplConf.get(KAFKA_IMAGE_MAP)).get(kafkaVersion)));
    }

    //Verifying docker image for kafka pods
    for (int i = 0; i < kafkaPods; i++) {
        String imgFromPod = PodUtils.getContainerImageNameFromPod(kafkaNamespaceName, KafkaResources.kafkaPodName(clusterName, i), "kafka");
        assertThat("Kafka pod " + i + " uses wrong image", imgFromPod, containsString(TestUtils.parseImageMap(imgFromDeplConf.get(KAFKA_IMAGE_MAP)).get(kafkaVersion)));
        if (rackAwareEnabled) {
            String initContainerImage = PodUtils.getInitContainerImageName(KafkaResources.kafkaPodName(clusterName, i));
            assertThat(initContainerImage, is(imgFromDeplConf.get(KAFKA_INIT_IMAGE)));
        }
    }

    //Verifying docker image for entity-operator
    String entityOperatorPodName = cmdKubeClient(kafkaNamespaceName).listResourcesByLabel("pod",
        Labels.STRIMZI_NAME_LABEL + "=" + clusterName + "-entity-operator").get(0);
    String imgFromPod = PodUtils.getContainerImageNameFromPod(kafkaNamespaceName, entityOperatorPodName, "topic-operator");
    assertThat(imgFromPod, containsString(imgFromDeplConf.get(TO_IMAGE)));
    imgFromPod = PodUtils.getContainerImageNameFromPod(kafkaNamespaceName, entityOperatorPodName, "user-operator");
    assertThat(imgFromPod, containsString(imgFromDeplConf.get(UO_IMAGE)));
    imgFromPod = PodUtils.getContainerImageNameFromPod(kafkaNamespaceName, entityOperatorPodName, "tls-sidecar");
    assertThat(imgFromPod, containsString(imgFromDeplConf.get(TLS_SIDECAR_EO_IMAGE)));

    LOGGER.info("Docker images verified");
}

/**
 * Overload where the Cluster Operator and the Kafka cluster share one namespace.
 */
protected void testDockerImagesForKafkaCluster(String clusterName, String namespaceName,
                                               int kafkaPods, int zkPods, boolean rackAwareEnabled) {
    testDockerImagesForKafkaCluster(clusterName, namespaceName, namespaceName, kafkaPods, zkPods, rackAwareEnabled);
}
/**
 * Per-test teardown hook: deletes the test's resources and, for parallel-namespace tests,
 * the namespace created for the test case. No-op when teardown is globally skipped.
 */
protected void afterEachMayOverride(ExtensionContext extensionContext) throws Exception {
    // Guard clause instead of wrapping the whole body in the negated condition.
    if (Environment.SKIP_TEARDOWN) {
        return;
    }

    ResourceManager.getInstance().deleteResources(extensionContext);

    // if 'parallel namespace test' we are gonna delete namespace
    if (StUtils.isParallelNamespaceTest(extensionContext)) {
        final String namespaceToDelete = extensionContext.getStore(ExtensionContext.Namespace.GLOBAL).get(Constants.NAMESPACE_KEY).toString();
        LOGGER.info("Deleting namespace:{} for test case:{}", namespaceToDelete, extensionContext.getDisplayName());
        cluster.deleteNamespace(extensionContext, namespaceToDelete);
    }
}

/**
 * Per-class teardown hook: tears down the operator environment and deletes remaining
 * resources. No-op when teardown is globally skipped.
 */
protected void afterAllMayOverride(ExtensionContext extensionContext) throws Exception {
    if (Environment.SKIP_TEARDOWN) {
        return;
    }

    teardownEnvForOperator();
    ResourceManager.getInstance().deleteResources(extensionContext);
}
/**
 * beforeEachMayOverride gives subclasses the option to override the per-test setup and still
 * run this base implementation via {@code super.beforeEachMayOverride()}; subclasses may also
 * skip it entirely and use their own implementation.
 *
 * @param extensionContext JUnit extension context of the current test
 */
protected void beforeEachMayOverride(ExtensionContext extensionContext) {
    // this is because we need to have different clusterName and kafkaClientsName in each test case without
    // synchronization it can produce `data-race`
    String testName = null;

    synchronized (lock) {
        // Removed an unused local `testClass` that was assigned from the context but never read.
        if (extensionContext.getTestMethod().isPresent()) {
            testName = extensionContext.getTestMethod().get().getName();
        }

        LOGGER.info("Not first test we are gonna generate cluster name");
        String clusterName = CLUSTER_NAME_PREFIX + new Random().nextInt(Integer.MAX_VALUE);

        mapWithClusterNames.put(testName, clusterName);
        mapWithTestTopics.put(testName, KafkaTopicUtils.generateRandomNameOfTopic());
        mapWithTestUsers.put(testName, KafkaUserUtils.generateRandomNameOfKafkaUser());
        mapWithKafkaClientNames.put(testName, clusterName + "-" + Constants.KAFKA_CLIENTS);

        LOGGER.debug("CLUSTER_NAMES_MAP: \n{}", mapWithClusterNames);
        LOGGER.debug("USERS_NAME_MAP: \n{}", mapWithTestUsers);
        LOGGER.debug("TOPIC_NAMES_MAP: \n{}", mapWithTestTopics);
        LOGGER.debug("============THIS IS CLIENTS MAP:\n{}", mapWithKafkaClientNames);

        // if 'parallel namespace test' we are gonna create a dedicated namespace for the test case
        if (StUtils.isParallelNamespaceTest(extensionContext)) {
            final String namespaceTestCase = "namespace-" + counterOfNamespaces.getAndIncrement();

            extensionContext.getStore(ExtensionContext.Namespace.GLOBAL).put(Constants.NAMESPACE_KEY, namespaceTestCase);
            LOGGER.info("Creating namespace:{} for test case:{}", namespaceTestCase, testName);
            cluster.createNamespace(namespaceTestCase);
            NetworkPolicyResource.applyDefaultNetworkPolicySettings(extensionContext, Collections.singletonList(namespaceTestCase));
        }
    }
}
/**
 * beforeAllMayOverride gives subclasses the option to override the per-class setup and still
 * run this base implementation via {@code super.beforeAllMayOverride()}; subclasses may also
 * skip it and use their own implementation.
 *
 * @param extensionContext JUnit extension context of the current test class
 */
protected void beforeAllMayOverride(ExtensionContext extensionContext) {
    // Removed dead code: a local `testClass` was assigned from extensionContext.getTestClass()
    // but never used; getTestClass() has no side effects, so dropping it changes nothing.
    cluster = KubeClusterResource.getInstance();
}
/** JUnit entry point for per-test setup; delegates to the overridable hook. */
@BeforeEach
void setUpTestCase(ExtensionContext extensionContext) {
    // Visual separator in the debug log between lifecycle phases.
    LOGGER.debug(String.join("", Collections.nCopies(76, "=")));
    LOGGER.debug("{} - [BEFORE EACH] has been called", this.getClass().getName());
    beforeEachMayOverride(extensionContext);
}

/** JUnit entry point for per-class setup; delegates to the overridable hook. */
@BeforeAll
void setUpTestSuite(ExtensionContext extensionContext) {
    LOGGER.debug(String.join("", Collections.nCopies(76, "=")));
    LOGGER.debug("{} - [BEFORE ALL] has been called", this.getClass().getName());
    beforeAllMayOverride(extensionContext);
}

/** JUnit entry point for per-test teardown; delegates to the overridable hook. */
@AfterEach
void tearDownTestCase(ExtensionContext extensionContext) throws Exception {
    LOGGER.debug(String.join("", Collections.nCopies(76, "=")));
    LOGGER.debug("{} - [AFTER EACH] has been called", this.getClass().getName());
    afterEachMayOverride(extensionContext);
}

/** JUnit entry point for per-class teardown; delegates to the overridable hook. */
@AfterAll
void tearDownTestSuite(ExtensionContext extensionContext) throws Exception {
    LOGGER.debug(String.join("", Collections.nCopies(76, "=")));
    LOGGER.debug("{} - [AFTER ALL] has been called", this.getClass().getName());
    afterAllMayOverride(extensionContext);
}
}
|
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webauthn4j.validator.attestation.statement.tpm;
import com.webauthn4j.data.attestation.authenticator.AAGUID;
import com.webauthn4j.data.attestation.authenticator.AuthenticatorData;
import com.webauthn4j.data.attestation.statement.*;
import com.webauthn4j.data.extension.authenticator.RegistrationExtensionAuthenticatorOutput;
import com.webauthn4j.data.SignatureAlgorithm;
import com.webauthn4j.data.x500.X500Name;
import com.webauthn4j.util.MessageDigestUtil;
import com.webauthn4j.util.SignatureUtil;
import com.webauthn4j.util.UnsignedNumberUtil;
import com.webauthn4j.validator.CoreRegistrationObject;
import com.webauthn4j.validator.attestation.statement.AbstractStatementValidator;
import com.webauthn4j.validator.exception.BadAttestationStatementException;
import org.apache.kerby.asn1.type.Asn1Utf8String;
import org.apache.kerby.asn1.util.HexUtil;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.security.*;
import java.security.cert.CertificateParsingException;
import java.security.cert.X509Certificate;
import java.security.interfaces.ECPublicKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.EllipticCurve;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
public class TPMAttestationStatementValidator extends AbstractStatementValidator<TPMAttestationStatement> {
private static final String ID_FIDO_GEN_CE_AAGUID = "1.3.6.1.4.1.45724.1.1.4";
// ~ Instance fields
// ================================================================================================
private TPMDevicePropertyValidator tpmDevicePropertyValidator = new NullTPMDevicePropertyValidator();
/**
 * Validates a TPM attestation statement per WebAuthn §8.3, following the spec's prescribed
 * order: public-key equality, certInfo magic/type, extraData hash, pubArea name digest, then
 * the x5c certificate chain. ECDAA is not supported — absence of x5c is rejected.
 *
 * @param registrationObject registration data under validation
 * @return AttestationType.ATT_CA when x5c validation succeeds
 * @throws BadAttestationStatementException on any failed check
 */
@Override
public AttestationType validate(CoreRegistrationObject registrationObject) {
    if (!supports(registrationObject)) {
        throw new IllegalArgumentException("Specified format is not supported by " + this.getClass().getName());
    }
    TPMAttestationStatement attestationStatement = (TPMAttestationStatement) registrationObject.getAttestationObject().getAttestationStatement();
    // Only TPM 2.0 statements are handled.
    if (!attestationStatement.getVer().equals(TPMAttestationStatement.VERSION_2_0)) {
        throw new BadAttestationStatementException("TPM version is not supported.");
    }

    TPMSAttest certInfo = attestationStatement.getCertInfo();
    TPMTPublic pubArea = attestationStatement.getPubArea();
    AuthenticatorData<RegistrationExtensionAuthenticatorOutput> authenticatorData = registrationObject.getAttestationObject().getAuthenticatorData();

    /// Verify that the public key specified by the parameters and unique fields of pubArea is identical to the credentialPublicKey in the attestedCredentialData in authenticatorData.
    validatePublicKeyEquality(pubArea, authenticatorData);

    /// Concatenate authenticatorData and clientDataHash to form attToBeSigned.
    byte[] attToBeSigned = getAttToBeSigned(registrationObject);

    /// Validate that certInfo is valid:

    /// Verify that magic is set to TPM_GENERATED_VALUE.
    if (certInfo.getMagic() != TPMGenerated.TPM_GENERATED_VALUE) {
        throw new BadAttestationStatementException("magic must be TPM_GENERATED_VALUE");
    }

    /// Verify that type is set to TPM_ST_ATTEST_CERTIFY.
    if (certInfo.getType() != TPMISTAttest.TPM_ST_ATTEST_CERTIFY) {
        throw new BadAttestationStatementException("type must be TPM_ST_ATTEST_CERTIFY");
    }

    /// Verify that extraData is set to the hash of attToBeSigned using the hash algorithm employed in "alg".
    COSEAlgorithmIdentifier alg = attestationStatement.getAlg();
    MessageDigest messageDigest = getMessageDigest(alg);
    byte[] hash = messageDigest.digest(attToBeSigned);
    if (!Arrays.equals(certInfo.getExtraData(), hash)) {
        throw new BadAttestationStatementException("extraData must be equals to the hash of attToBeSigned");
    }

    /// Verify that attested contains a TPMS_CERTIFY_INFO structure as specified in [TPMv2-Part2] section 10.12.3,
    /// whose name field contains a valid Name for pubArea, as computed using the algorithm in the nameAlg field of
    /// pubArea using the procedure specified in [TPMv2-Part1] section 16.
    TPMSCertifyInfo certifyInfo = (TPMSCertifyInfo) certInfo.getAttested();
    TPMIAlgHash hashAlg = certifyInfo.getName().getHashAlg();
    String algJcaName;
    algJcaName = getAlgJcaName(hashAlg);
    byte[] pubAreaDigest = MessageDigestUtil.createMessageDigest(algJcaName).digest(pubArea.getBytes());
    if (!Arrays.equals(pubAreaDigest, certifyInfo.getName().getDigest())) {
        throw new BadAttestationStatementException("hash of `attested` doesn't match with name field of certifyInfo");
    }

    /// Note that the remaining fields in the "Standard Attestation Structure" [TPMv2-Part1] section 31.2,
    /// i.e., qualifiedSigner, clockInfo and firmwareVersion are ignored. These fields MAY be used as an input to risk engines.

    /// If x5c is present, this indicates that the attestation type is not ECDAA. In this case:
    if (attestationStatement.getX5c() != null) {
        validateX5c(attestationStatement, certInfo, authenticatorData);
        /// If successful, return implementation-specific values representing attestation type AttCA and attestation trust path x5c.
        return AttestationType.ATT_CA;
    }
    // ECDAA attestation is not implemented; reaching here means neither trust path is usable.
    throw new BadAttestationStatementException("`x5c` or `ecdaaKeyId` must be present.");
}
/**
 * Resolves the MessageDigest implied by a COSE algorithm identifier.
 *
 * @param alg COSE algorithm from the attestation statement
 * @return a fresh MessageDigest for the algorithm's digest
 * @throws BadAttestationStatementException when alg is not a signature algorithm
 */
private MessageDigest getMessageDigest(COSEAlgorithmIdentifier alg) {
    try {
        SignatureAlgorithm signatureAlgorithm = alg.toSignatureAlgorithm();
        return signatureAlgorithm.getMessageDigestAlgorithm().createMessageDigestObject();
    } catch (IllegalArgumentException e) {
        // Translate the low-level failure into the domain exception callers expect.
        throw new BadAttestationStatementException("alg is not signature algorithm", e);
    }
}
/**
 * Validates the x5c trust path: checks the sig over certInfo against the AIK certificate's
 * public key, validates the AIK certificate's profile, and cross-checks the optional
 * id-fido-gen-ce-aaguid extension against the authenticator data's AAGUID.
 *
 * @param attestationStatement the TPM attestation statement carrying x5c/alg/sig
 * @param certInfo             the signed TPMS_ATTEST structure
 * @param authenticatorData    authenticator data holding the expected AAGUID
 * @throws BadAttestationStatementException on any failed check
 */
private void validateX5c(TPMAttestationStatement attestationStatement, TPMSAttest certInfo, AuthenticatorData<RegistrationExtensionAuthenticatorOutput> authenticatorData) {
    X509Certificate aikCert = attestationStatement.getX5c().getEndEntityAttestationCertificate().getCertificate();

    /// Verify the sig is a valid signature over certInfo using the attestation public key in aikCert with the algorithm specified in alg.
    String jcaName = getJcaName(attestationStatement.getAlg());
    Signature certInfoSignature = SignatureUtil.createSignature(jcaName);
    try {
        certInfoSignature.initVerify(aikCert.getPublicKey());
        certInfoSignature.update(certInfo.getBytes());
        if (!certInfoSignature.verify(attestationStatement.getSig())) {
            throw new BadAttestationStatementException("hash of certInfo doesn't match with sig.");
        }
    } catch (SignatureException | InvalidKeyException e) {
        throw new BadAttestationStatementException("Failed to validate the signature.", e);
    }

    /// Verify that aikCert meets the requirements in §8.3.1 TPM Attestation Statement Certificate Requirements.
    validateAikCert(aikCert);

    /// If aikCert contains an extension with OID 1 3 6 1 4 1 45724 1 1 4 (id-fido-gen-ce-aaguid) verify that the value of this extension matches the aaguid in authenticatorData.
    // NOTE(review): X509Certificate.getExtensionValue returns the DER-encoded OCTET STRING
    // wrapper, not the raw 16-byte value — verify that new AAGUID(aaguidBytes) accounts for
    // the wrapper, otherwise this comparison could never succeed. TODO confirm.
    byte[] aaguidBytes = aikCert.getExtensionValue(ID_FIDO_GEN_CE_AAGUID);
    if (aaguidBytes != null && !Objects.equals(new AAGUID(aaguidBytes), authenticatorData.getAttestedCredentialData().getAaguid())) {
        throw new BadAttestationStatementException("AAGUID in aikCert doesn't match with that in authenticatorData");
    }
}
/**
 * Maps a TPM hash algorithm identifier to the corresponding JCA digest algorithm name.
 *
 * @param alg TPM hash algorithm identifier
 * @return JCA algorithm name (e.g. "SHA-256")
 * @throws BadAttestationStatementException when the algorithm is not supported
 */
String getAlgJcaName(TPMIAlgHash alg) {
    // Return directly from each branch instead of funnelling through a local variable.
    switch (alg) {
        case TPM_ALG_SHA1:
            return "SHA-1";
        case TPM_ALG_SHA256:
            return "SHA-256";
        case TPM_ALG_SHA384:
            return "SHA-384";
        case TPM_ALG_SHA512:
            return "SHA-512";
        default:
            throw new BadAttestationStatementException("nameAlg '" + alg.name() + "' is not supported.");
    }
}
/**
 * Returns the validator applied to the TPM device property parsed from the AIK certificate's
 * Subject Alternative Name.
 */
public TPMDevicePropertyValidator getTpmDevicePropertyValidator() {
    return tpmDevicePropertyValidator;
}

/**
 * Replaces the TPM device property validator (defaults to a null/no-op validator).
 */
public void setTpmDevicePropertyValidator(TPMDevicePropertyValidator tpmDevicePropertyValidator) {
    this.tpmDevicePropertyValidator = tpmDevicePropertyValidator;
}
/**
 * Verifies that the public key described by pubArea (type/parameters/unique) equals the
 * credential public key in the authenticator data. Supports RSA and ECC keys; any mismatch
 * or unsupported combination falls through to the trailing exception.
 *
 * @param pubArea           TPM public area from the attestation statement
 * @param authenticatorData authenticator data carrying the credential public key
 * @throws BadAttestationStatementException when the keys do not match
 */
private void validatePublicKeyEquality(TPMTPublic pubArea, AuthenticatorData<RegistrationExtensionAuthenticatorOutput> authenticatorData) {
    PublicKey publicKeyInAuthData =
        authenticatorData.getAttestedCredentialData().getCOSEKey().getPublicKey();
    TPMUPublicId publicKeyInPubArea = pubArea.getUnique();

    if (pubArea.getType() == TPMIAlgPublic.TPM_ALG_RSA && publicKeyInPubArea instanceof RSAUnique) {
        RSAPublicKey rsaPublicKey = (RSAPublicKey) publicKeyInAuthData;
        TPMSRSAParms parms = (TPMSRSAParms) pubArea.getParameters();
        RSAUnique rsaUnique = (RSAUnique) publicKeyInPubArea;
        long exponent = UnsignedNumberUtil.getUnsignedInt(parms.getExponent());
        // A zero exponent in TPM parameters means the default public exponent.
        if (exponent == 0) {
            exponent = 65537; // 2^16 + 1
        }
        // BigInteger(1, bytes): interpret the TPM byte arrays as unsigned magnitudes.
        if (rsaPublicKey.getModulus().equals(new BigInteger(1, rsaUnique.getN())) &&
            rsaPublicKey.getPublicExponent().equals(BigInteger.valueOf(exponent))) {
            return;
        }
    } else if (pubArea.getType() == TPMIAlgPublic.TPM_ALG_ECDSA && publicKeyInPubArea instanceof ECCUnique) {
        ECPublicKey ecPublicKey = (ECPublicKey) publicKeyInAuthData;
        TPMSECCParms parms = (TPMSECCParms) pubArea.getParameters();
        EllipticCurve curveInParms = parms.getCurveId().getEllipticCurve();
        ECCUnique eccUnique = (ECCUnique) publicKeyInPubArea;
        // Curve and both affine coordinates must all match.
        if (ecPublicKey.getParams().getCurve().equals(curveInParms) &&
            ecPublicKey.getW().getAffineX().equals(new BigInteger(1, eccUnique.getX())) &&
            ecPublicKey.getW().getAffineY().equals(new BigInteger(1, eccUnique.getY()))) {
            return;
        }
    }
    throw new BadAttestationStatementException("publicKey in authData and publicKey in unique pubArea doesn't match");
}
/**
 * Validates the AIK certificate against the WebAuthn §8.3.1 TPM attestation certificate
 * requirements: v3, empty subject, TPM SAN, tcg-kp-AIKCertificate EKU, and CA=false.
 *
 * @param certificate the AIK (end-entity) certificate from x5c
 * @throws BadAttestationStatementException when any requirement is violated
 */
void validateAikCert(X509Certificate certificate) {
    try {
        /// TPM attestation certificate MUST have the following fields/extensions:
        /// Version MUST be set to 3.
        if (!Objects.equals(certificate.getVersion(), 3)) {
            throw new BadAttestationStatementException("x5c must be version 3.");
        }
        /// Subject field MUST be set to empty.
        // Replaced the deprecated getSubjectDN() with getSubjectX500Principal(); both yield an
        // empty string for an empty subject, so the emptiness check is unchanged.
        if (!certificate.getSubjectX500Principal().getName().isEmpty()) {
            throw new BadAttestationStatementException("x5c subject field MUST be set to empty");
        }
        /// The Subject Alternative Name extension MUST be set as defined in [TPMv2-EK-Profile] section 3.2.9.
        validateSubjectAlternativeName(certificate);
        /// The Extended Key Usage extension MUST contain the "joint-iso-itu-t(2) internationalorganizations(23) 133 tcg-kp(8) tcg-kp-AIKCertificate(3)" OID.
        if (certificate.getExtendedKeyUsage() == null || !certificate.getExtendedKeyUsage().contains("2.23.133.8.3")) {
            throw new BadAttestationStatementException("Attestation certificate doesn't contain tcg-kp-AIKCertificate (2.23.133.8.3) OID");
        }
        /// The Basic Constraints extension MUST have the CA component set to false.
        // getBasicConstraints() returns -1 when the certificate is NOT a CA.
        if (certificate.getBasicConstraints() != -1) {
            throw new BadAttestationStatementException("The Basic Constraints extension of attestation certificate must have the CA component set to false");
        }
        /// An Authority Information Access (AIA) extension with entry id-ad-ocsp and a CRL Distribution Point
        /// extension [RFC5280] are both OPTIONAL as the status of many attestation certificates is available
        /// through metadata services. See, for example, the FIDO Metadata Service [FIDOMetadataService].
    } catch (CertificateParsingException e) {
        throw new BadAttestationStatementException("Failed to parse attestation certificate", e);
    }
}
private void validateSubjectAlternativeName(X509Certificate certificate) throws CertificateParsingException {
try {
for (List<?> entry : certificate.getSubjectAlternativeNames()) {
if (entry.get(0).equals(4)) {
X500Name directoryName = new X500Name((String) entry.get(1));
TPMDeviceProperty tpmDeviceProperty = parseTPMDeviceProperty(directoryName);
tpmDevicePropertyValidator.validate(tpmDeviceProperty);
return;
}
}
} catch (IOException | RuntimeException e) {
throw new BadAttestationStatementException("The Subject Alternative Name extension of attestation certificate does not contain a TPM device property", e);
}
throw new BadAttestationStatementException("The Subject Alternative Name extension of attestation certificate does not contain a TPM device property");
}
TPMDeviceProperty parseTPMDeviceProperty(X500Name directoryName) throws IOException {
Map<String, String> map = directoryName.stream().flatMap(attributes -> attributes.entrySet().stream()).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
String manufacturerAttr = map.get("2.23.133.2.1");
String partNumberAttr = map.get("2.23.133.2.2");
String firmwareVersionAttr = map.get("2.23.133.2.3");
String manufacturer = decodeAttr(manufacturerAttr);
String partNumber = decodeAttr(partNumberAttr);
String firmwareVersion = decodeAttr(firmwareVersionAttr);
return new TPMDeviceProperty(manufacturer, partNumber, firmwareVersion);
}
private String decodeAttr(String attr) throws IOException {
String value = null;
if (attr != null) {
byte[] bytes = HexUtil.hex2bytes(attr.substring(1));
Asn1Utf8String attrAsn1Utf8String = new Asn1Utf8String();
attrAsn1Utf8String.decode(bytes);
value = attrAsn1Utf8String.getValue();
}
return value;
}
private byte[] getAttToBeSigned(CoreRegistrationObject registrationObject) {
byte[] authenticatorData = registrationObject.getAuthenticatorDataBytes();
byte[] clientDataHash = registrationObject.getClientDataHash();
return ByteBuffer.allocate(authenticatorData.length + clientDataHash.length).put(authenticatorData).put(clientDataHash).array();
}
}
|
package theGartic.patches;
import basemod.ReflectionHacks;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import com.evacipated.cardcrawl.modthespire.lib.SpirePatch;
import com.evacipated.cardcrawl.modthespire.lib.SpirePrefixPatch;
import com.evacipated.cardcrawl.modthespire.lib.SpireReturn;
import com.megacrit.cardcrawl.actions.AbstractGameAction;
import com.megacrit.cardcrawl.core.CardCrawlGame;
import com.megacrit.cardcrawl.helpers.ImageMaster;
import com.megacrit.cardcrawl.vfx.combat.FlashAtkImgEffect;
import theGartic.GarticMod;
/**
 * ModTheSpire patches hooking {@link FlashAtkImgEffect} so the mod's custom
 * GUNSHOT attack effect gets its own visual and sound instead of falling through
 * the vanilla enum handling.
 */
public class AttackEffectsPatch {
    @SpirePatch(
            clz = FlashAtkImgEffect.class,
            method = "loadImage"
    )
    public static class VfxPatch {
        /**
         * Supplies the texture region for the GUNSHOT effect; every other effect
         * falls through to the vanilla implementation via SpireReturn.Continue().
         */
        @SpirePrefixPatch
        public static SpireReturn<TextureAtlas.AtlasRegion> Prefix(FlashAtkImgEffect __instance) {
            // "effect" is a private field on FlashAtkImgEffect, so read it reflectively.
            AbstractGameAction.AttackEffect effect = ReflectionHacks.getPrivate(__instance, FlashAtkImgEffect.class, "effect");
            if (effect == GarticMod.Enums.GUNSHOT) {
                // Placeholder art: reuse the vanilla horizontal slash region. To ship custom
                // art, load a Texture and wrap it in a new TextureAtlas.AtlasRegion instead.
                return SpireReturn.Return(ImageMaster.ATK_SLASH_H);
            }
            return SpireReturn.Continue();
        }
    }
    @SpirePatch(
            clz = FlashAtkImgEffect.class,
            method = "playSound"
    )
    public static class SfxPatch {
        /**
         * Plays the mod's gunshot sound for the GUNSHOT effect; every other effect
         * falls through to the vanilla implementation.
         * Parameterized as {@code SpireReturn<Void>} (was a raw type).
         */
        @SpirePrefixPatch
        public static SpireReturn<Void> Prefix(FlashAtkImgEffect __instance, AbstractGameAction.AttackEffect effect) {
            if (effect == GarticMod.Enums.GUNSHOT) {
                CardCrawlGame.sound.play(GarticMod.GUNSHOT_KEY);
                return SpireReturn.Return();
            }
            return SpireReturn.Continue();
        }
    }
}
|
package io.khasang.gahelp.jpa;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.dao.annotation.PersistenceExceptionTranslationPostProcessor;
import org.springframework.orm.hibernate5.HibernateTransactionManager;
import org.springframework.orm.hibernate5.LocalSessionFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.sql.DataSource;
import java.util.Properties;
/**
 * Spring configuration wiring Hibernate 5: session factory, transaction manager and
 * exception translation. Hibernate settings are read from hibernate.properties on
 * the classpath.
 */
@Configuration
@EnableTransactionManagement
@PropertySource(value = "classpath:hibernate.properties")
public class HibernateConfig {
    private Environment environment;
    private DataSource dataSource;

    /** Transaction manager bound to the Hibernate session factory. */
    @Bean
    public HibernateTransactionManager hibernateTransactionManager(SessionFactory sessionFactory) {
        HibernateTransactionManager transactionManager = new HibernateTransactionManager();
        transactionManager.setSessionFactory(sessionFactory);
        return transactionManager;
    }

    /** Translates Hibernate exceptions into Spring's DataAccessException hierarchy. */
    @Bean
    public PersistenceExceptionTranslationPostProcessor exceptionTranslation() {
        return new PersistenceExceptionTranslationPostProcessor();
    }

    /** Session factory scanning the entity package, configured from hibernate.properties. */
    @Bean
    public LocalSessionFactoryBean sessionFactoryBean() {
        LocalSessionFactoryBean sessionFactoryBean = new LocalSessionFactoryBean();
        sessionFactoryBean.setDataSource(dataSource);
        sessionFactoryBean.setPackagesToScan("io.khasang.gahelp.entity");
        sessionFactoryBean.setHibernateProperties(properties());
        return sessionFactoryBean;
    }

    /**
     * Collects the required Hibernate settings from the environment.
     * Note: the redundant unprefixed "show_sql" entry was removed — Hibernate reads
     * the "hibernate.show_sql" key, which is already set here.
     */
    private Properties properties() {
        Properties properties = new Properties();
        properties.put("hibernate.dialect", environment.getRequiredProperty("hibernate.dialect"));
        properties.put("hibernate.show_sql", environment.getRequiredProperty("hibernate.show_sql"));
        properties.put("hibernate.format_sql", environment.getRequiredProperty("hibernate.format_sql"));
        properties.put("hibernate.hbm2ddl.auto", environment.getRequiredProperty("hibernate.hbm2ddl.auto"));
        return properties;
    }

    @Autowired
    public void setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    @Autowired
    public void setEnvironment(Environment environment) {
        this.environment = environment;
    }
}
|
package ca.com.rbc.provisioningservlet;
import org.camunda.bpm.application.ProcessApplication;
import org.camunda.bpm.application.impl.ServletProcessApplication;
/**
 * Process Application exposing this application's resources to the process engine.
 * <p>
 * Intentionally empty: the {@code @ProcessApplication} annotation plus the
 * {@link ServletProcessApplication} base class drive deployment through the servlet
 * container lifecycle, so no additional code is required here.
 */
@ProcessApplication
public class CamundaBpmProcessApplication extends ServletProcessApplication {
}
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.flink.runtime.operators.coordination;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.messages.Acknowledge;
import org.apache.flink.util.FlinkRuntimeException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
 * A mock {@code OperatorCoordinator.Context} for tests: it records every event handed
 * to {@code sendEvent(...)} and every task/job failure instead of talking to a real
 * runtime. When constructed with {@code failEventSending == true} (the default), each
 * {@code sendEvent(...)} call returns an exceptionally completed future.
 */
public class MockOperatorCoordinatorContext implements OperatorCoordinator.Context {
	private final OperatorID operatorID;
	private final int numSubtasks;
	private final boolean failEventSending;
	// Events recorded per target subtask, in sending order.
	private final Map<Integer, List<OperatorEvent>> eventsToOperator;
	// Failed subtasks in the order failTask() was called.
	private final LinkedHashMap<Integer, Throwable> failedTasks;
	private boolean jobFailed;

	public MockOperatorCoordinatorContext(OperatorID operatorID, int numSubtasks) {
		// By default, pretend that event sending fails.
		this(operatorID, numSubtasks, true);
	}

	public MockOperatorCoordinatorContext(OperatorID operatorID, int numSubtasks, boolean failEventSending) {
		this.operatorID = operatorID;
		this.numSubtasks = numSubtasks;
		this.failEventSending = failEventSending;
		this.eventsToOperator = new HashMap<>();
		this.failedTasks = new LinkedHashMap<>();
		this.jobFailed = false;
	}

	@Override
	public OperatorID getOperatorId() {
		return operatorID;
	}

	@Override
	public CompletableFuture<Acknowledge> sendEvent(
			OperatorEvent evt,
			int targetSubtask) throws TaskNotRunningException {
		// Record the event for the subtask, preserving arrival order.
		List<OperatorEvent> recordedEvents =
				eventsToOperator.computeIfAbsent(targetSubtask, subtask -> new ArrayList<>());
		recordedEvents.add(evt);
		if (!failEventSending) {
			return CompletableFuture.completedFuture(Acknowledge.get());
		}
		CompletableFuture<Acknowledge> failedFuture = new CompletableFuture<>();
		failedFuture.completeExceptionally(
				new FlinkRuntimeException("Testing Exception to fail event sending."));
		return failedFuture;
	}

	@Override
	public void failTask(int subtask, Throwable cause) {
		failedTasks.put(subtask, cause);
	}

	@Override
	public void failJob(Throwable cause) {
		// Only the fact of the failure is recorded; the cause is dropped.
		jobFailed = true;
	}

	@Override
	public int currentParallelism() {
		return numSubtasks;
	}

	// ------------------------- test accessors -------------------------

	public List<OperatorEvent> getEventsToOperatorBySubtaskId(int subtaskId) {
		// May return null when no event was ever sent to that subtask.
		return eventsToOperator.get(subtaskId);
	}

	public Map<Integer, List<OperatorEvent>> getEventsToOperator() {
		return eventsToOperator;
	}

	public LinkedHashMap<Integer, Throwable> getFailedTasks() {
		return failedTasks;
	}

	public boolean isJobFailed() {
		return jobFailed;
	}
}
|
package com.fluxtream.connectors.updaters;
/**
 * Outcome of a connector update run. Instances are normally obtained through the
 * static factory methods, each of which sets the matching {@link ResultType}.
 */
public class UpdateResult {
    UpdateResult(ResultType resultType) {
        type = resultType;
    }

    /** Creates a result with the default type {@link ResultType#NO_RESULT}. */
    public UpdateResult() {}

    /** The outcome category; defaults to NO_RESULT. */
    public ResultType type = ResultType.NO_RESULT;

    /** Stack trace of the failure when type is UPDATE_FAILED; transient so it is not serialized. */
    public transient String stackTrace;

    public enum ResultType {
        NO_RESULT, UPDATE_SUCCEEDED, UPDATE_FAILED, HAS_REACHED_RATE_LIMIT,
        DUPLICATE_UPDATE
    }

    /** Creates a failed result carrying the given stack trace. */
    public static UpdateResult failedResult(String stackTrace) {
        UpdateResult updateResult = new UpdateResult(ResultType.UPDATE_FAILED);
        updateResult.stackTrace = stackTrace;
        return updateResult;
    }

    /** Creates a successful result. */
    public static UpdateResult successResult() {
        return new UpdateResult(ResultType.UPDATE_SUCCEEDED);
    }

    /** Creates a result indicating the connector's API rate limit was reached. */
    public static UpdateResult rateLimitReachedResult() {
        return new UpdateResult(ResultType.HAS_REACHED_RATE_LIMIT);
    }

    /**
     * Creates a result indicating the update duplicated one already applied.
     * Added for consistency: DUPLICATE_UPDATE previously had no factory method.
     */
    public static UpdateResult duplicateUpdateResult() {
        return new UpdateResult(ResultType.DUPLICATE_UPDATE);
    }
}
|
package com.alipay.api.response;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.AlipayResponse;
/**
 * ALIPAY API: koubei.retail.wms.inboundorder.create response.
 * Carries the id of the inbound notification order that was created.
 *
 * @author auto create
 * @since 1.0, 2019-01-07 20:51:15
 */
public class KoubeiRetailWmsInboundorderCreateResponse extends AlipayResponse {
    private static final long serialVersionUID = 5469216449231745762L;

    /**
     * Inbound notification order id (入库通知单id).
     */
    @ApiField("inbound_order_id")
    private String inboundOrderId;

    public String getInboundOrderId() {
        return this.inboundOrderId;
    }

    public void setInboundOrderId(String inboundOrderId) {
        this.inboundOrderId = inboundOrderId;
    }
}
|
package ca.uhn.fhir.jpa.dao.dstu3;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.searchparam.SearchParamConstants;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap.EverythingModeEnum;
import ca.uhn.fhir.jpa.test.BaseJpaDstu3Test;
import ca.uhn.fhir.jpa.test.config.TestHibernateSearchAddInConfig;
import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.CompositeParam;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.HasAndListParam;
import ca.uhn.fhir.rest.param.HasOrListParam;
import ca.uhn.fhir.rest.param.HasParam;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceAndListParam;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenAndListParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.param.UriParamQualifierEnum;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.dstu3.model.Appointment;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.dstu3.model.Bundle.BundleType;
import org.hl7.fhir.dstu3.model.Bundle.HTTPVerb;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.CodeType;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.Condition;
import org.hl7.fhir.dstu3.model.ContactPoint.ContactPointSystem;
import org.hl7.fhir.dstu3.model.DateTimeType;
import org.hl7.fhir.dstu3.model.DateType;
import org.hl7.fhir.dstu3.model.Device;
import org.hl7.fhir.dstu3.model.DiagnosticReport;
import org.hl7.fhir.dstu3.model.Encounter;
import org.hl7.fhir.dstu3.model.Enumerations.AdministrativeGender;
import org.hl7.fhir.dstu3.model.Group;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.Immunization;
import org.hl7.fhir.dstu3.model.ImmunizationRecommendation;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.Location;
import org.hl7.fhir.dstu3.model.Medication;
import org.hl7.fhir.dstu3.model.MedicationAdministration;
import org.hl7.fhir.dstu3.model.MedicationRequest;
import org.hl7.fhir.dstu3.model.Observation;
import org.hl7.fhir.dstu3.model.Observation.ObservationStatus;
import org.hl7.fhir.dstu3.model.Organization;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.dstu3.model.Period;
import org.hl7.fhir.dstu3.model.Practitioner;
import org.hl7.fhir.dstu3.model.ProcedureRequest;
import org.hl7.fhir.dstu3.model.Quantity;
import org.hl7.fhir.dstu3.model.Range;
import org.hl7.fhir.dstu3.model.Reference;
import org.hl7.fhir.dstu3.model.SimpleQuantity;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.dstu3.model.Subscription;
import org.hl7.fhir.dstu3.model.Subscription.SubscriptionChannelType;
import org.hl7.fhir.dstu3.model.Subscription.SubscriptionStatus;
import org.hl7.fhir.dstu3.model.Substance;
import org.hl7.fhir.dstu3.model.Task;
import org.hl7.fhir.dstu3.model.Timing;
import org.hl7.fhir.dstu3.model.ValueSet;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsInRelativeOrder;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.stringContainsInOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mock;
@ContextConfiguration(classes = TestHibernateSearchAddInConfig.NoFT.class)
@SuppressWarnings("unchecked")
public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu3SearchNoFtTest.class);
@BeforeEach
public void beforeDisableResultReuse() {
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
myDaoConfig.setFetchSizeDefaultMaximum(new DaoConfig().getFetchSizeDefaultMaximum());
}
/**
* See #441
*/
@Test
public void testChainedMedication() {
Medication medication = new Medication();
medication.getCode().addCoding().setSystem("SYSTEM").setCode("04823543");
IIdType medId = myMedicationDao.create(medication).getId().toUnqualifiedVersionless();
MedicationAdministration ma = new MedicationAdministration();
ma.setMedication(new Reference(medId));
IIdType moId = myMedicationAdministrationDao.create(ma).getId().toUnqualified();
SearchParameterMap map = new SearchParameterMap();
map.add(MedicationAdministration.SP_MEDICATION, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().add(new ReferenceParam("code", "04823543"))));
IBundleProvider results = myMedicationAdministrationDao.search(map);
List<String> ids = toUnqualifiedIdValues(results);
assertThat(ids, contains(moId.getValue()));
}
	@Test
	public void testEmptyChain() {
		// Chained reference search that matches nothing: expect an empty result set.
		SearchParameterMap map = new SearchParameterMap();
		// NOTE(review): the parameter name comes from Encounter.SP_SUBJECT but the search runs
		// against the MedicationAdministration DAO — presumably intentional since only the
		// literal parameter name string matters here; confirm this is not a copy/paste slip.
		map.add(Encounter.SP_SUBJECT, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().add(new ReferenceParam("subject", "04823543").setChain("identifier"))));
		IBundleProvider results = myMedicationAdministrationDao.search(map);
		List<String> ids = toUnqualifiedIdValues(results);
		assertThat(ids, empty());
	}
	/**
	 * Chained reference searches where "subject" may target more than one resource
	 * type: an unqualified chain matches every candidate type, while a qualified
	 * chain (subject:Patient / subject:Group) narrows to a single type.
	 */
	@Test
	public void testChainWithMultipleTypePossibilities() {
		// A Patient and a Group carrying the same identifier (foo|bar).
		Patient sub1 = new Patient();
		sub1.setActive(true);
		sub1.addIdentifier().setSystem("foo").setValue("bar");
		String sub1Id = myPatientDao.create(sub1).getId().toUnqualifiedVersionless().getValue();
		Group sub2 = new Group();
		sub2.setActive(true);
		sub2.addIdentifier().setSystem("foo").setValue("bar");
		String sub2Id = myGroupDao.create(sub2).getId().toUnqualifiedVersionless().getValue();
		// One encounter per subject.
		Encounter enc1 = new Encounter();
		enc1.getSubject().setReference(sub1Id);
		String enc1Id = myEncounterDao.create(enc1).getId().toUnqualifiedVersionless().getValue();
		Encounter enc2 = new Encounter();
		enc2.getSubject().setReference(sub2Id);
		String enc2Id = myEncounterDao.create(enc2).getId().toUnqualifiedVersionless().getValue();
		List<String> ids;
		SearchParameterMap map;
		IBundleProvider results;
		// Unqualified chain: both encounters match.
		map = new SearchParameterMap();
		map.add(Encounter.SP_SUBJECT, new ReferenceParam("subject", "foo|bar").setChain("identifier"));
		results = myEncounterDao.search(map);
		ids = toUnqualifiedVersionlessIdValues(results);
		assertThat(ids, hasItems(enc1Id, enc2Id));
		// Qualified to Patient: only enc1 matches.
		map = new SearchParameterMap();
		map.add(Encounter.SP_SUBJECT, new ReferenceParam("subject:Patient", "foo|bar").setChain("identifier"));
		results = myEncounterDao.search(map);
		ids = toUnqualifiedVersionlessIdValues(results);
		assertThat(ids, hasItems(enc1Id));
		// Qualified to Group: only enc2 matches.
		map = new SearchParameterMap();
		map.add(Encounter.SP_SUBJECT, new ReferenceParam("subject:Group", "foo|bar").setChain("identifier"));
		results = myEncounterDao.search(map);
		ids = toUnqualifiedVersionlessIdValues(results);
		assertThat(ids, hasItems(enc2Id));
		// An identifier matching neither subject: no results.
		map = new SearchParameterMap();
		map.add(Encounter.SP_SUBJECT, new ReferenceParam("subject", "04823543").setChain("identifier"));
		results = myEncounterDao.search(map);
		ids = toUnqualifiedVersionlessIdValues(results);
		assertThat(ids, empty());
	}
	/**
	 * $everything at type and instance level: the type-level operation returns all
	 * patients plus everything they reference, while the instance-level operation is
	 * scoped to a single patient's resource graph.
	 */
	@Test
	public void testEverythingTimings() {
		String methodName = "testEverythingTimings";
		Organization org = new Organization();
		org.setName(methodName);
		IIdType orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
		Medication med = new Medication();
		med.getCode().setText(methodName);
		IIdType medId = myMedicationDao.create(med, mySrd).getId().toUnqualifiedVersionless();
		// Two patients share the same managing organization; only pat gets a MedicationRequest.
		Patient pat = new Patient();
		pat.addAddress().addLine(methodName);
		pat.getManagingOrganization().setReferenceElement(orgId);
		IIdType patId = myPatientDao.create(pat, mySrd).getId().toUnqualifiedVersionless();
		Patient pat2 = new Patient();
		pat2.addAddress().addLine(methodName + "2");
		pat2.getManagingOrganization().setReferenceElement(orgId);
		IIdType patId2 = myPatientDao.create(pat2, mySrd).getId().toUnqualifiedVersionless();
		MedicationRequest mo = new MedicationRequest();
		mo.getSubject().setReferenceElement(patId);
		mo.setMedication(new Reference(medId));
		IIdType moId = myMedicationRequestDao.create(mo, mySrd).getId().toUnqualifiedVersionless();
		HttpServletRequest request = mock(HttpServletRequest.class);
		// Type-level $everything: includes both patients and all linked resources.
		IBundleProvider resp = myPatientDao.patientTypeEverything(request, null, null, null, null, null, null, null, mySrd, null);
		assertThat(toUnqualifiedVersionlessIds(resp), containsInAnyOrder(orgId, medId, patId, moId, patId2));
		request = mock(HttpServletRequest.class);
		// Instance-level $everything on patId: the unrelated patId2 must be absent.
		resp = myPatientDao.patientInstanceEverything(request, patId, null, null, null, null, null, null, null, mySrd);
		assertThat(toUnqualifiedVersionlessIds(resp), containsInAnyOrder(orgId, medId, patId, moId));
	}
	/**
	 * Per message from David Hay on Skype: instance-level $everything over a large
	 * transaction bundle must return every resource in the patient's compartment,
	 * both via the ID-value accessor and via paged getResources() access.
	 */
	@Test
	public void testEverythingWithLargeSet() throws Exception {
		myFhirContext.setParserErrorHandler(new StrictErrorHandler());
		String inputString = IOUtils.toString(getClass().getResourceAsStream("/david_big_bundle.json"), StandardCharsets.UTF_8);
		Bundle inputBundle = myFhirContext.newJsonParser().parseResource(Bundle.class, inputString);
		inputBundle.setType(BundleType.TRANSACTION);
		// Rewrite every entry as a client-assigned-ID PUT so the original IDs survive
		// the transaction, and remember them for the final comparison.
		Set<String> allIds = new TreeSet<>();
		for (BundleEntryComponent nextEntry : inputBundle.getEntry()) {
			nextEntry.getRequest().setMethod(HTTPVerb.PUT);
			UrlUtil.UrlParts parts = UrlUtil.parseUrl(nextEntry.getResource().getId());
			nextEntry.getRequest().setUrl(parts.getResourceType() + "/" + parts.getResourceId());
			allIds.add(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue());
		}
		mySystemDao.transaction(mySrd, inputBundle);
		SearchParameterMap map = new SearchParameterMap();
		map.setEverythingMode(EverythingModeEnum.PATIENT_INSTANCE);
		IPrimitiveType<Integer> count = new IntegerType(1000);
		IBundleProvider everything = myPatientDao.patientInstanceEverything(mySrd.getServletRequest(), new IdType("Patient/A161443"), count, null, null, null, null, null, null, mySrd);
		// First pass: IDs gathered in one call must equal the full input set.
		TreeSet<String> ids = new TreeSet<>(toUnqualifiedVersionlessIdValues(everything));
		assertThat(ids, hasItem("List/A161444"));
		assertThat(ids, hasItem("List/A161468"));
		assertThat(ids, hasItem("List/A161500"));
		ourLog.info("Expected {} - {}", allIds.size(), allIds);
		ourLog.info("Actual {} - {}", ids.size(), ids);
		assertEquals(allIds, ids);
		// Second pass: page through getResources() one at a time and re-check the set.
		ids = new TreeSet<>();
		for (int i = 0; i < everything.size(); i++) {
			for (IBaseResource next : everything.getResources(i, i + 1)) {
				ids.add(next.getIdElement().toUnqualifiedVersionless().getValue());
			}
		}
		assertThat(ids, hasItem("List/A161444"));
		assertThat(ids, hasItem("List/A161468"));
		assertThat(ids, hasItem("List/A161500"));
		ourLog.info("Expected {} - {}", allIds.size(), allIds);
		ourLog.info("Actual {} - {}", ids.size(), ids);
		assertEquals(allIds, ids);
	}
@Test
public void testHasChain() {
Patient p = new Patient();
p.setId("P");
p.setActive(true);
myPatientDao.update(p);
Group group = new Group();
group.setId("G");
group.addMember().getEntity().setReference("Patient/P");
myGroupDao.update(group);
DiagnosticReport dr = new DiagnosticReport();
dr.setId("DR");
dr.getSubject().setReference("Patient/P");
myDiagnosticReportDao.update(dr);
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronous(true);
ReferenceParam referenceParam = new ReferenceParam();
referenceParam.setValueAsQueryToken(myFhirContext, "subject", "._has:Group:member:_id", "Group/G");
map.add("subject", referenceParam);
List<String> actual = toUnqualifiedVersionlessIdValues(myDiagnosticReportDao.search(map));
assertThat(actual, containsInAnyOrder("DiagnosticReport/DR"));
// http://hapi.fhir.org/baseR4/DiagnosticReport?subject._has:Group:member:_id=52152
}
	/**
	 * AND-combined _has parameters: only patients matching BOTH reverse-chained
	 * criteria (an Observation with status=final AND an Observation dated
	 * 2001-01-01) are returned.
	 */
	@SuppressWarnings("unused")
	@Test
	public void testHasAndHas() {
		Patient p1 = new Patient();
		p1.setActive(true);
		IIdType p1id = myPatientDao.create(p1).getId().toUnqualifiedVersionless();
		Patient p2 = new Patient();
		p2.setActive(true);
		IIdType p2id = myPatientDao.create(p2).getId().toUnqualifiedVersionless();
		// p1 has two observations: one with status=final and one dated 2001-01-01.
		Observation p1o1 = new Observation();
		p1o1.setStatus(ObservationStatus.FINAL);
		p1o1.getSubject().setReferenceElement(p1id);
		IIdType p1o1id = myObservationDao.create(p1o1).getId().toUnqualifiedVersionless();
		Observation p1o2 = new Observation();
		p1o2.setEffective(new DateTimeType("2001-01-01"));
		p1o2.getSubject().setReferenceElement(p1id);
		IIdType p1o2id = myObservationDao.create(p1o2).getId().toUnqualifiedVersionless();
		// p2 only has the status=final observation, so it fails the date criterion.
		Observation p2o1 = new Observation();
		p2o1.setStatus(ObservationStatus.FINAL);
		p2o1.getSubject().setReferenceElement(p2id);
		IIdType p2o1id = myObservationDao.create(p2o1).getId().toUnqualifiedVersionless();
		SearchParameterMap map = new SearchParameterMap();
		HasAndListParam hasAnd = new HasAndListParam();
		hasAnd.addValue(new HasOrListParam().add(new HasParam("Observation", "subject", "status", "final")));
		hasAnd.addValue(new HasOrListParam().add(new HasParam("Observation", "subject", "date", "2001-01-01")));
		map.add("_has", hasAnd);
		List<String> actual = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
		assertThat(actual, containsInAnyOrder(p1id.getValue()));
	}
@Test
public void testHasParameter() {
IIdType pid0;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester").addGiven("Joe");
pid0 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester").addGiven("Joe");
myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("FOO");
obs.getSubject().setReferenceElement(pid0);
myObservationDao.create(obs, mySrd);
}
{
Device device = new Device();
device.addIdentifier().setValue("DEVICEID");
IIdType devId = myDeviceDao.create(device, mySrd).getId().toUnqualifiedVersionless();
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("NOLINK");
obs.setDevice(new Reference(devId));
myObservationDao.create(obs, mySrd);
}
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("_has", new HasParam("Observation", "subject", "identifier", "urn:system|FOO"));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), contains(pid0.getValue()));
// No targets exist
params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("_has", new HasParam("Observation", "subject", "identifier", "urn:system|UNKNOWN"));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), empty());
// Target exists but doesn't link to us
params = new SearchParameterMap();
params.add("_has", new HasParam("Observation", "subject", "identifier", "urn:system|NOLINK"));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), empty());
}
	/**
	 * _has with a chained search parameter:
	 * Patient?_has:Observation:subject:device.identifier=... matches patients
	 * referenced by an observation whose device has the given identifier.
	 */
	@Test
	public void testHasParameterChained() {
		IIdType pid0;
		{
			Device device = new Device();
			device.addIdentifier().setSystem("urn:system").setValue("DEVICEID");
			IIdType devId = myDeviceDao.create(device, mySrd).getId().toUnqualifiedVersionless();
			Patient patient = new Patient();
			patient.setGender(AdministrativeGender.MALE);
			pid0 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
			// One observation tying the patient (subject) to the device.
			Observation obs = new Observation();
			obs.addIdentifier().setSystem("urn:system").setValue("FOO");
			obs.setDevice(new Reference(devId));
			obs.setSubject(new Reference(pid0));
			obs.setCode(new CodeableConcept(new Coding("sys", "val", "disp")));
			myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
		}
		SearchParameterMap params = new SearchParameterMap();
		// Target exists and is linked
		params.setLoadSynchronous(true);
		params.add("_has", new HasParam("Observation", "subject", "device.identifier", "urn:system|DEVICEID"));
		assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), contains(pid0.getValue()));
		// No targets exist
		params = new SearchParameterMap();
		params.setLoadSynchronous(true);
		params.add("_has", new HasParam("Observation", "subject", "identifier", "urn:system|UNKNOWN"));
		assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), empty());
		// Target exists but doesn't link to us
		params = new SearchParameterMap();
		params.setLoadSynchronous(true);
		params.add("_has", new HasParam("Observation", "subject", "identifier", "urn:system|NOLINK"));
		assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), empty());
	}
@Test
public void testHasParameterInvalidResourceType() {
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("_has", new HasParam("Observation__", "subject", "identifier", "urn:system|FOO"));
try {
myPatientDao.search(params);
fail();
} catch (InvalidRequestException e) {
assertEquals(Msg.code(1208) + "Invalid resource type: Observation__", e.getMessage());
}
}
@Test
public void testHasParameterInvalidSearchParam() {
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("_has", new HasParam("Observation", "subject", "IIIIDENFIEYR", "urn:system|FOO"));
try {
myPatientDao.search(params);
fail();
} catch (InvalidRequestException e) {
assertEquals(Msg.code(1209) + "Unknown parameter name: Observation:IIIIDENFIEYR", e.getMessage());
}
}
@Test
public void testHasParameterInvalidTargetPath() {
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add("_has", new HasParam("Observation", "soooooobject", "identifier", "urn:system|FOO"));
try {
myPatientDao.search(params);
fail();
} catch (InvalidRequestException e) {
assertEquals(Msg.code(1210) + "Unknown parameter name: Observation:soooooobject", e.getMessage());
}
}
	/**
	 * Date index entries must be deduplicated: six location periods holding only two
	 * distinct values should produce exactly two ResourceIndexedSearchParamDate rows,
	 * while searching by one of those values still finds the resource.
	 */
	@Test
	public void testIndexNoDuplicatesDate() {
		Encounter order = new Encounter();
		order.addLocation().getPeriod().setStartElement(new DateTimeType("2011-12-12T11:12:12Z")).setEndElement(new DateTimeType("2011-12-12T11:12:12Z"));
		order.addLocation().getPeriod().setStartElement(new DateTimeType("2011-12-12T11:12:12Z")).setEndElement(new DateTimeType("2011-12-12T11:12:12Z"));
		order.addLocation().getPeriod().setStartElement(new DateTimeType("2011-12-12T11:12:12Z")).setEndElement(new DateTimeType("2011-12-12T11:12:12Z"));
		order.addLocation().getPeriod().setStartElement(new DateTimeType("2011-12-11T11:12:12Z")).setEndElement(new DateTimeType("2011-12-11T11:12:12Z"));
		order.addLocation().getPeriod().setStartElement(new DateTimeType("2011-12-11T11:12:12Z")).setEndElement(new DateTimeType("2011-12-11T11:12:12Z"));
		order.addLocation().getPeriod().setStartElement(new DateTimeType("2011-12-11T11:12:12Z")).setEndElement(new DateTimeType("2011-12-11T11:12:12Z"));
		IIdType id = myEncounterDao.create(order, mySrd).getId().toUnqualifiedVersionless();
		List<IIdType> actual = toUnqualifiedVersionlessIds(
			myEncounterDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Encounter.SP_LOCATION_PERIOD, new DateParam("2011-12-12T11:12:12Z"))));
		assertThat(actual, contains(id));
		// Verify the index table directly: only two non-missing date rows may exist.
		runInTransaction(() -> {
			Class<ResourceIndexedSearchParamDate> type = ResourceIndexedSearchParamDate.class;
			List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i WHERE i.myMissing = false", type).getResultList();
			ourLog.info(toStringMultiline(results));
			assertEquals(2, results.size());
		});
	}
/**
 * Verify that duplicate dose-sequence numbers on a single resource are
 * collapsed to one number index row per distinct value, and that the
 * resource is still searchable on one of those values.
 */
@Test
public void testIndexNoDuplicatesNumber() {
	Immunization immunization = new Immunization();
	// Three copies of each of two distinct dose sequences - duplicates should collapse
	for (int doseSequence : new int[]{1, 1, 1, 2, 2, 2}) {
		immunization.addVaccinationProtocol().setDoseSequence(doseSequence);
	}
	IIdType id = myImmunizationDao.create(immunization, mySrd).getId().toUnqualifiedVersionless();
	List<IIdType> actual = toUnqualifiedVersionlessIds(myImmunizationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Immunization.SP_DOSE_SEQUENCE, new NumberParam("1"))));
	assertThat(actual, contains(id));
	runInTransaction(() -> {
		List<?> indexRows = myEntityManager.createQuery("SELECT i FROM " + ResourceIndexedSearchParamNumber.class.getSimpleName() + " i", ResourceIndexedSearchParamNumber.class).getResultList();
		ourLog.info(toStringMultiline(indexRows));
		// Only the two distinct numbers should have produced index rows
		assertEquals(2, indexRows.size());
	});
}
/**
 * Verify that duplicate quantity values on a single resource are collapsed to
 * one quantity index row per distinct value, and that the resource is still
 * searchable on one of those values.
 */
@Test
public void testIndexNoDuplicatesQuantity() {
	Substance res = new Substance();
	// Two identical copies of each of two distinct quantities
	res.addInstance().getQuantity().setSystem("http://foo").setCode("UNIT").setValue(123);
	res.addInstance().getQuantity().setSystem("http://foo").setCode("UNIT").setValue(123);
	res.addInstance().getQuantity().setSystem("http://foo2").setCode("UNIT2").setValue(1232);
	res.addInstance().getQuantity().setSystem("http://foo2").setCode("UNIT2").setValue(1232);
	IIdType id = mySubstanceDao.create(res, mySrd).getId().toUnqualifiedVersionless();
	Class<ResourceIndexedSearchParamQuantity> type = ResourceIndexedSearchParamQuantity.class;
	List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
	ourLog.info(toStringMultiline(results));
	// Only the two distinct quantities should have produced index rows
	assertEquals(2, results.size());
	List<IIdType> actual = toUnqualifiedVersionlessIds(
		mySubstanceDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Substance.SP_QUANTITY, new QuantityParam(null, 123, "http://foo", "UNIT"))));
	assertThat(actual, contains(id));
}
/**
 * Verify that duplicate reference values on a single resource are collapsed
 * to one row per distinct target in the resource-link table, and that the
 * resource is still searchable by one of the references.
 */
@Test
public void testIndexNoDuplicatesReference() {
	// Create the two Practitioners that will be referenced below
	Practitioner pract = new Practitioner();
	pract.setId("Practitioner/somepract");
	pract.addName().setFamily("SOME PRACT");
	myPractitionerDao.update(pract, mySrd);
	Practitioner pract2 = new Practitioner();
	pract2.setId("Practitioner/somepract2");
	pract2.addName().setFamily("SOME PRACT2");
	myPractitionerDao.update(pract2, mySrd);
	ProcedureRequest res = new ProcedureRequest();
	// Two identical copies of each of two distinct references
	res.addReplaces(new Reference("Practitioner/somepract"));
	res.addReplaces(new Reference("Practitioner/somepract"));
	res.addReplaces(new Reference("Practitioner/somepract2"));
	res.addReplaces(new Reference("Practitioner/somepract2"));
	IIdType id = myProcedureRequestDao.create(res, mySrd).getId().toUnqualifiedVersionless();
	Class<ResourceLink> type = ResourceLink.class;
	List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
	ourLog.info(toStringMultiline(results));
	// Only the two distinct links should have been written
	assertEquals(2, results.size());
	List<IIdType> actual = toUnqualifiedVersionlessIds(
		myProcedureRequestDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ProcedureRequest.SP_REPLACES, new ReferenceParam("Practitioner/somepract"))));
	assertThat(actual, contains(id));
}
/**
 * Verify that duplicate address lines on a single resource are collapsed to
 * one string index row per distinct value, and that the resource is still
 * searchable on one of those values.
 */
@Test
public void testIndexNoDuplicatesString() {
	Patient patient = new Patient();
	// Three copies of each of two distinct lines, each on its own Address element
	for (String line : new String[]{"123 Fake Street", "123 Fake Street", "123 Fake Street", "456 Fake Street", "456 Fake Street", "456 Fake Street"}) {
		patient.addAddress().addLine(line);
	}
	IIdType id = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
	List<ResourceIndexedSearchParamString> indexRows = myEntityManager.createQuery("SELECT i FROM " + ResourceIndexedSearchParamString.class.getSimpleName() + " i WHERE i.myMissing = false", ResourceIndexedSearchParamString.class).getResultList();
	ourLog.info(toStringMultiline(indexRows));
	// Only the two distinct strings should have produced index rows
	assertEquals(2, indexRows.size());
	List<IIdType> actual = toUnqualifiedVersionlessIds(myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_ADDRESS, new StringParam("123 Fake Street"))));
	assertThat(actual, contains(id));
}
/**
 * Verify that duplicate identifier tokens on a single resource are collapsed
 * to one token index row per distinct value, and that the resource is still
 * searchable on one of those values.
 */
@Test
public void testIndexNoDuplicatesToken() {
	Patient res = new Patient();
	// Two identical copies of each of two distinct identifiers
	res.addIdentifier().setSystem("http://foo1").setValue("123");
	res.addIdentifier().setSystem("http://foo1").setValue("123");
	res.addIdentifier().setSystem("http://foo2").setValue("1234");
	res.addIdentifier().setSystem("http://foo2").setValue("1234");
	IIdType id = myPatientDao.create(res, mySrd).getId().toUnqualifiedVersionless();
	Class<ResourceIndexedSearchParamToken> type = ResourceIndexedSearchParamToken.class;
	List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i WHERE i.myMissing = false", type).getResultList();
	ourLog.info(toStringMultiline(results));
	// This is 3 for now because the FluentPath for Patient:deceased adds a value.. this should
	// be corrected at some point, and we'll then drop back down to 2
	assertEquals(3, results.size());
	List<IIdType> actual = toUnqualifiedVersionlessIds(myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_IDENTIFIER, new TokenParam("http://foo1", "123"))));
	assertThat(actual, contains(id));
}
/**
 * Verify that duplicate URI values on a single resource are collapsed to one
 * URI index row per distinct value, and that the resource is still searchable
 * on one of those values.
 */
@Test
public void testIndexNoDuplicatesUri() {
	ValueSet res = new ValueSet();
	res.setUrl("http://www.example.org/vs");
	// Three identical copies of each of two distinct include systems
	res.getCompose().addInclude().setSystem("http://foo");
	res.getCompose().addInclude().setSystem("http://bar");
	res.getCompose().addInclude().setSystem("http://foo");
	res.getCompose().addInclude().setSystem("http://bar");
	res.getCompose().addInclude().setSystem("http://foo");
	res.getCompose().addInclude().setSystem("http://bar");
	IIdType id = myValueSetDao.create(res, mySrd).getId().toUnqualifiedVersionless();
	Class<ResourceIndexedSearchParamUri> type = ResourceIndexedSearchParamUri.class;
	List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i WHERE i.myMissing = false", type).getResultList();
	ourLog.info(toStringMultiline(results));
	// 3 rows - presumably the two distinct include systems plus the ValueSet.url itself; verify against URI indexing rules
	assertEquals(3, results.size());
	List<IIdType> actual = toUnqualifiedVersionlessIds(myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_REFERENCE, new UriParam("http://foo"))));
	assertThat(actual, contains(id));
}
/**
 * See #454. Parsing and storing a resource containing non-ASCII UTF-8
 * characters must not fail.
 */
@Test
public void testIndexWithUtf8Chars() throws IOException {
	String rawJson = IOUtils.toString(getClass().getResourceAsStream("/bug454_utf8.json"), StandardCharsets.UTF_8);
	CodeSystem codeSystem = (CodeSystem) myFhirContext.newJsonParser().parseResource(rawJson);
	myCodeSystemDao.create(codeSystem);
}
/**
 * A search with no criteria at all must return every Patient on the server,
 * including (at least) the two created here.
 */
@Test
public void testSearchAll() {
	// Create two distinct patients with the same family name
	String[][] testData = {{"001", "Joe"}, {"002", "John"}};
	for (String[] row : testData) {
		Patient patient = new Patient();
		patient.addIdentifier().setSystem("urn:system").setValue(row[0]);
		patient.addName().setFamily("Tester").addGiven(row[1]);
		myPatientDao.create(patient, mySrd);
	}
	SearchParameterMap map = new SearchParameterMap();
	map.setLoadSynchronous(true);
	List<IBaseResource> allPatients = toList(myPatientDao.search(map));
	// ">=" because other tests may have left patients behind
	assertTrue(allPatients.size() >= 2);
}
/**
 * Searches by the {@code _id} parameter: a matching ID, an unknown numeric
 * ID, and the ID of a resource of a different type (which must not match
 * through the Patient DAO).
 */
@Test
public void testSearchByIdParam() {
	String id1;
	{
		Patient patient = new Patient();
		patient.addIdentifier().setSystem("urn:system").setValue("001");
		id1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getValue();
	}
	String id2;
	{
		// An Organization, used below to prove _id is type-scoped
		Organization patient = new Organization();
		patient.addIdentifier().setSystem("urn:system").setValue("001");
		id2 = myOrganizationDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getValue();
	}
	SearchParameterMap params = new SearchParameterMap();
	params.setLoadSynchronous(true);
	// No criteria - only the Patient comes back from the Patient DAO
	assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), contains(id1));
	params = new SearchParameterMap();
	params.add("_id", new StringParam(id1));
	assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), contains(id1));
	// Unknown numeric ID - no match
	params = new SearchParameterMap();
	params.add("_id", new StringParam("9999999999999999"));
	assertEquals(0, toList(myPatientDao.search(params)).size());
	// The Organization's ID must not match when searching Patients
	params = new SearchParameterMap();
	params.add("_id", new StringParam(id2));
	assertEquals(0, toList(myPatientDao.search(params)).size());
}
/**
 * Searches by {@code _id} with AND/OR lists. Since a resource has exactly one
 * ID, AND'ing two different IDs must produce no results, regardless of order.
 */
@Test
public void testSearchByIdParamAnd() {
	IIdType id1;
	{
		Patient patient = new Patient();
		patient.addIdentifier().setSystem("urn:system").setValue("001");
		id1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
	}
	IIdType id2;
	{
		Patient patient = new Patient();
		patient.addIdentifier().setSystem("urn:system").setValue("001");
		id2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
	}
	SearchParameterMap params;
	StringAndListParam param;
	// (id1 OR id2) AND id1 -> only id1
	params = new SearchParameterMap();
	param = new StringAndListParam();
	param.addAnd(new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id2.getIdPart())));
	param.addAnd(new StringOrListParam().addOr(new StringParam(id1.getIdPart())));
	params.add("_id", param);
	assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
	// id2 AND id1 -> nothing
	params = new SearchParameterMap();
	param = new StringAndListParam();
	param.addAnd(new StringOrListParam().addOr(new StringParam(id2.getIdPart())));
	param.addAnd(new StringOrListParam().addOr(new StringParam(id1.getIdPart())));
	params.add("_id", param);
	assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
	// id2 AND unknown ID -> nothing
	params = new SearchParameterMap();
	param = new StringAndListParam();
	param.addAnd(new StringOrListParam().addOr(new StringParam(id2.getIdPart())));
	param.addAnd(new StringOrListParam().addOr(new StringParam("9999999999999")));
	params.add("_id", param);
	assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
	// unknown ID AND id2 -> nothing (order must not matter)
	params = new SearchParameterMap();
	param = new StringAndListParam();
	param.addAnd(new StringOrListParam().addOr(new StringParam("9999999999999")));
	param.addAnd(new StringOrListParam().addOr(new StringParam(id2.getIdPart())));
	params.add("_id", param);
	assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
}
/**
 * Searches by {@code _id} with OR lists, including a combination with
 * {@code _lastUpdated} that restricts the OR'd IDs to the later resource.
 */
@Test
public void testSearchByIdParamOr() {
	IIdType id1;
	{
		Patient patient = new Patient();
		patient.addIdentifier().setSystem("urn:system").setValue("001");
		id1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
	}
	// Capture a timestamp between the two creates for the _lastUpdated case below
	long betweenTime = System.currentTimeMillis();
	TestUtil.sleepOneClick();
	IIdType id2;
	{
		Patient patient = new Patient();
		patient.addIdentifier().setSystem("urn:system").setValue("001");
		id2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
	}
	SearchParameterMap params = new SearchParameterMap();
	// params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id2.getIdPart())));
	// assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1, id2));
	// Duplicated OR value - still matches just id1
	params = new SearchParameterMap();
	params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id1.getIdPart())));
	assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
	// Known OR unknown ID - matches the known one
	params = new SearchParameterMap();
	params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam("999999999999")));
	assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
	// With lastupdated: only the resource created after betweenTime should match
	params = new SearchParameterMap();
	params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id2.getIdPart())));
	params.setLastUpdated(new DateRangeParam(new Date(betweenTime), null));
	assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id2));
}
/**
 * When one of the OR'd {@code _id} values belongs to a resource of a
 * different type, a Patient search must return only the Patient.
 */
@Test
public void testSearchByIdParamWrongType() {
	Patient patient = new Patient();
	patient.addIdentifier().setSystem("urn:system").setValue("001");
	IIdType patientId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
	Organization organization = new Organization();
	organization.addIdentifier().setSystem("urn:system").setValue("001");
	IIdType organizationId = myOrganizationDao.create(organization, mySrd).getId().toUnqualifiedVersionless();
	SearchParameterMap map = new SearchParameterMap();
	map.add("_id", new StringOrListParam().addOr(new StringParam(patientId.getIdPart())).addOr(new StringParam(organizationId.getIdPart())));
	assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(patientId));
}
/**
 * Token searches on Subscription type/status codes: matching both parameters,
 * a non-matching status value, and a valid code supplied under the wrong
 * parameter name (which must not match).
 */
@Test
public void testSearchCode() {
	Subscription subs = new Subscription();
	subs.setStatus(SubscriptionStatus.ACTIVE);
	subs.getChannel().setType(SubscriptionChannelType.WEBSOCKET);
	subs.setCriteria("Observation?");
	IIdType id = mySubscriptionDao.create(subs, mySrd).getId().toUnqualifiedVersionless();
	// No criteria - the subscription is returned
	SearchParameterMap params = new SearchParameterMap();
	assertThat(toUnqualifiedVersionlessIds(mySubscriptionDao.search(params)), contains(id));
	// Both type and status match
	params = new SearchParameterMap();
	params.add(Subscription.SP_TYPE, new TokenParam(null, SubscriptionChannelType.WEBSOCKET.toCode()));
	params.add(Subscription.SP_STATUS, new TokenParam(null, SubscriptionStatus.ACTIVE.toCode()));
	assertThat(toUnqualifiedVersionlessIds(mySubscriptionDao.search(params)), contains(id));
	// Status code with a bogus suffix - no match
	params = new SearchParameterMap();
	params.add(Subscription.SP_TYPE, new TokenParam(null, SubscriptionChannelType.WEBSOCKET.toCode()));
	params.add(Subscription.SP_STATUS, new TokenParam(null, SubscriptionStatus.ACTIVE.toCode() + "2"));
	assertThat(toUnqualifiedVersionlessIds(mySubscriptionDao.search(params)), empty());
	// Wrong param: a channel-type code passed as the status parameter - no match
	params = new SearchParameterMap();
	params.add(Subscription.SP_STATUS, new TokenParam(null, SubscriptionChannelType.WEBSOCKET.toCode()));
	assertThat(toUnqualifiedVersionlessIds(mySubscriptionDao.search(params)), empty());
}
/**
 * Composite token+string (code-value-string) searches must match exactly the
 * observation whose string value pairs with the shared code.
 */
@Test
public void testSearchCompositeParam() {
	Observation obs1 = new Observation();
	obs1.getCode().addCoding().setSystem("foo").setCode("testSearchCompositeParamN01");
	obs1.setValue(new StringType("testSearchCompositeParamS01"));
	IIdType id1 = myObservationDao.create(obs1, mySrd).getId();
	Observation obs2 = new Observation();
	obs2.getCode().addCoding().setSystem("foo").setCode("testSearchCompositeParamN01");
	obs2.setValue(new StringType("testSearchCompositeParamS02"));
	IIdType id2 = myObservationDao.create(obs2, mySrd).getId();
	{
		// Shared code paired with the first string value -> only obs1
		CompositeParam<TokenParam, StringParam> value = new CompositeParam<>(
			new TokenParam("foo", "testSearchCompositeParamN01"),
			new StringParam("testSearchCompositeParamS01"));
		IBundleProvider result = myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_CODE_VALUE_STRING, value));
		assertEquals(1, result.size().intValue());
		assertEquals(id1.toUnqualifiedVersionless(), result.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless());
	}
	{
		// Shared code paired with the second string value -> only obs2
		CompositeParam<TokenParam, StringParam> value = new CompositeParam<>(
			new TokenParam("foo", "testSearchCompositeParamN01"),
			new StringParam("testSearchCompositeParamS02"));
		IBundleProvider result = myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_CODE_VALUE_STRING, value));
		assertEquals(1, result.size().intValue());
		assertEquals(id2.toUnqualifiedVersionless(), result.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless());
	}
}
/**
 * Composite token+date (code-value-date) searches against two observations
 * with adjacent Period values, using exact, {@code >} and {@code gt} prefixes.
 */
@Test
public void testSearchCompositeParamDate() {
	Observation o1 = new Observation();
	o1.getCode().addCoding().setSystem("foo").setCode("testSearchCompositeParamDateN01");
	o1.setValue(new Period().setStartElement(new DateTimeType("2001-01-01T11:11:11Z")).setEndElement(new DateTimeType("2001-01-01T12:11:11Z")));
	IIdType id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless();
	Observation o2 = new Observation();
	o2.getCode().addCoding().setSystem("foo").setCode("testSearchCompositeParamDateN01");
	o2.setValue(new Period().setStartElement(new DateTimeType("2001-01-02T11:11:11Z")).setEndElement(new DateTimeType("2001-01-02T12:11:11Z")));
	IIdType id2 = myObservationDao.create(o2, mySrd).getId().toUnqualifiedVersionless();
	{
		// Day-precision match on the first observation's period
		TokenParam v0 = new TokenParam("foo", "testSearchCompositeParamDateN01");
		DateParam v1 = new DateParam("2001-01-01");
		CompositeParam<TokenParam, DateParam> val = new CompositeParam<TokenParam, DateParam>(v0, v1);
		IBundleProvider result = myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_CODE_VALUE_DATE, val));
		assertThat(toUnqualifiedVersionlessIds(result), containsInAnyOrder(id1));
	}
	{
		// ">" earlier than both period ends - matches both observations
		TokenParam v0 = new TokenParam("foo", "testSearchCompositeParamDateN01");
		DateParam v1 = new DateParam(">2001-01-01T10:12:12Z");
		CompositeParam<TokenParam, DateParam> val = new CompositeParam<TokenParam, DateParam>(v0, v1);
		IBundleProvider result = myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_CODE_VALUE_DATE, val));
		assertThat(toUnqualifiedVersionlessIds(result), containsInAnyOrder(id1, id2));
	}
	{
		// "gt" inside the first period - still matches both (period overlap)
		TokenParam v0 = new TokenParam("foo", "testSearchCompositeParamDateN01");
		DateParam v1 = new DateParam("gt2001-01-01T11:12:12Z");
		CompositeParam<TokenParam, DateParam> val = new CompositeParam<TokenParam, DateParam>(v0, v1);
		IBundleProvider result = myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_CODE_VALUE_DATE, val));
		assertThat(toUnqualifiedVersionlessIds(result), containsInAnyOrder(id1, id2));
	}
	{
		// "gt" after the first period has ended - only the second observation matches
		TokenParam v0 = new TokenParam("foo", "testSearchCompositeParamDateN01");
		DateParam v1 = new DateParam("gt2001-01-01T15:12:12Z");
		CompositeParam<TokenParam, DateParam> val = new CompositeParam<>(v0, v1);
		IBundleProvider result = myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_CODE_VALUE_DATE, val));
		assertThat(toUnqualifiedVersionlessIds(result), containsInAnyOrder(id2));
	}
}
/**
 * Composite token+quantity (component-code-value-quantity) searches: the code
 * and quantity must match within the same component, and an unknown code or
 * unit on either side must produce no results.
 */
@Test
public void testSearchCompositeParamQuantity() {
	//@formatter:off
	Observation o1 = new Observation();
	o1.addComponent()
		.setCode(new CodeableConcept().addCoding(new Coding().setSystem("http://foo").setCode("code1")))
		.setValue(new Quantity().setSystem("http://bar").setCode("code1").setValue(100));
	o1.addComponent()
		.setCode(new CodeableConcept().addCoding(new Coding().setSystem("http://foo").setCode("code2")))
		.setValue(new Quantity().setSystem("http://bar").setCode("code2").setValue(100));
	IIdType id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless();
	Observation o2 = new Observation();
	o2.addComponent()
		.setCode(new CodeableConcept().addCoding(new Coding().setSystem("http://foo").setCode("code1")))
		.setValue(new Quantity().setSystem("http://bar").setCode("code1").setValue(200));
	o2.addComponent()
		.setCode(new CodeableConcept().addCoding(new Coding().setSystem("http://foo").setCode("code3")))
		.setValue(new Quantity().setSystem("http://bar").setCode("code2").setValue(200));
	IIdType id2 = myObservationDao.create(o2, mySrd).getId().toUnqualifiedVersionless();
	//@formatter:on
	String param = Observation.SP_COMPONENT_CODE_VALUE_QUANTITY;
	{
		// code1 with value >= 150 - only o2 (value 200) qualifies
		TokenParam v0 = new TokenParam("http://foo", "code1");
		QuantityParam v1 = new QuantityParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, 150, "http://bar", "code1");
		CompositeParam<TokenParam, QuantityParam> val = new CompositeParam<>(v0, v1);
		IBundleProvider result = myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(param, val));
		assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(id2.getValue()));
	}
	{
		// code1 with value >= 50 - both observations qualify
		TokenParam v0 = new TokenParam("http://foo", "code1");
		QuantityParam v1 = new QuantityParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, 50, "http://bar", "code1");
		CompositeParam<TokenParam, QuantityParam> val = new CompositeParam<>(v0, v1);
		IBundleProvider result = myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(param, val));
		assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder(id1.getValue(), id2.getValue()));
	}
	{
		// Unknown component code - no match
		TokenParam v0 = new TokenParam("http://foo", "code4");
		QuantityParam v1 = new QuantityParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, 50, "http://bar", "code1");
		CompositeParam<TokenParam, QuantityParam> val = new CompositeParam<>(v0, v1);
		IBundleProvider result = myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(param, val));
		assertThat(toUnqualifiedVersionlessIdValues(result), empty());
	}
	{
		// Unknown quantity unit code - no match
		TokenParam v0 = new TokenParam("http://foo", "code1");
		QuantityParam v1 = new QuantityParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, 50, "http://bar", "code4");
		CompositeParam<TokenParam, QuantityParam> val = new CompositeParam<>(v0, v1);
		IBundleProvider result = myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(param, val));
		assertThat(toUnqualifiedVersionlessIdValues(result), empty());
	}
}
/**
 * A Timing value whose repeat bounds are a Period must be indexed for date
 * search: an occurrence search with an lt prefix after the bounds should
 * find the resource.
 */
@Test
public void testSearchDate_TimingValueUsingPeriod() {
	ProcedureRequest request = new ProcedureRequest();
	request.setOccurrence(new Timing());
	Period bounds = new Period();
	bounds.getStartElement().setValueAsString("2018-01-01");
	bounds.getEndElement().setValueAsString("2018-02-01");
	request.getOccurrenceTiming().getRepeat().setBounds(bounds);
	String requestId = myProcedureRequestDao.create(request).getId().toUnqualifiedVersionless().getValue();
	SearchParameterMap map = new SearchParameterMap()
		.setLoadSynchronous(true)
		.add(ProcedureRequest.SP_OCCURRENCE, new DateParam("lt2019"));
	IBundleProvider found = myProcedureRequestDao.search(map);
	assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(requestId));
	assertEquals(1, found.size().intValue());
}
/**
 * Date searches must be scoped to the correct search parameter: a birthdate
 * search must not match a death date carrying the same value, and vice versa.
 */
@Test
public void testSearchDateWrongParam() {
	Patient p1 = new Patient();
	p1.getBirthDateElement().setValueAsString("1980-01-01");
	String id1 = myPatientDao.create(p1).getId().toUnqualifiedVersionless().getValue();
	Patient p2 = new Patient();
	p2.setDeceased(new DateTimeType("1980-01-01"));
	String id2 = myPatientDao.create(p2).getId().toUnqualifiedVersionless().getValue();
	{
		// birthdate search finds only the patient with that birthdate
		IBundleProvider found = myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_BIRTHDATE, new DateParam("1980-01-01")));
		assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(id1));
		assertEquals(1, found.size().intValue());
	}
	{
		// death-date search finds only the patient with that deceased date
		IBundleProvider found = myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_DEATH_DATE, new DateParam("1980-01-01")));
		assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(id2));
		assertEquals(1, found.size().intValue());
	}
}
/**
 * Date-range searches against an Encounter whose Period has only an end date
 * (2001-01-02, no start).
 */
@Test
public void testDatePeriodParamEndOnly() {
	{
		Encounter enc = new Encounter();
		enc.addIdentifier().setSystem("testDatePeriodParam").setValue("02");
		enc.getPeriod().getEndElement().setValueAsString("2001-01-02");
		myEncounterDao.create(enc, mySrd);
	}
	SearchParameterMap params;
	List<Encounter> encs;
	// Open lower bound covering the period end - matches
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
	// encs = toList(ourEncounterDao.search(params));
	// assertEquals(1, encs.size());
	// Lower bound before the period end - matches
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
	// Range entirely before the end date still matches - presumably because the
	// period has no start, so its indexed lower bound is open; verify against period indexing
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
	// Lower bound after the period end - no match
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(0, encs.size());
}
/**
 * Date-range searches against an Encounter whose Period has both a start
 * (2001-01-02) and an end (2001-01-03).
 */
@Test
public void testDatePeriodParamStartAndEnd() {
	{
		Encounter enc = new Encounter();
		enc.addIdentifier().setSystem("testDatePeriodParam").setValue("03");
		enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
		enc.getPeriod().getEndElement().setValueAsString("2001-01-03");
		myEncounterDao.create(enc, mySrd);
	}
	// Range fully covering the period - matches
	SearchParameterMap params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
	List<Encounter> encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
	// Range overlapping the period from its start - matches
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-02", "2001-01-06"));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
	// Open upper bound starting before the period - matches
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
	// Open lower bound ending at the period end - matches
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
	// Open lower bound ending after the period - matches
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-05"));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
	// Range entirely before the period - no match
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(0, encs.size());
	// Range entirely after the period - no match
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-05", null));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(0, encs.size());
}
/**
 * Date-range searches against an Encounter whose Period has only a start date
 * (2001-01-02, no end).
 */
@Test
public void testDatePeriodParamStartOnly() {
	{
		Encounter enc = new Encounter();
		enc.addIdentifier().setSystem("testDatePeriodParam").setValue("01");
		enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
		myEncounterDao.create(enc, mySrd);
	}
	// Range covering the start date - matches
	SearchParameterMap params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
	List<Encounter> encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
	// Open upper bound starting before the start date - matches
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
	// Open lower bound ending after the start date - matches
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
	// Range entirely before the start date - no match
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(0, encs.size());
	// Range starting after the start date still matches - presumably because the
	// period has no end, so its indexed upper bound is open; verify against period indexing
	params = new SearchParameterMap();
	params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
	params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
	encs = toList(myEncounterDao.search(params));
	assertEquals(1, encs.size());
}
/**
 * See #222. A deleted resource must not be returned by any search - whether
 * by {@code _id}, a token parameter, or a string parameter - even though the
 * same searches matched before deletion.
 */
@Test
public void testSearchForDeleted() {
	{
		Patient patient = new Patient();
		patient.setId("TEST");
		patient.setLanguageElement(new CodeType("TEST"));
		patient.addName().setFamily("TEST");
		patient.addIdentifier().setSystem("TEST").setValue("TEST");
		myPatientDao.update(patient, mySrd);
	}
	// Before deletion: all three searches find the patient
	SearchParameterMap params;
	params = new SearchParameterMap();
	params.setLoadSynchronous(true);
	params.add("_id", new StringParam("TEST"));
	assertEquals(1, toList(myPatientDao.search(params)).size());
	params = new SearchParameterMap();
	params.setLoadSynchronous(true);
	params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
	assertEquals(1, toList(myPatientDao.search(params)).size());
	params = new SearchParameterMap();
	params.setLoadSynchronous(true);
	params.add(Patient.SP_NAME, new StringParam("TEST"));
	assertEquals(1, toList(myPatientDao.search(params)).size());
	myPatientDao.delete(new IdType("Patient/TEST"), mySrd);
	// After deletion: the same searches must find nothing
	params = new SearchParameterMap();
	params.setLoadSynchronous(true);
	params.add("_id", new StringParam("TEST"));
	assertEquals(0, toList(myPatientDao.search(params)).size());
	params = new SearchParameterMap();
	params.setLoadSynchronous(true);
	params.add(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
	assertEquals(0, toList(myPatientDao.search(params)).size());
	params = new SearchParameterMap();
	params.setLoadSynchronous(true);
	params.add(Patient.SP_NAME, new StringParam("TEST"));
	assertEquals(0, toList(myPatientDao.search(params)).size());
}
/**
 * Searching by an {@code _id} value that exists on no resource must return
 * an empty result, not an error.
 */
@Test
public void testSearchForUnknownAlphanumericId() {
	SearchParameterMap map = new SearchParameterMap();
	map.add("_id", new StringParam("testSearchForUnknownAlphanumericId"));
	IBundleProvider retrieved = myPatientDao.search(map);
	assertEquals(0, retrieved.size().intValue());
}
@Test
public void testSearchLastUpdatedParam() {
// Exercises the _lastUpdated search parameter. Three patients are created at
// distinct instants (sleepOneClick ensures each create lands on a different
// timestamp); beforeAny precedes all three and beforeR2 falls between the
// first two creates and the third.
String methodName = "testSearchLastUpdatedParam";
DateTimeType beforeAny = new DateTimeType(new Date(), TemporalPrecisionEnum.MILLI);
TestUtil.sleepOneClick();
IIdType id1a;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily(methodName).addGiven("Joe");
id1a = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
IIdType id1b;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().setFamily(methodName + "XXXX").addGiven("Joe");
id1b = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
// beforeR2: after id1a/id1b, before id2
TestUtil.sleepOneClick();
DateTimeType beforeR2 = new DateTimeType(new Date(), TemporalPrecisionEnum.MILLI);
TestUtil.sleepOneClick();
IIdType id2;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().setFamily(methodName).addGiven("John");
id2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
// No _lastUpdated restriction: all three are returned
{
SearchParameterMap params = new SearchParameterMap();
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, hasItems(id1a, id1b, id2));
}
// Open-ended range starting before any create: all three
{
SearchParameterMap params = new SearchParameterMap();
params.setLastUpdated(new DateRangeParam(beforeAny, null));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, hasItems(id1a, id1b, id2));
}
// Range starting at beforeR2: only the last create (id2)
{
SearchParameterMap params = SearchParameterMap.newSynchronous();
params.setLastUpdated(new DateRangeParam(beforeR2, null));
myCaptureQueriesListener.clear();
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
myCaptureQueriesListener.logSelectQueriesForCurrentThread(0);
assertThat(patients, hasItems(id2));
assertThat(patients, not(hasItems(id1a, id1b)));
}
// Bounded range [beforeAny, beforeR2]: only the first two
{
SearchParameterMap params = new SearchParameterMap();
params.setLastUpdated(new DateRangeParam(beforeAny, beforeR2));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients.toString(), patients, not(hasItems(id2)));
assertThat(patients.toString(), patients, (hasItems(id1a, id1b)));
}
// Open-ended range ending at beforeR2: only the first two
{
SearchParameterMap params = new SearchParameterMap();
params.setLastUpdated(new DateRangeParam(null, beforeR2));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, (hasItems(id1a, id1b)));
assertThat(patients, not(hasItems(id2)));
}
// ge prefix at beforeR2: only id2
{
SearchParameterMap params = new SearchParameterMap();
params.setLastUpdated(new DateRangeParam(new DateParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, beforeR2)));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, not(hasItems(id1a, id1b)));
assertThat(patients, (hasItems(id2)));
}
// le prefix at beforeR2: only the first two
{
SearchParameterMap params = new SearchParameterMap();
params.setLastUpdated(new DateRangeParam(new DateParam(ParamPrefixEnum.LESSTHAN_OR_EQUALS, beforeR2)));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, (hasItems(id1a, id1b)));
assertThat(patients, not(hasItems(id2)));
}
}
@Test
public void testSearchLastUpdatedParamWithComparator() {
// Exercises _lastUpdated with explicit comparator prefixes. id0 is created
// before the [start, end] window; id1a and id1b are created inside it.
IIdType id0;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id0 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
// start: strictly after id0, strictly before id1a/id1b
TestUtil.sleepOneClick();
long start = System.currentTimeMillis();
TestUtil.sleepOneClick();
IIdType id1a;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id1a = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
IIdType id1b;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id1b = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
ourLog.info("Res 1: {}", myPatientDao.read(id0, mySrd).getMeta().getLastUpdatedElement().getValueAsString());
ourLog.info("Res 2: {}", myPatientDao.read(id1a, mySrd).getMeta().getLastUpdatedElement().getValueAsString());
ourLog.info("Res 3: {}", myPatientDao.read(id1b, mySrd).getMeta().getLastUpdatedElement().getValueAsString());
// end: strictly after all creates
TestUtil.sleepOneClick();
long end = System.currentTimeMillis();
SearchParameterMap map;
Date startDate = new Date(start);
Date endDate = new Date(end);
DateTimeType startDateTime = new DateTimeType(startDate, TemporalPrecisionEnum.MILLI);
DateTimeType endDateTime = new DateTimeType(endDate, TemporalPrecisionEnum.MILLI);
// Plain range [start, end]: both in-window patients, not id0
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(startDateTime, endDateTime));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a, id1b));
// Same range expressed with ge/le prefixes
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(new DateParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, startDateTime), new DateParam(ParamPrefixEnum.LESSTHAN_OR_EQUALS, endDateTime)));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a, id1b));
// Strict gt/lt still includes both (the bounds fall between timestamps)
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(new DateParam(ParamPrefixEnum.GREATERTHAN, startDateTime), new DateParam(ParamPrefixEnum.LESSTHAN, endDateTime)));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a, id1b));
// Strict lt at exactly id1b's lastUpdated excludes id1b, leaving only id1a
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(new DateParam(ParamPrefixEnum.GREATERTHAN, startDateTime.getValue()),
new DateParam(ParamPrefixEnum.LESSTHAN, myPatientDao.read(id1b, mySrd).getMeta().getLastUpdatedElement().getValue())));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a));
}
@Test
public void testSearchNameParam() {
    // One patient we expect to find, plus a decoy with different names
    IIdType createdId;
    {
        Patient p = new Patient();
        p.addIdentifier().setSystem("urn:system").setValue("001");
        p.addName().setFamily("testSearchNameParam01Fam").addGiven("testSearchNameParam01Giv");
        createdId = myPatientDao.create(p, mySrd).getId();
    }
    {
        Patient p = new Patient();
        p.addIdentifier().setSystem("urn:system").setValue("002");
        p.addName().setFamily("testSearchNameParam02Fam").addGiven("testSearchNameParam02Giv");
        myPatientDao.create(p, mySrd);
    }

    // "family" matches the family name
    SearchParameterMap map = new SearchParameterMap();
    map.setLoadSynchronous(true);
    map.add(Patient.SP_FAMILY, new StringParam("testSearchNameParam01Fam"));
    List<Patient> found = toList(myPatientDao.search(map));
    assertEquals(1, found.size());
    assertEquals(createdId.getIdPart(), found.get(0).getIdElement().getIdPart());

    // Given name shouldn't return for family param
    map = new SearchParameterMap();
    map.setLoadSynchronous(true);
    map.add(Patient.SP_FAMILY, new StringParam("testSearchNameParam01Giv"));
    assertEquals(0, toList(myPatientDao.search(map)).size());

    // "name" matches the family name...
    map = new SearchParameterMap();
    map.setLoadSynchronous(true);
    map.add(Patient.SP_NAME, new StringParam("testSearchNameParam01Fam"));
    found = toList(myPatientDao.search(map));
    assertEquals(1, found.size());
    assertEquals(createdId.getIdPart(), found.get(0).getIdElement().getIdPart());

    // ...and the given name
    map = new SearchParameterMap();
    map.setLoadSynchronous(true);
    map.add(Patient.SP_NAME, new StringParam("testSearchNameParam01Giv"));
    found = toList(myPatientDao.search(map));
    assertEquals(1, found.size());
    assertEquals(createdId.getIdPart(), found.get(0).getIdElement().getIdPart());

    // A value that appears nowhere matches nothing
    map = new SearchParameterMap();
    map.setLoadSynchronous(true);
    map.add(Patient.SP_FAMILY, new StringParam("testSearchNameParam01Foo"));
    assertEquals(0, toList(myPatientDao.search(map)).size());
}
/**
 * TODO: the "near" search parameter is not currently indexed; until that
 * support is added this test only creates a Location and performs no search.
 */
@Test
public void testSearchNearParam() {
    {
        // FIX: the second call previously invoked setLatitude again, silently
        // overwriting the first value and leaving the longitude unset. Set
        // latitude and longitude to their respective values instead.
        Location loc = new Location();
        loc.getPosition().setLatitude(43.7);
        loc.getPosition().setLongitude(79.4);
        myLocationDao.create(loc, mySrd);
    }
}
@Test
public void testSearchNumberParam() {
    // One encounter whose length is recorded in minutes (4 days' worth)...
    Encounter enc1 = new Encounter();
    enc1.addIdentifier().setSystem("foo").setValue("testSearchNumberParam01");
    enc1.getLength().setSystem(SearchParamConstants.UCUM_NS).setCode("min").setValue(4.0 * 24 * 60);
    IIdType encId1 = myEncounterDao.create(enc1, mySrd).getId();

    // ...and one whose length is recorded in years
    Encounter enc2 = new Encounter();
    enc2.addIdentifier().setSystem("foo").setValue("testSearchNumberParam02");
    enc2.getLength().setSystem(SearchParamConstants.UCUM_NS).setCode("year").setValue(2.0);
    IIdType encId2 = myEncounterDao.create(enc2, mySrd).getId();

    {
        IBundleProvider found = myEncounterDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Encounter.SP_LENGTH, new NumberParam(">2")));
        assertEquals(2, found.size().intValue());
        assertThat(toUnqualifiedVersionlessIds(found), containsInAnyOrder(encId1.toUnqualifiedVersionless(), encId2.toUnqualifiedVersionless()));
    }
    {
        IBundleProvider found = myEncounterDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Encounter.SP_LENGTH, new NumberParam("<1")));
        assertEquals(0, found.size().intValue());
    }
    {
        IBundleProvider found = myEncounterDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Encounter.SP_LENGTH, new NumberParam("4")));
        assertEquals(1, found.size().intValue());
        assertThat(toUnqualifiedVersionlessIds(found), containsInAnyOrder(encId1.toUnqualifiedVersionless()));
    }
}
@Test
public void testSearchNumberWrongParam() {
    // Two recommendations distinguished only by dose number
    ImmunizationRecommendation rec1 = new ImmunizationRecommendation();
    rec1.addRecommendation().setDoseNumber(1);
    String recId1 = myImmunizationRecommendationDao.create(rec1).getId().toUnqualifiedVersionless().getValue();

    ImmunizationRecommendation rec2 = new ImmunizationRecommendation();
    rec2.addRecommendation().setDoseNumber(2);
    String recId2 = myImmunizationRecommendationDao.create(rec2).getId().toUnqualifiedVersionless().getValue();

    // The dose-number param finds the matching resource...
    {
        IBundleProvider found = myImmunizationRecommendationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ImmunizationRecommendation.SP_DOSE_NUMBER, new NumberParam("1")));
        assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(recId1));
        assertEquals(1, found.size().intValue());
    }
    // ...while the dose-sequence param (a different indexed path) must not
    {
        IBundleProvider found = myImmunizationRecommendationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ImmunizationRecommendation.SP_DOSE_SEQUENCE, new NumberParam("1")));
        assertThat(toUnqualifiedVersionlessIdValues(found), empty());
        assertEquals(0, found.size().intValue());
    }
}
/**
 * When a ValueSet expansion returns no codes, a token search using the :in
 * modifier against that ValueSet must match no resources.
 */
@Test
public void testSearchOnCodesWithNone() {
    // A ValueSet whose expansion yields no codes at all
    ValueSet vs = new ValueSet();
    vs.setUrl("urn:testSearchOnCodesWithNone");
    myValueSetDao.create(vs);

    // Two patients, both with a gender code that could otherwise match
    Patient male = new Patient();
    male.setGender(AdministrativeGender.MALE);
    myPatientDao.create(male);

    Patient female = new Patient();
    female.setGender(AdministrativeGender.FEMALE);
    myPatientDao.create(female);

    // gender :in the empty ValueSet must match neither patient
    IBundleProvider found = myPatientDao
        .search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_GENDER, new TokenParam().setModifier(TokenParamModifier.IN).setValue("urn:testSearchOnCodesWithNone")));
    assertThat(toUnqualifiedVersionlessIdValues(found), empty());
    assertEquals(0, found.size().intValue());
}
@Test
public void testSearchParamChangesType() {
    String name = "testSearchParamChangesType";

    // Create a patient findable by family name
    Patient created = new Patient();
    created.addName().setFamily(name);
    IIdType id = myPatientDao.create(created, mySrd).getId().toUnqualifiedVersionless();

    SearchParameterMap map = new SearchParameterMap();
    map.setLoadSynchronous(true);
    map.add(Patient.SP_FAMILY, new StringParam(name));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(id));

    // Replace the resource so the value now lives in an identifier, not a name
    Patient replacement = new Patient();
    replacement.addIdentifier().setSystem(name).setValue(name);
    replacement.setId(id);
    myPatientDao.update(replacement, mySrd);

    // The stale family-name index entry must no longer match
    map = new SearchParameterMap();
    map.setLoadSynchronous(true);
    map.add(Patient.SP_FAMILY, new StringParam(name));
    assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), not(contains(id)));
}
@Test
public void testSearchPractitionerPhoneAndEmailParam() {
    String methodName = "testSearchPractitionerPhoneAndEmailParam";

    // One practitioner with a phone telecom entry...
    IIdType phoneId;
    {
        Practitioner pract = new Practitioner();
        pract.addName().setFamily(methodName);
        pract.addTelecom().setSystem(ContactPointSystem.PHONE).setValue("123");
        phoneId = myPractitionerDao.create(pract, mySrd).getId().toUnqualifiedVersionless();
    }
    // ...and one with an email telecom entry
    IIdType emailId;
    {
        Practitioner pract = new Practitioner();
        pract.addName().setFamily(methodName);
        pract.addTelecom().setSystem(ContactPointSystem.EMAIL).setValue("abc");
        emailId = myPractitionerDao.create(pract, mySrd).getId().toUnqualifiedVersionless();
    }

    // The phone value must not be found via the email param
    SearchParameterMap map = new SearchParameterMap();
    map.add(Practitioner.SP_FAMILY, new StringParam(methodName));
    map.add(Practitioner.SP_EMAIL, new TokenParam(null, "123"));
    List<IIdType> found = toUnqualifiedVersionlessIds(myPractitionerDao.search(map));
    assertEquals(0, found.size());

    // Family alone finds both practitioners
    map = new SearchParameterMap();
    map.add(Practitioner.SP_FAMILY, new StringParam(methodName));
    found = toUnqualifiedVersionlessIds(myPractitionerDao.search(map));
    assertEquals(2, found.size());
    assertThat(found, containsInAnyOrder(phoneId, emailId));

    // Email param narrows to the email practitioner
    map = new SearchParameterMap();
    map.add(Practitioner.SP_FAMILY, new StringParam(methodName));
    map.add(Practitioner.SP_EMAIL, new TokenParam(null, "abc"));
    found = toUnqualifiedVersionlessIds(myPractitionerDao.search(map));
    assertEquals(1, found.size());
    assertThat(found, containsInAnyOrder(emailId));

    // Phone param narrows to the phone practitioner
    map = new SearchParameterMap();
    map.add(Practitioner.SP_FAMILY, new StringParam(methodName));
    map.add(Practitioner.SP_PHONE, new TokenParam(null, "123"));
    found = toUnqualifiedVersionlessIds(myPractitionerDao.search(map));
    assertEquals(1, found.size());
    assertThat(found, containsInAnyOrder(phoneId));
}
@Test
public void testSearchQuantityWrongParam() {
    // One condition with the quantity range in "abatement", one in "onset"
    Condition abated = new Condition();
    abated.setAbatement(new Range().setLow((SimpleQuantity) new SimpleQuantity().setValue(1L)).setHigh((SimpleQuantity) new SimpleQuantity().setValue(1L)));
    String abatedId = myConditionDao.create(abated).getId().toUnqualifiedVersionless().getValue();

    Condition onset = new Condition();
    onset.setOnset(new Range().setLow((SimpleQuantity) new SimpleQuantity().setValue(1L)).setHigh((SimpleQuantity) new SimpleQuantity().setValue(1L)));
    String onsetId = myConditionDao.create(onset).getId().toUnqualifiedVersionless().getValue();

    // Each quantity param must only match the condition indexed under its own path
    {
        IBundleProvider found = myConditionDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Condition.SP_ABATEMENT_AGE, new QuantityParam("1")));
        assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(abatedId));
        assertEquals(1, found.size().intValue());
    }
    {
        IBundleProvider found = myConditionDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Condition.SP_ONSET_AGE, new QuantityParam("1")));
        assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(onsetId));
        assertEquals(1, found.size().intValue());
    }
}
@Test
public void testSearchResourceLinkWithChain() {
// Chained reference searches (subject.identifier) on Observation. Two
// patients share the "...XX" identifier; each also has a unique one.
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("testSearchResourceLinkWithChainXX");
patient.addIdentifier().setSystem("urn:system").setValue("testSearchResourceLinkWithChain01");
IIdType patientId01 = myPatientDao.create(patient, mySrd).getId();
Patient patient02 = new Patient();
patient02.addIdentifier().setSystem("urn:system").setValue("testSearchResourceLinkWithChainXX");
patient02.addIdentifier().setSystem("urn:system").setValue("testSearchResourceLinkWithChain02");
IIdType patientId02 = myPatientDao.create(patient02, mySrd).getId();
// One observation per patient
Observation obs01 = new Observation();
obs01.setEffective(new DateTimeType(new Date()));
obs01.setSubject(new Reference(patientId01));
IIdType obsId01 = myObservationDao.create(obs01, mySrd).getId();
Observation obs02 = new Observation();
obs02.setEffective(new DateTimeType(new Date()));
obs02.setSubject(new Reference(patientId02));
IIdType obsId02 = myObservationDao.create(obs02, mySrd).getId();
// Create another type, that shouldn't be returned
DiagnosticReport dr01 = new DiagnosticReport();
dr01.setSubject(new Reference(patientId01));
IIdType drId01 = myDiagnosticReportDao.create(dr01, mySrd).getId();
ourLog.info("P1[{}] P2[{}] O1[{}] O2[{}] D1[{}]", patientId01, patientId02, obsId01, obsId02, drId01);
// Chained search on a unique identifier finds only the matching observation
List<Observation> result = toList(myObservationDao
.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam(Patient.SP_IDENTIFIER, "urn:system|testSearchResourceLinkWithChain01"))));
assertEquals(1, result.size());
assertEquals(obsId01.getIdPart(), result.get(0).getIdElement().getIdPart());
// Direct reference by patient ID
// NOTE(review): this identical search is executed twice in a row — looks
// deliberate (e.g. exercising a repeated query), but confirm before changing
result = toList(myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_PATIENT, new ReferenceParam(patientId01.getIdPart()))));
assertEquals(1, result.size());
result = toList(myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_PATIENT, new ReferenceParam(patientId01.getIdPart()))));
assertEquals(1, result.size());
// Unknown identifier matches nothing
result = toList(myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam(Patient.SP_IDENTIFIER, "999999999999"))));
assertEquals(0, result.size());
// The shared identifier matches both observations, with or without a system
result = toList(myObservationDao
.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam(Patient.SP_IDENTIFIER, "urn:system|testSearchResourceLinkWithChainXX"))));
assertEquals(2, result.size());
result = toList(
myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam(Patient.SP_IDENTIFIER, "testSearchResourceLinkWithChainXX"))));
assertEquals(2, result.size());
// An explicitly empty system ("|value") must not match the urn:system entries
result = toList(
myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam(Patient.SP_IDENTIFIER, "|testSearchResourceLinkWithChainXX"))));
assertEquals(0, result.size());
}
@Test
public void testSearchResourceLinkWithChainDouble() {
    String methodName = "testSearchResourceLinkWithChainDouble";

    // Build a chain: Organization <- parent Location <- child <- grandchild
    Organization org = new Organization();
    org.setName(methodName);
    IIdType orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();

    Location parent = new Location();
    parent.setManagingOrganization(new Reference(orgId));
    IIdType parentId = myLocationDao.create(parent, mySrd).getId().toUnqualifiedVersionless();

    Location child = new Location();
    child.setPartOf(new Reference(parentId));
    IIdType childId = myLocationDao.create(child, mySrd).getId().toUnqualifiedVersionless();

    Location grandchild = new Location();
    grandchild.setPartOf(new Reference(childId));
    IIdType grandchildId = myLocationDao.create(grandchild, mySrd).getId().toUnqualifiedVersionless();

    // Direct reference: only the parent is managed by the organization
    IBundleProvider found = myLocationDao.search(new SearchParameterMap().setLoadSynchronous(true).add("organization", new ReferenceParam(orgId.getIdPart())));
    assertEquals(1, found.size().intValue());
    assertEquals(parentId, found.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless());

    // One-level chain: partof.organization -> the child
    ReferenceParam chained = new ReferenceParam(orgId.getIdPart());
    chained.setChain("organization");
    found = myLocationDao.search(new SearchParameterMap().setLoadSynchronous(true).add("partof", chained));
    assertEquals(1, found.size().intValue());
    assertEquals(childId, found.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless());

    // Two-level chain: partof.partof.organization -> the grandchild
    chained = new ReferenceParam(orgId.getIdPart());
    chained.setChain("partof.organization");
    found = myLocationDao.search(new SearchParameterMap().setLoadSynchronous(true).add("partof", chained));
    assertEquals(1, found.size().intValue());
    assertEquals(grandchildId, found.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless());

    // Two-level chain ending in a string search on the organization name
    chained = new ReferenceParam(methodName);
    chained.setChain("partof.organization.name");
    found = myLocationDao.search(new SearchParameterMap().setLoadSynchronous(true).add("partof", chained));
    assertEquals(1, found.size().intValue());
    assertEquals(grandchildId, found.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless());
}
@Test
public void testSearchResourceLinkWithChainWithMultipleTypes() throws Exception {
// Chained search where the chain target type is ambiguous: the "name" value
// exists on both a Patient and a Location, and one observation points at each.
Patient patient = new Patient();
patient.addName().setFamily("testSearchResourceLinkWithChainWithMultipleTypes01");
patient.addName().setFamily("testSearchResourceLinkWithChainWithMultipleTypesXX");
IIdType patientId01 = myPatientDao.create(patient, mySrd).getId();
Location loc01 = new Location();
loc01.getNameElement().setValue("testSearchResourceLinkWithChainWithMultipleTypes01");
IIdType locId01 = myLocationDao.create(loc01, mySrd).getId();
Observation obs01 = new Observation();
obs01.setEffective(new DateTimeType(new Date()));
obs01.setSubject(new Reference(patientId01));
IIdType obsId01 = myObservationDao.create(obs01, mySrd).getId().toUnqualifiedVersionless();
// "between" falls after obs01's create and before obs02's; "after" follows both
ca.uhn.fhir.jpa.util.TestUtil.sleepOneClick();
Date between = new Date();
ca.uhn.fhir.jpa.util.TestUtil.sleepOneClick();
Observation obs02 = new Observation();
obs02.setEffective(new DateTimeType(new Date()));
obs02.setSubject(new Reference(locId01));
IIdType obsId02 = myObservationDao.create(obs02, mySrd).getId().toUnqualifiedVersionless();
ca.uhn.fhir.jpa.util.TestUtil.sleepOneClick();
Date after = new Date();
ca.uhn.fhir.jpa.util.TestUtil.sleepOneClick();
ourLog.info("P1[{}] L1[{}] Obs1[{}] Obs2[{}]", patientId01, locId01, obsId01, obsId02);
List<IIdType> result;
SearchParameterMap params;
// The "XX" family exists only on the Patient, so only obs01 matches
result = toUnqualifiedVersionlessIds(myObservationDao
.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam(Patient.SP_NAME, "testSearchResourceLinkWithChainWithMultipleTypesXX"))));
assertThat(result, containsInAnyOrder(obsId01));
assertEquals(1, result.size());
// Explicitly typing the chain as Patient restricts to the patient-subject obs
result = toUnqualifiedVersionlessIds(myObservationDao.search(
new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam("Patient", Patient.SP_NAME, "testSearchResourceLinkWithChainWithMultipleTypes01"))));
assertThat(result, containsInAnyOrder(obsId01));
assertEquals(1, result.size());
// Untyped chain on the shared "01" name matches both subject types
params = new SearchParameterMap();
params.add(Observation.SP_SUBJECT, new ReferenceParam(Patient.SP_NAME, "testSearchResourceLinkWithChainWithMultipleTypes01"));
result = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertEquals(2, result.size());
assertThat(result, containsInAnyOrder(obsId01, obsId02));
// Adding the _lastUpdated window [between, after] keeps only obs02
params = new SearchParameterMap();
params.add(Observation.SP_SUBJECT, new ReferenceParam(Patient.SP_NAME, "testSearchResourceLinkWithChainWithMultipleTypes01"));
params.setLastUpdated(new DateRangeParam(between, after));
result = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertEquals(1, result.size());
assertThat(result, containsInAnyOrder(obsId02));
// A name that exists nowhere matches nothing
result = toUnqualifiedVersionlessIds(myObservationDao
.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam(Patient.SP_NAME, "testSearchResourceLinkWithChainWithMultipleTypesYY"))));
assertEquals(0, result.size());
}
@Test
public void testSearchResourceLinkWithTextLogicalId() {
    // Two patients with client-assigned (textual) logical IDs
    Patient patient01 = new Patient();
    patient01.setId("testSearchResourceLinkWithTextLogicalId01");
    patient01.addIdentifier().setSystem("urn:system").setValue("testSearchResourceLinkWithTextLogicalIdXX");
    patient01.addIdentifier().setSystem("urn:system").setValue("testSearchResourceLinkWithTextLogicalId01");
    IIdType patientId01 = myPatientDao.update(patient01, mySrd).getId();

    Patient patient02 = new Patient();
    patient02.setId("testSearchResourceLinkWithTextLogicalId02");
    patient02.addIdentifier().setSystem("urn:system").setValue("testSearchResourceLinkWithTextLogicalIdXX");
    patient02.addIdentifier().setSystem("urn:system").setValue("testSearchResourceLinkWithTextLogicalId02");
    IIdType patientId02 = myPatientDao.update(patient02, mySrd).getId();

    // One observation pointing at each patient
    Observation obs01 = new Observation();
    obs01.setEffective(new DateTimeType(new Date()));
    obs01.setSubject(new Reference(patientId01));
    IIdType obsId01 = myObservationDao.create(obs01, mySrd).getId();

    Observation obs02 = new Observation();
    obs02.setEffective(new DateTimeType(new Date()));
    obs02.setSubject(new Reference(patientId02));
    IIdType obsId02 = myObservationDao.create(obs02, mySrd).getId();

    // Create another type, that shouldn't be returned
    DiagnosticReport dr01 = new DiagnosticReport();
    dr01.setSubject(new Reference(patientId01));
    IIdType drId01 = myDiagnosticReportDao.create(dr01, mySrd).getId();
    ourLog.info("P1[{}] P2[{}] O1[{}] O2[{}] D1[{}]", patientId01, patientId02, obsId01, obsId02, drId01);

    // Searching by the textual logical ID finds only the matching observation
    List<Observation> result = toList(
        myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam("testSearchResourceLinkWithTextLogicalId01"))));
    assertEquals(1, result.size());
    assertEquals(obsId01.getIdPart(), result.get(0).getIdElement().getIdPart());

    // Unknown textual and numeric logical IDs match nothing
    result = toList(myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam("testSearchResourceLinkWithTextLogicalId99"))));
    assertEquals(0, result.size());

    result = toList(myObservationDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Observation.SP_SUBJECT, new ReferenceParam("999999999999999"))));
    assertEquals(0, result.size());
}
@SuppressWarnings("unused")
@Test
public void testSearchResourceReferenceMissingChain() {
// Chained :missing searches through a reference. oid1 has no name, oid2 does;
// tid1/tid3 reference an org via requester.onBehalfOf (the "organization"
// search path), tid2 references one only via owner (a different path).
IIdType oid1;
{
Organization org = new Organization();
org.setActive(true);
oid1 = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType tid1;
{
Task task = new Task();
task.getRequester().setOnBehalfOf(new Reference(oid1));
tid1 = myTaskDao.create(task, mySrd).getId().toUnqualifiedVersionless();
}
IIdType tid2;
{
Task task = new Task();
task.setOwner(new Reference(oid1));
tid2 = myTaskDao.create(task, mySrd).getId().toUnqualifiedVersionless();
}
IIdType oid2;
{
Organization org = new Organization();
org.setActive(true);
org.setName("NAME");
oid2 = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType tid3;
{
Task task = new Task();
task.getRequester().setOnBehalfOf(new Reference(oid2));
tid3 = myTaskDao.create(task, mySrd).getId().toUnqualifiedVersionless();
}
SearchParameterMap map;
List<IIdType> ids;
// Direct search: only oid1 is missing a name
map = new SearchParameterMap();
map.add(Organization.SP_NAME, new StringParam().setMissing(true));
ids = toUnqualifiedVersionlessIds(myOrganizationDao.search(map));
assertThat(ids, contains(oid1));
ourLog.info("Starting Search 2");
// Chained name:missing=true finds the task referencing the nameless org
// via the organization path only
map = new SearchParameterMap();
map.add(Task.SP_ORGANIZATION, new ReferenceParam("Organization", "name:missing", "true"));
ids = toUnqualifiedVersionlessIds(myTaskDao.search(map));
assertThat(ids, contains(tid1)); // NOT tid2
// name:missing=false finds the task referencing the named org
map = new SearchParameterMap();
map.add(Task.SP_ORGANIZATION, new ReferenceParam("Organization", "name:missing", "false"));
ids = toUnqualifiedVersionlessIds(myTaskDao.search(map));
assertThat(ids, contains(tid3));
// A Task search parameter applied to the Patient DAO matches nothing
map = new SearchParameterMap();
map.add(Task.SP_ORGANIZATION, new ReferenceParam("Organization", "name:missing", "true"));
ids = toUnqualifiedVersionlessIds(myPatientDao.search(map));
assertThat(ids, empty());
}
@SuppressWarnings("unused")
@Test
public void testSearchResourceReferenceOnlyCorrectPath() {
    IIdType orgId;
    {
        Organization org = new Organization();
        org.setActive(true);
        orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
    }

    // This task references the org via requester.onBehalfOf (the
    // "organization" search path)...
    IIdType matchingTaskId;
    {
        Task task = new Task();
        task.getRequester().setOnBehalfOf(new Reference(orgId));
        matchingTaskId = myTaskDao.create(task, mySrd).getId().toUnqualifiedVersionless();
    }
    // ...while this one references it only via owner, a different path
    IIdType otherTaskId;
    {
        Task task = new Task();
        task.setOwner(new Reference(orgId));
        otherTaskId = myTaskDao.create(task, mySrd).getId().toUnqualifiedVersionless();
    }

    SearchParameterMap map = new SearchParameterMap();
    map.add(Task.SP_ORGANIZATION, new ReferenceParam(orgId.getValue()));
    List<IIdType> ids = toUnqualifiedVersionlessIds(myTaskDao.search(map));
    assertThat(ids, contains(matchingTaskId)); // NOT otherTaskId
}
@Test
public void testSearchStringParamDoesntMatchWrongType() throws Exception {
    // The same family name on a Patient and a Practitioner
    IIdType patientId;
    {
        Patient patient = new Patient();
        patient.addName().setFamily("HELLO");
        patientId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
    }
    IIdType practitionerId;
    {
        Practitioner practitioner = new Practitioner();
        practitioner.addName().setFamily("HELLO");
        practitionerId = myPractitionerDao.create(practitioner, mySrd).getId().toUnqualifiedVersionless();
    }

    // A Patient search must only see the Patient, never the Practitioner
    SearchParameterMap map = new SearchParameterMap();
    map.add(Patient.SP_FAMILY, new StringParam("HELLO"));
    List<IIdType> found = toUnqualifiedVersionlessIds(myPatientDao.search(map));
    assertThat(found, containsInAnyOrder(patientId));
    assertThat(found, not(containsInAnyOrder(practitionerId)));
}
@Test
public void testSearchWithFetchSizeDefaultMaximum() {
    // Cap result fetching at 5 even though 10 patients exist
    myDaoConfig.setFetchSizeDefaultMaximum(5);
    for (int i = 0; i < 10; i++) {
        Patient p = new Patient();
        p.addName().setFamily("PT" + i);
        myPatientDao.create(p);
    }

    SearchParameterMap map = new SearchParameterMap();
    map.setLoadSynchronous(true);
    IBundleProvider values = myPatientDao.search(map);
    // The capped synchronous search reports no total...
    assertEquals(null, values.size());
    // ...and returns at most the configured maximum
    assertEquals(5, values.getResources(0, 1000).size());
}
@Test
public void testSearchStringParam() throws Exception {
// String search combined with a _lastUpdated window. "between" is captured
// after pid1's create and before pid2's; "after" follows both.
IIdType pid1;
IIdType pid2;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester_testSearchStringParam").addGiven("Joe");
pid1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
TestUtil.sleepOneClick();
Date between = new Date();
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().setFamily("Tester_testSearchStringParam").addGiven("John");
pid2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
TestUtil.sleepOneClick();
Date after = new Date();
SearchParameterMap params;
List<IIdType> patients;
// With the [between, after] window only the second patient matches
params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_testSearchStringParam"));
params.setLastUpdated(new DateRangeParam(between, after));
patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, containsInAnyOrder(pid2));
// Without the window both match
params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_testSearchStringParam"));
patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, containsInAnyOrder(pid1, pid2));
assertEquals(2, patients.size());
// A non-existent family name matches nothing
params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("FOO_testSearchStringParam"));
patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertEquals(0, patients.size());
}
@Test
public void testSearchStringParamReallyLong() {
    String methodName = "testSearchStringParamReallyLong";
    // A family name longer than the string index column can store
    String value = StringUtils.rightPad(methodName, 200, 'a');

    IIdType longId;
    IIdType shortId;
    {
        Patient patient = new Patient();
        patient.addIdentifier().setSystem("urn:system").setValue("001");
        patient.addName().setFamily(value);
        longId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
    }
    {
        Patient patient = new Patient();
        patient.addIdentifier().setSystem("urn:system").setValue("002");
        shortId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
    }

    // Searching on the value truncated to the index length must still find
    // the patient with the over-long name, and nothing else
    String truncated = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
    SearchParameterMap params = new SearchParameterMap();
    params.setLoadSynchronous(true);
    params.add(Patient.SP_FAMILY, new StringParam(truncated));
    IBundleProvider found = myPatientDao.search(params);
    assertEquals(1, toList(found).size());
    assertThat(toUnqualifiedVersionlessIds(found), contains(longId));
    assertThat(toUnqualifiedVersionlessIds(found), not(contains(shortId)));
}
/**
 * Verifies string-search normalization: by default, string matching is
 * accent- and case-insensitive (so "hora" matches both "h\u00F6ra" and "HORA"),
 * while an {@code :exact} search is literal and matches neither stored form.
 */
@Test
public void testSearchStringParamWithNonNormalized() {
{
// Given name containing o-umlaut (h\u00F6ra)
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().addGiven("testSearchStringParamWithNonNormalized_h\u00F6ra");
myPatientDao.create(patient, mySrd);
}
{
// Same word, upper-cased and without the accent
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().addGiven("testSearchStringParamWithNonNormalized_HORA");
myPatientDao.create(patient, mySrd);
}
// Default (normalized) search matches both stored variants
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Patient.SP_GIVEN, new StringParam("testSearchStringParamWithNonNormalized_hora"));
List<Patient> patients = toList(myPatientDao.search(params));
assertEquals(2, patients.size());
// Exact search is literal: lowercase unaccented form matches neither
StringParam parameter = new StringParam("testSearchStringParamWithNonNormalized_hora");
parameter.setExact(true);
params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Patient.SP_GIVEN, parameter);
patients = toList(myPatientDao.search(params));
assertEquals(0, patients.size());
}
/**
 * Verifies that a string search only matches the path it indexes: a value
 * stored in {@code name.family} is found via the {@code family} parameter but
 * not via {@code given}, and vice versa.
 * <p>
 * Cleanup: dropped the unused {@code throws Exception} clause (nothing checked
 * is thrown) and passed {@code mySrd} to {@code create(..)} for consistency
 * with the other tests in this class.
 */
@Test
public void testSearchStringWrongParam() {
// Value in the family name only
Patient p1 = new Patient();
p1.getNameFirstRep().setFamily("AAA");
String id1 = myPatientDao.create(p1, mySrd).getId().toUnqualifiedVersionless().getValue();
// Same value in the given name only
Patient p2 = new Patient();
p2.getNameFirstRep().addGiven("AAA");
String id2 = myPatientDao.create(p2, mySrd).getId().toUnqualifiedVersionless().getValue();
{
// family=AAA matches only the resource with a family name
IBundleProvider found = myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_FAMILY, new StringParam("AAA")));
assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(id1));
assertEquals(1, found.size().intValue());
}
{
// given=AAA matches only the resource with a given name
IBundleProvider found = myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_GIVEN, new StringParam("AAA")));
assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(id2));
assertEquals(1, found.size().intValue());
}
}
/**
 * Verifies token search semantics: system|value and bare-value identifier
 * matching, {@code :text} matching against the language's text/display
 * (which appears to be normalized/partial — see the "Left match" case;
 * confirm against the server's text-search rules), and OR-lists of tokens.
 */
@Test
public void testSearchTokenParam() {
// Patient 1: identifier 001 plus a communication language with coded value and text
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("testSearchTokenParam001");
patient.addName().setFamily("Tester").addGiven("testSearchTokenParam1");
patient.addCommunication().getLanguage().setText("testSearchTokenParamComText").addCoding().setCode("testSearchTokenParamCode").setSystem("testSearchTokenParamSystem")
.setDisplay("testSearchTokenParamDisplay");
myPatientDao.create(patient, mySrd);
// Patient 2: identifier 002 only
patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("testSearchTokenParam002");
patient.addName().setFamily("Tester").addGiven("testSearchTokenParam2");
myPatientDao.create(patient, mySrd);
{
// system|value match
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_IDENTIFIER, new TokenParam("urn:system", "testSearchTokenParam001"));
IBundleProvider retrieved = myPatientDao.search(map);
assertEquals(1, retrieved.size().intValue());
}
{
// value-only match (any system)
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_IDENTIFIER, new TokenParam(null, "testSearchTokenParam001"));
IBundleProvider retrieved = myPatientDao.search(map);
assertEquals(1, retrieved.size().intValue());
}
{
// Coded language match by system|code
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_LANGUAGE, new TokenParam("testSearchTokenParamSystem", "testSearchTokenParamCode"));
assertEquals(1, myPatientDao.search(map).size().intValue());
}
{
// :text (third ctor arg = true) does not match the code itself
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_LANGUAGE, new TokenParam(null, "testSearchTokenParamCode", true));
assertEquals(0, myPatientDao.search(map).size().intValue());
}
{
// Complete match
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_LANGUAGE, new TokenParam(null, "testSearchTokenParamComText", true));
assertEquals(1, myPatientDao.search(map).size().intValue());
}
{
// Left match
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_LANGUAGE, new TokenParam(null, "testSearchTokenParamcomtex", true));
assertEquals(1, myPatientDao.search(map).size().intValue());
}
{
// Right match
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_LANGUAGE, new TokenParam(null, "testSearchTokenParamComTex", true));
assertEquals(1, myPatientDao.search(map).size().intValue());
}
{
// OR list: both system|value entries match -> both patients returned
SearchParameterMap map = new SearchParameterMap();
TokenOrListParam listParam = new TokenOrListParam();
listParam.add("urn:system", "testSearchTokenParam001");
listParam.add("urn:system", "testSearchTokenParam002");
map.add(Patient.SP_IDENTIFIER, listParam);
IBundleProvider retrieved = myPatientDao.search(map);
assertEquals(2, retrieved.size().intValue());
}
{
// OR list mixing a bare value with a system|value entry
SearchParameterMap map = new SearchParameterMap();
TokenOrListParam listParam = new TokenOrListParam();
listParam.add(null, "testSearchTokenParam001");
listParam.add("urn:system", "testSearchTokenParam002");
map.add(Patient.SP_IDENTIFIER, listParam);
IBundleProvider retrieved = myPatientDao.search(map);
assertEquals(2, retrieved.size().intValue());
}
}
/**
 * Verifies system-only token searches ({@code system|} with a null or empty
 * value): both forms match every resource whose identifier has that system,
 * regardless of the identifier value.
 */
@Test
public void testSearchTokenParamNoValue() {
// Two patients under urn:system, one under urn:system2
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("testSearchTokenParam001");
patient.addName().setFamily("Tester").addGiven("testSearchTokenParam1");
patient.addCommunication().getLanguage().setText("testSearchTokenParamComText").addCoding().setCode("testSearchTokenParamCode").setSystem("testSearchTokenParamSystem")
.setDisplay("testSearchTokenParamDisplay");
myPatientDao.create(patient, mySrd);
patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("testSearchTokenParam002");
patient.addName().setFamily("Tester").addGiven("testSearchTokenParam2");
myPatientDao.create(patient, mySrd);
patient = new Patient();
patient.addIdentifier().setSystem("urn:system2").setValue("testSearchTokenParam002");
patient.addName().setFamily("Tester").addGiven("testSearchTokenParam2");
myPatientDao.create(patient, mySrd);
{
// system with null value -> matches both urn:system patients
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_IDENTIFIER, new TokenParam("urn:system", null));
IBundleProvider retrieved = myPatientDao.search(map);
assertEquals(2, retrieved.size().intValue());
}
{
// system with empty-string value behaves the same as null
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_IDENTIFIER, new TokenParam("urn:system", ""));
IBundleProvider retrieved = myPatientDao.search(map);
assertEquals(2, retrieved.size().intValue());
}
}
/**
 * Verifies that a token search only matches the path it indexes: the value
 * "male" stored as {@code Patient.gender} is found via the {@code gender}
 * parameter but not via {@code identifier}, and vice versa.
 * <p>
 * Cleanup: dropped the unused {@code throws Exception} clause (nothing checked
 * is thrown) and passed {@code mySrd} to {@code create(..)} for consistency
 * with the other tests in this class.
 */
@Test
public void testSearchTokenWrongParam() {
// "male" as the gender code
Patient p1 = new Patient();
p1.setGender(AdministrativeGender.MALE);
String id1 = myPatientDao.create(p1, mySrd).getId().toUnqualifiedVersionless().getValue();
// "male" as an identifier value
Patient p2 = new Patient();
p2.addIdentifier().setValue(AdministrativeGender.MALE.toCode());
String id2 = myPatientDao.create(p2, mySrd).getId().toUnqualifiedVersionless().getValue();
{
// gender=male matches only the resource with the gender element
IBundleProvider found = myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_GENDER, new TokenParam(null, "male")));
assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(id1));
assertEquals(1, found.size().intValue());
}
{
// identifier=male matches only the resource with the identifier
IBundleProvider found = myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Patient.SP_IDENTIFIER, new TokenParam(null, "male")));
assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(id2));
assertEquals(1, found.size().intValue());
}
}
/**
 * Verifies that a {@code _content} (fulltext) search fails with a clear
 * {@link InvalidRequestException} when fulltext search is not enabled on
 * this server.
 */
@Test
public void testSearchUnknownContentParam() {
SearchParameterMap params = new SearchParameterMap();
params.add(Constants.PARAM_CONTENT, new StringParam("fulltext"));
try {
// getAllResources() forces the search to actually execute
myPatientDao.search(params).getAllResources();
fail();
} catch (InvalidRequestException e) {
assertEquals(Msg.code(1192) + "Fulltext search is not enabled on this service, can not process parameter: _content", e.getMessage());
}
}
/**
 * Verifies that a {@code _text} (fulltext) search fails with a clear
 * {@link InvalidRequestException} when fulltext search is not enabled on
 * this server.
 */
@Test
public void testSearchUnknownTextParam() {
SearchParameterMap params = new SearchParameterMap();
params.add(Constants.PARAM_TEXT, new StringParam("fulltext"));
try {
// getAllResources() forces the search to actually execute
myPatientDao.search(params).getAllResources();
fail();
} catch (InvalidRequestException e) {
assertEquals(Msg.code(1192) + "Fulltext search is not enabled on this service, can not process parameter: _text", e.getMessage());
}
}
/**
 * Verifies that a URI search only matches the path it indexes: the value
 * "http://foo" stored as {@code ValueSet.url} is found via the {@code url}
 * parameter but not via {@code expansion}, and vice versa.
 */
@Test
public void testSearchUriWrongParam() {
// "http://foo" as the canonical url
ValueSet v1 = new ValueSet();
v1.getUrlElement().setValue("http://foo");
String id1 = myValueSetDao.create(v1).getId().toUnqualifiedVersionless().getValue();
// "http://foo" as the expansion identifier; url is something else
ValueSet v2 = new ValueSet();
v2.getExpansion().getIdentifierElement().setValue("http://foo");
v2.getUrlElement().setValue("http://www.example.org/vs");
String id2 = myValueSetDao.create(v2).getId().toUnqualifiedVersionless().getValue();
{
// url=http://foo matches only v1
IBundleProvider found = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://foo")));
assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(id1));
assertEquals(1, found.size().intValue());
}
{
// expansion=http://foo matches only v2
IBundleProvider found = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_EXPANSION, new UriParam("http://foo")));
assertThat(toUnqualifiedVersionlessIdValues(found), containsInAnyOrder(id2));
assertEquals(1, found.size().intValue());
}
}
/**
 * Verifies quantity searches with the {@code ge} prefix across the four
 * system/units combinations (neither, units only, system only, both):
 * an Observation with value 100 matches {@code ge10} in every combination,
 * and nothing matches {@code ge1000}.
 */
@Test
public void testSearchValueQuantity() {
String methodName = "testSearchValueQuantity";
String id1;
{
// Observation with quantity value 100 (should match ge10)
Observation o = new Observation();
o.getCode().addCoding().setSystem("urn:foo").setCode(methodName + "code");
Quantity q = new Quantity().setSystem("urn:bar:" + methodName).setCode(methodName + "units").setValue(100);
o.setValue(q);
id1 = myObservationDao.create(o, mySrd).getId().toUnqualifiedVersionless().getValue();
}
String id2;
{
// Observation with quantity value 5 (should never match ge10)
Observation o = new Observation();
o.getCode().addCoding().setSystem("urn:foo").setCode(methodName + "code");
Quantity q = new Quantity().setSystem("urn:bar:" + methodName).setCode(methodName + "units").setValue(5);
o.setValue(q);
id2 = myObservationDao.create(o, mySrd).getId().toUnqualifiedVersionless().getValue();
}
SearchParameterMap map;
IBundleProvider found;
QuantityParam param;
// ge10 with no system/units
map = new SearchParameterMap();
map.setLoadSynchronous(true);
param = new QuantityParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, new BigDecimal("10"), null, null);
map.add(Observation.SP_VALUE_QUANTITY, param);
found = myObservationDao.search(map);
assertThat(toUnqualifiedVersionlessIdValues(found), contains(id1));
// ge10 with units only
map = new SearchParameterMap();
map.setLoadSynchronous(true);
param = new QuantityParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, new BigDecimal("10"), null, methodName + "units");
map.add(Observation.SP_VALUE_QUANTITY, param);
found = myObservationDao.search(map);
assertThat(toUnqualifiedVersionlessIdValues(found), contains(id1));
// ge10 with system only
map = new SearchParameterMap();
map.setLoadSynchronous(true);
param = new QuantityParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, new BigDecimal("10"), "urn:bar:" + methodName, null);
map.add(Observation.SP_VALUE_QUANTITY, param);
found = myObservationDao.search(map);
assertThat(toUnqualifiedVersionlessIdValues(found), contains(id1));
// ge10 with both system and units
map = new SearchParameterMap();
map.setLoadSynchronous(true);
param = new QuantityParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, new BigDecimal("10"), "urn:bar:" + methodName, methodName + "units");
map.add(Observation.SP_VALUE_QUANTITY, param);
found = myObservationDao.search(map);
assertThat(toUnqualifiedVersionlessIdValues(found), contains(id1));
// ge1000 exceeds both stored values -> no matches
map = new SearchParameterMap();
map.setLoadSynchronous(true);
param = new QuantityParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, new BigDecimal("1000"), "urn:bar:" + methodName, methodName + "units");
map.add(Observation.SP_VALUE_QUANTITY, param);
found = myObservationDao.search(map);
assertThat(toUnqualifiedVersionlessIdValues(found), empty());
}
/**
 * Verifies date searches on {@code Patient.birthDate}: exact-day match,
 * non-matching day, and the {@code lt} prefix on both sides of the stored
 * date (2011-01-01).
 */
@Test
public void testSearchWithDate() {
IIdType orgId = myOrganizationDao.create(new Organization(), mySrd).getId();
IIdType id2;
IIdType id1;
{
// Patient with no birthDate
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
// Patient born 2011-01-01
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().setFamily("Tester_testSearchStringParam").addGiven("John");
patient.setBirthDateElement(new DateType("2011-01-01"));
patient.getManagingOrganization().setReferenceElement(orgId);
id2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
// Exact day matches
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Patient.SP_BIRTHDATE, new DateParam("2011-01-01"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, contains(id2));
}
{
// A different day does not match
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Patient.SP_BIRTHDATE, new DateParam("2011-01-03"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, empty());
}
{
// lt2011-01-03 includes the stored date
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Patient.SP_BIRTHDATE, new DateParam("2011-01-03").setPrefix(ParamPrefixEnum.LESSTHAN));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, contains(id2));
}
{
// lt2010-01-01 is before the stored date -> no match
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(Patient.SP_BIRTHDATE, new DateParam("2010-01-01").setPrefix(ParamPrefixEnum.LESSTHAN));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, empty());
}
}
/**
 * Verifies that a search constrained to a {@code _lastUpdated} window in the
 * distant past (epoch-millis 1000000..2000000, i.e. January 1970), combined
 * with a {@code _lastUpdated} ascending sort, returns an empty result rather
 * than failing.
 */
@Test
public void testSearchWithEmptySort() {
SearchParameterMap criteriaUrl = new SearchParameterMap();
DateRangeParam range = new DateRangeParam();
// Bounds are epoch milliseconds, far before any resource in this test DB
range.setLowerBound(new DateParam(ParamPrefixEnum.GREATERTHAN, 1000000));
range.setUpperBound(new DateParam(ParamPrefixEnum.LESSTHAN, 2000000));
criteriaUrl.setLastUpdated(range);
criteriaUrl.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.ASC));
IBundleProvider results = myObservationDao.search(criteriaUrl);
assertEquals(0, results.size().intValue());
}
/**
 * Verifies {@code _include} handling on a Patient -> Organization -> parent
 * Organization chain: no include, a named include (recursive and not),
 * the {@code *} wildcard include (recursive and not), and an include that
 * does not apply to the matched resources.
 */
@Test
public void testSearchWithIncludes() {
String methodName = "testSearchWithIncludes";
IIdType parentOrgId;
{
// Grandparent organization
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1Parent");
parentOrgId = myOrganizationDao.create(org, mySrd).getId();
}
{
// Child organization partOf the parent, managing patient P1
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1");
org.setPartOf(new Reference(parentOrgId));
IIdType orgId = myOrganizationDao.create(org, mySrd).getId();
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester_" + methodName + "_P1").addGiven("Joe");
patient.getManagingOrganization().setReferenceElement(orgId);
myPatientDao.create(patient, mySrd);
}
{
// Unrelated patient P2 with no organization
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().setFamily("Tester_" + methodName + "_P2").addGiven("John");
myPatientDao.create(patient, mySrd);
}
{
// No includes
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_" + methodName + "_P1"));
List<IBaseResource> patients = toList(myPatientDao.search(params));
assertEquals(1, patients.size());
}
{
// Named include
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_" + methodName + "_P1"));
params.addInclude(Patient.INCLUDE_ORGANIZATION.asNonRecursive());
IBundleProvider search = myPatientDao.search(params);
List<IBaseResource> patients = toList(search);
assertEquals(2, patients.size());
assertEquals(Patient.class, patients.get(0).getClass());
assertEquals(Organization.class, patients.get(1).getClass());
}
{
// Named include with parent non-recursive
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_" + methodName + "_P1"));
params.addInclude(Patient.INCLUDE_ORGANIZATION);
params.addInclude(Organization.INCLUDE_PARTOF.asNonRecursive());
IBundleProvider search = myPatientDao.search(params);
List<IBaseResource> patients = toList(search);
// Non-recursive partOf include does not walk from the included org up to its parent
assertEquals(2, patients.size());
assertEquals(Patient.class, patients.get(0).getClass());
assertEquals(Organization.class, patients.get(1).getClass());
}
{
// Named include with parent recursive
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_" + methodName + "_P1"));
params.addInclude(Patient.INCLUDE_ORGANIZATION);
params.addInclude(Organization.INCLUDE_PARTOF.asRecursive());
IBundleProvider search = myPatientDao.search(params);
List<IBaseResource> patients = toList(search);
// Recursive include pulls in the whole org chain: patient + 2 orgs
assertEquals(3, patients.size());
assertEquals(Patient.class, patients.get(0).getClass());
assertEquals(Organization.class, patients.get(1).getClass());
assertEquals(Organization.class, patients.get(2).getClass());
}
{
// * include non recursive
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_" + methodName + "_P1"));
params.addInclude(IBaseResource.INCLUDE_ALL.asNonRecursive());
IBundleProvider search = myPatientDao.search(params);
List<IBaseResource> patients = toList(search);
assertEquals(2, patients.size());
assertEquals(Patient.class, patients.get(0).getClass());
assertEquals(Organization.class, patients.get(1).getClass());
}
{
// * include recursive
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_" + methodName + "_P1"));
params.addInclude(IBaseResource.INCLUDE_ALL.asRecursive());
IBundleProvider search = myPatientDao.search(params);
List<IBaseResource> patients = toList(search);
assertEquals(3, patients.size());
assertEquals(Patient.class, patients.get(0).getClass());
assertEquals(Organization.class, patients.get(1).getClass());
assertEquals(Organization.class, patients.get(2).getClass());
}
{
// Irrelevant include
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_" + methodName + "_P1"));
params.addInclude(Encounter.INCLUDE_EPISODEOFCARE);
IBundleProvider search = myPatientDao.search(params);
List<IBaseResource> patients = toList(search);
// An include that doesn't apply to Patient results adds nothing
assertEquals(1, patients.size());
assertEquals(Patient.class, patients.get(0).getClass());
}
}
/**
 * Verifies a non-recursive named include on a three-level Organization chain:
 * searching for the leaf org with {@code _include=Organization:partof} pulls
 * in its direct parent only, not the grandparent.
 */
@SuppressWarnings("unused")
@Test
public void testSearchWithIncludesParameterNoRecurse() {
String methodName = "testSearchWithIncludes";
IIdType parentParentOrgId;
{
// Grandparent organization
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1Parent");
parentParentOrgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType parentOrgId;
{
// Parent organization, partOf the grandparent
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1Parent");
org.setPartOf(new Reference(parentParentOrgId));
parentOrgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType orgId;
{
// Leaf organization, partOf the parent
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1");
org.setPartOf(new Reference(parentOrgId));
orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType patientId;
{
// Patient managed by the leaf org (unused in the search below)
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester_" + methodName + "_P1").addGiven("Joe");
patient.getManagingOrganization().setReferenceElement(orgId);
patientId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
SearchParameterMap params = new SearchParameterMap();
params.add(IAnyResource.SP_RES_ID, new StringParam(orgId.getIdPart()));
params.addInclude(Organization.INCLUDE_PARTOF.asNonRecursive());
List<IIdType> resources = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
// Only one level of partOf is followed
assertThat(resources, contains(orgId, parentOrgId));
}
}
/**
 * Verifies a recursive named include on a three-level Organization chain:
 * searching for the leaf org with {@code _include:iterate=Organization:partof}
 * pulls in both the parent and the grandparent.
 */
@SuppressWarnings("unused")
@Test
public void testSearchWithIncludesParameterRecurse() {
String methodName = "testSearchWithIncludes";
IIdType parentParentOrgId;
{
// Grandparent organization
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1Parent");
parentParentOrgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType parentOrgId;
{
// Parent organization, partOf the grandparent
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1Parent");
org.setPartOf(new Reference(parentParentOrgId));
parentOrgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType orgId;
{
// Leaf organization, partOf the parent
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1");
org.setPartOf(new Reference(parentOrgId));
orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType patientId;
{
// Patient managed by the leaf org (unused in the search below)
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester_" + methodName + "_P1").addGiven("Joe");
patient.getManagingOrganization().setReferenceElement(orgId);
patientId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
SearchParameterMap params = new SearchParameterMap();
params.add(IAnyResource.SP_RES_ID, new StringParam(orgId.getIdPart()));
params.addInclude(Organization.INCLUDE_PARTOF.asRecursive());
List<IIdType> resources = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
ourLog.info(resources.toString());
// Recursion follows partOf to the top of the chain
assertThat(resources, containsInAnyOrder(orgId, parentOrgId, parentParentOrgId));
}
}
/**
 * Verifies the non-recursive {@code _include=*} wildcard: a patient search
 * pulls in the patient's direct managing organization, but does not walk the
 * organization's own partOf chain.
 */
@Test
public void testSearchWithIncludesStarNoRecurse() {
String methodName = "testSearchWithIncludes";
IIdType parentParentOrgId;
{
// Grandparent organization
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1Parent");
parentParentOrgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType parentOrgId;
{
// Parent organization, partOf the grandparent
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1Parent");
org.setPartOf(new Reference(parentParentOrgId));
parentOrgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType orgId;
{
// Leaf organization, partOf the parent
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1");
org.setPartOf(new Reference(parentOrgId));
orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType patientId;
{
// Patient managed by the leaf org
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester_" + methodName + "_P1").addGiven("Joe");
patient.getManagingOrganization().setReferenceElement(orgId);
patientId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_" + methodName + "_P1"));
params.addInclude(new Include("*").asNonRecursive());
List<IIdType> resources = toUnqualifiedVersionlessIds(myPatientDao.search(params));
// One hop only: patient plus its direct organization
assertThat(resources, contains(patientId, orgId));
}
}
/**
 * Verifies the recursive {@code _include=*} wildcard: a patient search pulls
 * in the full reference closure — the managing organization and every
 * organization up its partOf chain.
 */
@Test
public void testSearchWithIncludesStarRecurse() {
String methodName = "testSearchWithIncludes";
IIdType parentParentOrgId;
{
// Grandparent organization
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1Parent");
parentParentOrgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType parentOrgId;
{
// Parent organization, partOf the grandparent
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1Parent");
org.setPartOf(new Reference(parentParentOrgId));
parentOrgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType orgId;
{
// Leaf organization, partOf the parent
Organization org = new Organization();
org.getNameElement().setValue(methodName + "_O1");
org.setPartOf(new Reference(parentOrgId));
orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType patientId;
{
// Patient managed by the leaf org
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester_" + methodName + "_P1").addGiven("Joe");
patient.getManagingOrganization().setReferenceElement(orgId);
patientId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_" + methodName + "_P1"));
params.addInclude(new Include("*").asRecursive());
List<IIdType> resources = toUnqualifiedVersionlessIds(myPatientDao.search(params));
// Full closure: patient + all three organizations
assertThat(resources, containsInAnyOrder(patientId, orgId, parentOrgId, parentParentOrgId));
}
}
/**
 * Test for #62: {@code _include} must also work when the referenced resource
 * has a client-assigned textual (non-numeric) id, created via update.
 */
@Test
public void testSearchWithIncludesThatHaveTextId() {
{
// Organization with a client-assigned text id, created via update()
Organization org = new Organization();
org.setId("testSearchWithIncludesThatHaveTextIdid1");
org.getNameElement().setValue("testSearchWithIncludesThatHaveTextId_O1");
IIdType orgId = myOrganizationDao.update(org, mySrd).getId();
assertThat(orgId.getValue(), endsWith("Organization/testSearchWithIncludesThatHaveTextIdid1/_history/1"));
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().setFamily("Tester_testSearchWithIncludesThatHaveTextId_P1").addGiven("Joe");
patient.getManagingOrganization().setReferenceElement(orgId);
myPatientDao.create(patient, mySrd);
}
{
// Unrelated patient with no organization
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().setFamily("Tester_testSearchWithIncludesThatHaveTextId_P2").addGiven("John");
myPatientDao.create(patient, mySrd);
}
// With the include, the text-id organization is returned alongside the patient
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_testSearchWithIncludesThatHaveTextId_P1"));
params.addInclude(Patient.INCLUDE_ORGANIZATION);
IBundleProvider search = myPatientDao.search(params);
List<IBaseResource> patients = toList(search);
assertEquals(2, patients.size());
assertEquals(Patient.class, patients.get(0).getClass());
assertEquals(Organization.class, patients.get(1).getClass());
// Without the include, only the patient comes back
params = new SearchParameterMap();
params.add(Patient.SP_FAMILY, new StringParam("Tester_testSearchWithIncludesThatHaveTextId_P1"));
patients = toList(myPatientDao.search(params));
assertEquals(1, patients.size());
}
/**
 * Verifies the {@code :missing} modifier on a date parameter:
 * {@code birthdate:missing=false} returns only the patient with a birthDate,
 * {@code birthdate:missing=true} returns only the one without.
 */
@Test
public void testSearchWithMissingDate() {
IIdType orgId = myOrganizationDao.create(new Organization(), mySrd).getId();
IIdType notMissing;
IIdType missing;
{
// Patient with no birthDate
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
missing = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
// Patient with a birthDate
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().setFamily("Tester_testSearchStringParam").addGiven("John");
patient.setBirthDateElement(new DateType("2011-01-01"));
patient.getManagingOrganization().setReferenceElement(orgId);
notMissing = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
// Date Param
{
// :missing=false -> only resources that HAVE the element
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
DateParam param = new DateParam();
param.setMissing(false);
params.add(Patient.SP_BIRTHDATE, param);
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, containsInRelativeOrder(notMissing));
assertThat(patients, not(containsInRelativeOrder(missing)));
}
{
// :missing=true -> only resources that LACK the element
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
DateParam param = new DateParam();
param.setMissing(true);
params.add(Patient.SP_BIRTHDATE, param);
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, containsInRelativeOrder(missing));
assertThat(patients, not(containsInRelativeOrder(notMissing)));
}
}
/**
 * Verifies {@code :missing=true} on {@code MedicationRequest.date}: only the
 * request with no dosage timing event is returned.
 * <p>
 * Cleanup: passed {@code mySrd} to {@code create(..)} for consistency with
 * the other tests in this class.
 */
@Test
public void testSearchWithMissingDate2() {
// Request with a timing event (date present)
MedicationRequest mr1 = new MedicationRequest();
mr1.getCategory().addCoding().setSystem("urn:medicationroute").setCode("oral");
mr1.addDosageInstruction().getTiming().addEventElement().setValueAsString("2017-01-01");
IIdType id1 = myMedicationRequestDao.create(mr1, mySrd).getId().toUnqualifiedVersionless();
// Request with no timing event (date missing)
MedicationRequest mr2 = new MedicationRequest();
mr2.getCategory().addCoding().setSystem("urn:medicationroute").setCode("oral");
IIdType id2 = myMedicationRequestDao.create(mr2, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap map = new SearchParameterMap();
map.add(MedicationRequest.SP_DATE, new DateParam().setMissing(true));
IBundleProvider results = myMedicationRequestDao.search(map);
List<String> ids = toUnqualifiedVersionlessIdValues(results);
// Only the request without a date qualifies as "missing"
assertThat(ids, contains(id2.getValue()));
}
/**
 * Verifies the {@code :missing} modifier on a quantity parameter:
 * {@code value-quantity:missing=false} returns only the observation with a
 * quantity value, {@code :missing=true} only the one without.
 */
@Test
public void testSearchWithMissingQuantity() {
IIdType notMissing;
IIdType missing;
{
// Observation with no value
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("001");
missing = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
{
// Observation with a quantity value
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("002");
obs.setValue(new Quantity(123));
notMissing = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
// Quantity Param
{
// :missing=false -> only resources that HAVE the element
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
QuantityParam param = new QuantityParam();
param.setMissing(false);
params.add(Observation.SP_VALUE_QUANTITY, param);
List<IIdType> patients = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertThat(patients, not(containsInRelativeOrder(missing)));
assertThat(patients, containsInRelativeOrder(notMissing));
}
{
// :missing=true -> only resources that LACK the element
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
QuantityParam param = new QuantityParam();
param.setMissing(true);
params.add(Observation.SP_VALUE_QUANTITY, param);
List<IIdType> patients = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertThat(patients, containsInRelativeOrder(missing));
assertThat(patients, not(containsInRelativeOrder(notMissing)));
}
}
/**
 * Verifies the {@code :missing} modifier on a reference parameter:
 * {@code organization:missing=false} returns only the patient with a managing
 * organization, {@code :missing=true} only the one without (and never the
 * Organization resource itself).
 */
@Test
public void testSearchWithMissingReference() {
IIdType orgId = myOrganizationDao.create(new Organization(), mySrd).getId().toUnqualifiedVersionless();
IIdType notMissing;
IIdType missing;
{
// Patient with no managing organization
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
missing = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
// Patient referencing the organization
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().setFamily("Tester_testSearchStringParam").addGiven("John");
patient.setBirthDateElement(new DateType("2011-01-01"));
patient.getManagingOrganization().setReferenceElement(orgId);
notMissing = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
// Reference Param
{
// :missing=false -> only patients that HAVE the reference
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
ReferenceParam param = new ReferenceParam();
param.setMissing(false);
params.add(Patient.SP_ORGANIZATION, param);
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, not(containsInRelativeOrder(missing)));
assertThat(patients, containsInRelativeOrder(notMissing));
}
{
// :missing=true -> only patients that LACK the reference; the target
// Organization is a different resource type and must not appear
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
ReferenceParam param = new ReferenceParam();
param.setMissing(true);
params.add(Patient.SP_ORGANIZATION, param);
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, containsInRelativeOrder(missing));
assertThat(patients, not(containsInRelativeOrder(notMissing)));
assertThat(patients, not(containsInRelativeOrder(orgId)));
}
}
/**
 * Verifies the {@code :missing} modifier on a string parameter:
 * {@code family:missing=false} returns only the patient with a family name,
 * {@code :missing=true} only the one without.
 */
@Test
public void testSearchWithMissingString() {
IIdType orgId = myOrganizationDao.create(new Organization(), mySrd).getId();
IIdType notMissing;
IIdType missing;
{
// Patient with no name
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
missing = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
{
// Patient with a family name
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().setFamily("Tester_testSearchStringParam").addGiven("John");
patient.setBirthDateElement(new DateType("2011-01-01"));
patient.getManagingOrganization().setReferenceElement(orgId);
notMissing = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
// String Param
{
// :missing=false -> only resources that HAVE the element
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
StringParam param = new StringParam();
param.setMissing(false);
params.add(Patient.SP_FAMILY, param);
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, not(containsInRelativeOrder(missing)));
assertThat(patients, containsInRelativeOrder(notMissing));
}
{
// :missing=true -> only resources that LACK the element
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
StringParam param = new StringParam();
param.setMissing(true);
params.add(Patient.SP_FAMILY, param);
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, containsInRelativeOrder(missing));
assertThat(patients, not(containsInRelativeOrder(notMissing)));
}
}
/**
 * Verifies that after deleting every Device, an unfiltered search returns an
 * empty result set (both a zero size and an empty resource page).
 */
@Test
public void testSearchWithNoResults() {
	// Create one device so the initial search has something to delete
	Device device = new Device();
	device.addIdentifier().setSystem("Foo");
	myDeviceDao.create(device, mySrd);
	IBundleProvider searchResults = myDeviceDao.search(new SearchParameterMap());
	ourLog.info("Initial size: " + searchResults.size());
	// Delete every device found by the unfiltered search
	for (IBaseResource resource : searchResults.getResources(0, searchResults.size())) {
		ourLog.info("Deleting: {}", resource.getIdElement());
		myDeviceDao.delete(resource.getIdElement(), mySrd);
	}
	// Re-run the search: nothing should remain
	searchResults = myDeviceDao.search(new SearchParameterMap());
	if (searchResults.size() > 0) {
		ourLog.info("Found: " + (searchResults.getResources(0, 1).get(0).getIdElement()));
		// Dump the unexpected resource so the failure is diagnosable
		fail(myFhirContext.newXmlParser().setPrettyPrint(true).encodeResourceToString(searchResults.getResources(0, 1).get(0)));
	}
	assertEquals(0, searchResults.size().intValue());
	List<IBaseResource> page = searchResults.getResources(0, 0);
	assertTrue(page.isEmpty());
}
/**
 * Verifies {@code _revinclude}: searching for a Patient by id with
 * {@code Condition:patient} rev-included returns the Patient followed by the
 * Condition that references it.
 */
@Test
public void testSearchWithRevIncludes() {
	final String methodName = "testSearchWithRevIncludes";
	TransactionTemplate txTemplate = new TransactionTemplate(myTransactionMgr);
	txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
	// Create the patient and a condition referencing it inside a new transaction
	// (TransactionCallback is a functional interface, so a lambda is equivalent
	// to the anonymous-class form)
	IIdType pid = txTemplate.execute(theStatus -> {
		org.hl7.fhir.dstu3.model.Patient patient = new org.hl7.fhir.dstu3.model.Patient();
		patient.addName().setFamily(methodName);
		IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
		org.hl7.fhir.dstu3.model.Condition condition = new org.hl7.fhir.dstu3.model.Condition();
		condition.getSubject().setReferenceElement(patientId);
		myConditionDao.create(condition);
		return patientId;
	});
	SearchParameterMap map = new SearchParameterMap();
	map.add(IAnyResource.SP_RES_ID, new StringParam(pid.getIdPart()));
	map.addRevInclude(Condition.INCLUDE_PATIENT);
	IBundleProvider results = myPatientDao.search(map);
	List<IBaseResource> foundResources = results.getResources(0, results.size());
	// Matched resource first, rev-included resource second
	assertEquals(Patient.class, foundResources.get(0).getClass());
	assertEquals(Condition.class, foundResources.get(1).getClass());
}
/**
 * Verifies that the {@code _security} token parameter and the {@code _profile}
 * URI parameter each match only the organization tagged with the
 * corresponding meta entry.
 */
@Test
public void testSearchWithSecurityAndProfileParams() {
String methodName = "testSearchWithSecurityAndProfileParams";
IIdType tag1id;
{
// Organization carrying a meta.security coding
Organization org = new Organization();
org.getNameElement().setValue("FOO");
org.getMeta().addSecurity("urn:taglist", methodName + "1a", null);
tag1id = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType tag2id;
{
// Organization carrying a meta.profile entry
Organization org = new Organization();
org.getNameElement().setValue("FOO");
org.getMeta().addProfile("http://" + methodName);
tag2id = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
{
SearchParameterMap params = new SearchParameterMap();
params.add("_security", new TokenParam("urn:taglist", methodName + "1a"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag1id));
}
{
SearchParameterMap params = new SearchParameterMap();
params.add("_profile", new UriParam("http://" + methodName));
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag2id));
}
}
/**
 * Verifies {@code _tag} searches: single tag, code-only tag, OR-combined
 * tags, OR-combined tags restricted by {@code _lastUpdated}, and AND-combined
 * tags (both the non-matching and matching AND cases).
 */
@Test
public void testSearchWithTagParameter() {
String methodName = "testSearchWithTagParameter";
IIdType tag1id;
{
// First organization: tags 1a and 1b
Organization org = new Organization();
org.getNameElement().setValue("FOO");
org.getMeta().addTag("urn:taglist", methodName + "1a", null);
org.getMeta().addTag("urn:taglist", methodName + "1b", null);
tag1id = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
// Capture a timestamp strictly between the two creations so the
// _lastUpdated range below can exclude the first organization
TestUtil.sleepOneClick();
Date betweenDate = new Date();
IIdType tag2id;
{
// Second organization: tags 2a and 2b
Organization org = new Organization();
org.getNameElement().setValue("FOO");
org.getMeta().addTag("urn:taglist", methodName + "2a", null);
org.getMeta().addTag("urn:taglist", methodName + "2b", null);
tag2id = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
{
// One tag
SearchParameterMap params = new SearchParameterMap();
params.add("_tag", new TokenParam("urn:taglist", methodName + "1a"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag1id));
}
{
// Code only
SearchParameterMap params = new SearchParameterMap();
params.add("_tag", new TokenParam(null, methodName + "1a"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag1id));
}
{
// Or tags
SearchParameterMap params = new SearchParameterMap();
TokenOrListParam orListParam = new TokenOrListParam();
orListParam.add(new TokenParam("urn:taglist", methodName + "1a"));
orListParam.add(new TokenParam("urn:taglist", methodName + "2a"));
params.add("_tag", orListParam);
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag1id, tag2id));
}
{
// Or tags with lastupdated: only the organization created after
// betweenDate should match
SearchParameterMap params = new SearchParameterMap();
TokenOrListParam orListParam = new TokenOrListParam();
orListParam.add(new TokenParam("urn:taglist", methodName + "1a"));
orListParam.add(new TokenParam("urn:taglist", methodName + "2a"));
params.add("_tag", orListParam);
params.setLastUpdated(new DateRangeParam(betweenDate, null));
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag2id));
}
// TODO: get multiple/AND working
{
// And tags: 1a AND 2a live on different resources, so nothing matches
SearchParameterMap params = new SearchParameterMap();
TokenAndListParam andListParam = new TokenAndListParam();
andListParam.addValue(new TokenOrListParam("urn:taglist", methodName + "1a"));
andListParam.addValue(new TokenOrListParam("urn:taglist", methodName + "2a"));
params.add("_tag", andListParam);
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertEquals(0, patients.size());
}
{
// And tags: 1a AND 1b both live on the first resource, so it matches
SearchParameterMap params = new SearchParameterMap();
TokenAndListParam andListParam = new TokenAndListParam();
andListParam.addValue(new TokenOrListParam("urn:taglist", methodName + "1a"));
andListParam.addValue(new TokenOrListParam("urn:taglist", methodName + "1b"));
params.add("_tag", andListParam);
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag1id));
}
}
/**
 * Verifies the {@code :not} modifier on {@code _tag}: resources carrying the
 * negated tag are excluded, a non-existent tag excludes nothing, and a tag
 * shared by all resources excludes everything.
 */
@Test
public void testSearchWithTagParameterMissing() {
String methodName = "testSearchWithTagParameterMissing";
IIdType tag1id;
{
// First organization: tags 1a and 1b
Organization org = new Organization();
org.getNameElement().setValue("FOO");
org.getMeta().addTag("urn:taglist", methodName + "1a", null);
org.getMeta().addTag("urn:taglist", methodName + "1b", null);
tag1id = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
IIdType tag2id;
{
// Second organization: tag 1b only
Organization org = new Organization();
org.getNameElement().setValue("FOO");
org.getMeta().addTag("urn:taglist", methodName + "1b", null);
tag2id = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
}
{
// One tag
SearchParameterMap params = new SearchParameterMap();
params.add("_tag", new TokenParam("urn:taglist", methodName + "1a").setModifier(TokenParamModifier.NOT));
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag2id));
assertThat(patients, not(containsInAnyOrder(tag1id)));
}
{
// Non-existent tag
SearchParameterMap params = new SearchParameterMap();
params.add("_tag", new TokenParam("urn:taglist", methodName + "FOO").setModifier(TokenParamModifier.NOT));
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag1id, tag2id));
}
{
// Common tag (present on both organizations)
SearchParameterMap params = new SearchParameterMap();
params.add("_tag", new TokenParam("urn:taglist", methodName + "1b").setModifier(TokenParamModifier.NOT));
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, empty());
}
}
/**
 * Verifies the {@code :missing} modifier for token search parameters on
 * Observation.code: missing=false matches only the observation with a code,
 * missing=true matches only the one without.
 */
@Test
public void testSearchWithToken() {
IIdType notMissing;
IIdType missing;
{
// Observation with no code -> should match code:missing=true
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("001");
missing = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
{
// Observation with a code -> should match code:missing=false
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("002");
obs.getCode().addCoding().setSystem("urn:system").setCode("002");
notMissing = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
}
// Token Param
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
TokenParam param = new TokenParam();
param.setMissing(false);
params.add(Observation.SP_CODE, param);
// Capture the generated SQL for diagnostic logging
myCaptureQueriesListener.clear();
List<IIdType> patients = toUnqualifiedVersionlessIds(myObservationDao.search(params));
myCaptureQueriesListener.logSelectQueriesForCurrentThread(0);
assertThat(patients, not(containsInRelativeOrder(missing)));
assertThat(patients, containsInRelativeOrder(notMissing));
}
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
TokenParam param = new TokenParam();
param.setMissing(true);
params.add(Observation.SP_CODE, param);
List<IIdType> patients = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertThat(patients, containsInRelativeOrder(missing));
assertThat(patients, not(containsInRelativeOrder(notMissing)));
}
}
/**
 * Verifies a resource-typed {@code _include}: an Appointment referencing both
 * a Patient and a Practitioner is searched with
 * {@code Appointment:patient}, and only the Patient (not the Practitioner)
 * is pulled in alongside the Appointment.
 *
 * https://chat.fhir.org/#narrow/stream/implementers/topic/Understanding.20_include
 */
@Test
public void testSearchWithTypedInclude() {
IIdType patId;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}
IIdType practId;
{
Practitioner pract = new Practitioner();
pract.addIdentifier().setSystem("urn:system").setValue("001");
practId = myPractitionerDao.create(pract, mySrd).getId().toUnqualifiedVersionless();
}
// Appointment participating with both actors
Appointment appt = new Appointment();
appt.addParticipant().getActor().setReference(patId.getValue());
appt.addParticipant().getActor().setReference(practId.getValue());
IIdType apptId = myAppointmentDao.create(appt, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap params = new SearchParameterMap();
params.addInclude(Appointment.INCLUDE_PATIENT);
// practId must NOT appear: the include is typed to Patient only
assertThat(toUnqualifiedVersionlessIds(myAppointmentDao.search(params)), containsInAnyOrder(patId, apptId));
}
/**
 * Verifies URI search on ValueSet.url: exact match, {@code :below} prefix
 * matching (both on the full URL and a parent path), and a non-matching URL
 * returning nothing.
 */
@Test
public void testSearchWithUriParam() throws Exception {
Class<ValueSet> type = ValueSet.class;
String resourceName = "/valueset-dstu2.json";
// Fixture ValueSet loaded from the classpath; its url is asserted below
ValueSet vs = loadResourceFromClasspath(type, resourceName);
IIdType id1 = myValueSetDao.update(vs, mySrd).getId().toUnqualifiedVersionless();
// Second ValueSet with an unrelated URL that must never match
ValueSet vs2 = new ValueSet();
vs2.setUrl("http://hl7.org/foo/bar");
myValueSetDao.create(vs2, mySrd).getId().toUnqualifiedVersionless();
IBundleProvider result;
result = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org/fhir/ValueSet/basic-resource-type")));
assertThat(toUnqualifiedVersionlessIds(result), contains(id1));
result = myValueSetDao
.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org/fhir/ValueSet/basic-resource-type").setQualifier(UriParamQualifierEnum.BELOW)));
assertThat(toUnqualifiedVersionlessIds(result), contains(id1));
result = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org/fhir/ValueSet/").setQualifier(UriParamQualifierEnum.BELOW)));
assertThat(toUnqualifiedVersionlessIds(result), contains(id1));
result = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org/fhir/ValueSet/FOOOOOO")));
assertThat(toUnqualifiedVersionlessIds(result), empty());
}
/**
 * Verifies the {@code :above} qualifier for URI search: a query URL matches
 * every stored ValueSet.url that is a prefix (ancestor path) of it, and
 * nothing when no stored URL is an ancestor.
 */
@Test
public void testSearchWithUriParamAbove() throws Exception {
ValueSet vs1 = new ValueSet();
vs1.setUrl("http://hl7.org/foo/baz");
myValueSetDao.create(vs1, mySrd).getId().toUnqualifiedVersionless();
ValueSet vs2 = new ValueSet();
vs2.setUrl("http://hl7.org/foo/bar");
IIdType id2 = myValueSetDao.create(vs2, mySrd).getId().toUnqualifiedVersionless();
ValueSet vs3 = new ValueSet();
vs3.setUrl("http://hl7.org/foo/bar/baz");
IIdType id3 = myValueSetDao.create(vs3, mySrd).getId().toUnqualifiedVersionless();
IBundleProvider result;
// Both .../foo/bar and .../foo/bar/baz are ancestors of the query URL
result = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org/foo/bar/baz/boz").setQualifier(UriParamQualifierEnum.ABOVE)));
assertThat(toUnqualifiedVersionlessIds(result), containsInAnyOrder(id2, id3));
result = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org/foo/bar/baz").setQualifier(UriParamQualifierEnum.ABOVE)));
assertThat(toUnqualifiedVersionlessIds(result), containsInAnyOrder(id2, id3));
result = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org/foo/bar").setQualifier(UriParamQualifierEnum.ABOVE)));
assertThat(toUnqualifiedVersionlessIds(result), containsInAnyOrder(id2));
result = myValueSetDao
.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org/fhir/ValueSet/basic-resource-type").setQualifier(UriParamQualifierEnum.ABOVE)));
assertThat(toUnqualifiedVersionlessIds(result), empty());
result = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org").setQualifier(UriParamQualifierEnum.ABOVE)));
assertThat(toUnqualifiedVersionlessIds(result), empty());
}
/**
 * Verifies the {@code :below} qualifier for URI search: a query URL matches
 * every stored ValueSet.url it is a prefix of, narrowing as the query URL
 * gets more specific, down to an empty result for a non-prefix.
 */
@Test
public void testSearchWithUriParamBelow() throws Exception {
// Fail fast on any parsing problem in the classpath fixture
myFhirContext.setParserErrorHandler(new StrictErrorHandler());
Class<ValueSet> type = ValueSet.class;
String resourceName = "/valueset-dstu2.json";
ValueSet vs = loadResourceFromClasspath(type, resourceName);
IIdType id1 = myValueSetDao.update(vs, mySrd).getId().toUnqualifiedVersionless();
ValueSet vs2 = new ValueSet();
vs2.setUrl("http://hl7.org/foo/bar");
IIdType id2 = myValueSetDao.create(vs2, mySrd).getId().toUnqualifiedVersionless();
IBundleProvider result;
result = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://").setQualifier(UriParamQualifierEnum.BELOW)));
assertThat(toUnqualifiedVersionlessIds(result), containsInAnyOrder(id1, id2));
result = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org").setQualifier(UriParamQualifierEnum.BELOW)));
assertThat(toUnqualifiedVersionlessIds(result), containsInAnyOrder(id1, id2));
result = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org/foo").setQualifier(UriParamQualifierEnum.BELOW)));
assertThat(toUnqualifiedVersionlessIds(result), containsInAnyOrder(id2));
result = myValueSetDao.search(new SearchParameterMap().setLoadSynchronous(true).add(ValueSet.SP_URL, new UriParam("http://hl7.org/foo/baz").setQualifier(UriParamQualifierEnum.BELOW)));
assertThat(toUnqualifiedVersionlessIds(result), containsInAnyOrder());
}
/**
 * Verifies ascending sort on {@code _id}: the two forced (alphanumeric) IDs
 * sort before the two server-assigned numeric IDs.
 */
@Test
public void testSortOnId() throws Exception {
// Numeric ID
Patient p01 = new Patient();
p01.setActive(true);
p01.setGender(AdministrativeGender.MALE);
p01.addName().setFamily("B").addGiven("A");
String id1 = myPatientDao.create(p01).getId().toUnqualifiedVersionless().getValue();
// Numeric ID
Patient p02 = new Patient();
p02.setActive(true);
p02.setGender(AdministrativeGender.MALE);
p02.addName().setFamily("B").addGiven("B");
p02.addName().setFamily("Z").addGiven("Z");
String id2 = myPatientDao.create(p02).getId().toUnqualifiedVersionless().getValue();
// Forced ID (client-assigned, created via update)
Patient pAB = new Patient();
pAB.setId("AB");
pAB.setActive(true);
pAB.setGender(AdministrativeGender.MALE);
pAB.addName().setFamily("A").addGiven("B");
myPatientDao.update(pAB);
// Forced ID
Patient pAA = new Patient();
pAA.setId("AA");
pAA.setActive(true);
pAA.setGender(AdministrativeGender.MALE);
pAA.addName().setFamily("A").addGiven("A");
myPatientDao.update(pAA);
SearchParameterMap map;
List<String> ids;
map = new SearchParameterMap();
map.setSort(new SortSpec("_id", SortOrderEnum.ASC));
ids = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
assertThat(ids, contains("Patient/AA", "Patient/AB", id1, id2));
}
/**
 * Verifies ascending sort on {@code _lastUpdated}: patients come back in
 * creation order, with a clock tick between each creation so the timestamps
 * are guaranteed distinct.
 */
@Test
public void testSortOnLastUpdated() {
// Numeric ID
Patient p01 = new Patient();
p01.setActive(true);
p01.setGender(AdministrativeGender.MALE);
p01.addName().setFamily("B").addGiven("A");
String id1 = myPatientDao.create(p01).getId().toUnqualifiedVersionless().getValue();
// Ensure the next resource gets a strictly later lastUpdated timestamp
TestUtil.sleepOneClick();
// Numeric ID
Patient p02 = new Patient();
p02.setActive(true);
p02.setGender(AdministrativeGender.MALE);
p02.addName().setFamily("B").addGiven("B");
p02.addName().setFamily("Z").addGiven("Z");
String id2 = myPatientDao.create(p02).getId().toUnqualifiedVersionless().getValue();
TestUtil.sleepOneClick();
// Forced ID
Patient pAB = new Patient();
pAB.setId("AB");
pAB.setActive(true);
pAB.setGender(AdministrativeGender.MALE);
pAB.addName().setFamily("A").addGiven("B");
myPatientDao.update(pAB);
TestUtil.sleepOneClick();
// Forced ID
Patient pAA = new Patient();
pAA.setId("AA");
pAA.setActive(true);
pAA.setGender(AdministrativeGender.MALE);
pAA.addName().setFamily("A").addGiven("A");
myPatientDao.update(pAA);
SearchParameterMap map;
List<String> ids;
map = new SearchParameterMap();
map.setSort(new SortSpec("_lastUpdated", SortOrderEnum.ASC));
ids = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
// Creation order, not ID order
assertThat(ids, contains(id1, id2, "Patient/AB", "Patient/AA"));
}
/**
 * Verifies chained sort on family then given name when every patient has
 * values for both, combined with no filter, a filter on the same search
 * parameter, a filter on a different parameter, and a leading sort on a
 * parameter all patients share (gender).
 */
@Test
public void testSortOnSearchParameterWhereAllResourcesHaveAValue() throws Exception {
Patient pBA = new Patient();
pBA.setId("BA");
pBA.setActive(true);
pBA.setGender(AdministrativeGender.MALE);
pBA.addName().setFamily("B").addGiven("A");
myPatientDao.update(pBA);
Patient pBB = new Patient();
pBB.setId("BB");
pBB.setActive(true);
pBB.setGender(AdministrativeGender.MALE);
pBB.addName().setFamily("B").addGiven("B");
pBB.addName().setFamily("Z").addGiven("Z");
myPatientDao.update(pBB);
Patient pAB = new Patient();
pAB.setId("AB");
pAB.setActive(true);
pAB.setGender(AdministrativeGender.MALE);
pAB.addName().setFamily("A").addGiven("B");
myPatientDao.update(pAB);
Patient pAA = new Patient();
pAA.setId("AA");
pAA.setActive(true);
pAA.setGender(AdministrativeGender.MALE);
pAA.addName().setFamily("A").addGiven("A");
myPatientDao.update(pAA);
SearchParameterMap map;
List<String> ids;
// No search param
map = new SearchParameterMap();
map.setSort(new SortSpec("family", SortOrderEnum.ASC).setChain(new SortSpec("given", SortOrderEnum.ASC)));
ids = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
assertThat(ids, contains("Patient/AA", "Patient/AB", "Patient/BA", "Patient/BB"));
// Same SP as sort
map = new SearchParameterMap();
map.add(Patient.SP_ACTIVE, new TokenParam(null, "true"));
map.setSort(new SortSpec("family", SortOrderEnum.ASC).setChain(new SortSpec("given", SortOrderEnum.ASC)));
ids = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
assertThat(ids, contains("Patient/AA", "Patient/AB", "Patient/BA", "Patient/BB"));
// Different SP from sort
map = new SearchParameterMap();
map.add(Patient.SP_GENDER, new TokenParam(null, "male"));
map.setSort(new SortSpec("family", SortOrderEnum.ASC).setChain(new SortSpec("given", SortOrderEnum.ASC)));
ids = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
assertThat(ids, contains("Patient/AA", "Patient/AB", "Patient/BA", "Patient/BB"));
// Leading sort on gender (identical for all) must not disturb the chain
map = new SearchParameterMap();
map.setSort(new SortSpec("gender").setChain(new SortSpec("family", SortOrderEnum.ASC).setChain(new SortSpec("given", SortOrderEnum.ASC))));
ids = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
ourLog.info("IDS: {}", ids);
assertThat(ids, contains("Patient/AA", "Patient/AB", "Patient/BA", "Patient/BB"));
// NOTE(review): this repeats the "Same SP as sort" case above — possibly
// a redundant copy; confirm against upstream history before removing
map = new SearchParameterMap();
map.add(Patient.SP_ACTIVE, new TokenParam(null, "true"));
map.setSort(new SortSpec("family", SortOrderEnum.ASC).setChain(new SortSpec("given", SortOrderEnum.ASC)));
ids = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
assertThat(ids, contains("Patient/AA", "Patient/AB", "Patient/BA", "Patient/BB"));
}
/**
 * Verifies chained family/given ascending sort when some patients lack names
 * entirely or lack a given name: resources missing the sort value come back
 * after those that have one (pid1 has no name at all and sorts last).
 */
@SuppressWarnings("unused")
@Test
public void testSortOnSparselyPopulatedFields() {
// pid6 is declared but never assigned a resource in this test
IIdType pid1, pid2, pid3, pid4, pid5, pid6;
{
// No name at all
Patient p = new Patient();
p.setActive(true);
pid1 = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
}
{
// Family only
Patient p = new Patient();
p.addName().setFamily("A");
pid2 = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
}
{
// Family only
Patient p = new Patient();
p.addName().setFamily("B");
pid3 = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
}
{
// Family and given
Patient p = new Patient();
p.addName().setFamily("B").addGiven("A");
pid4 = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
}
{
// Family and given
Patient p = new Patient();
p.addName().setFamily("B").addGiven("B");
pid5 = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
}
SearchParameterMap map;
List<IIdType> ids;
map = new SearchParameterMap();
map.setSort(new SortSpec(Patient.SP_FAMILY, SortOrderEnum.ASC).setChain(new SortSpec(Patient.SP_GIVEN, SortOrderEnum.ASC)));
ids = toUnqualifiedVersionlessIds(myPatientDao.search(map));
assertThat(ids.toString(), ids, contains(pid2, pid4, pid5, pid3, pid1));
assertEquals(5, ids.size());
}
/**
 * Verifies sorting on a search parameter (gender) that only some patients
 * populate: all patients are still returned, the gender-less patient sorts
 * last in the chained sort, and a filtered search excludes it entirely.
 */
@Test
public void testSortOnSparselyPopulatedSearchParameter() {
// CA has no gender and active=false — the sparsely-populated case
Patient pCA = new Patient();
pCA.setId("CA");
pCA.setActive(false);
pCA.getAddressFirstRep().addLine("A");
pCA.addName().setFamily("C").addGiven("A");
pCA.addName().setFamily("Z").addGiven("A");
myPatientDao.update(pCA);
Patient pBA = new Patient();
pBA.setId("BA");
pBA.setActive(true);
pBA.setGender(AdministrativeGender.MALE);
pBA.addName().setFamily("B").addGiven("A");
myPatientDao.update(pBA);
Patient pBB = new Patient();
pBB.setId("BB");
pBB.setActive(true);
pBB.setGender(AdministrativeGender.MALE);
pBB.addName().setFamily("B").addGiven("B");
myPatientDao.update(pBB);
Patient pAB = new Patient();
pAB.setId("AB");
pAB.setActive(true);
pAB.setGender(AdministrativeGender.MALE);
pAB.addName().setFamily("A").addGiven("B");
myPatientDao.update(pAB);
Patient pAA = new Patient();
pAA.setId("AA");
pAA.setActive(true);
pAA.setGender(AdministrativeGender.MALE);
pAA.addName().setFamily("A").addGiven("A");
myPatientDao.update(pAA);
SearchParameterMap map;
List<String> ids;
// Sorting on gender alone must not drop the gender-less patient
map = new SearchParameterMap();
map.setSort(new SortSpec("gender"));
ids = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
ourLog.info("IDS: {}", ids);
assertThat(ids.toString(), ids, containsInAnyOrder("Patient/AA", "Patient/AB", "Patient/BA", "Patient/BB", "Patient/CA"));
// Chained sort: CA (no gender) sorts last
map = new SearchParameterMap();
map.setSort(new SortSpec("gender").setChain(new SortSpec("family", SortOrderEnum.ASC).setChain(new SortSpec("given", SortOrderEnum.ASC))));
ids = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
ourLog.info("IDS: {}", ids);
assertThat(ids.toString(), ids, contains("Patient/AA", "Patient/AB", "Patient/BA", "Patient/BB", "Patient/CA"));
// Filtering on active=true excludes CA
map = new SearchParameterMap();
map.add(Patient.SP_ACTIVE, new TokenParam(null, "true"));
map.setSort(new SortSpec("family", SortOrderEnum.ASC).setChain(new SortSpec("given", SortOrderEnum.ASC)));
ids = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
assertThat(ids.toString(), ids, contains("Patient/AA", "Patient/AB", "Patient/BA", "Patient/BB"));
}
/**
 * Verifies that updating an Observation to replace its subject reference with
 * a performer reference updates the existing resource-link index row in place
 * (asserted via captured SQL: fixed counts of select/update/insert/delete
 * statements and an UPDATE that rewrites the link path, source and target).
 */
@Test
public void testReplaceLinkSearchIndex() {
Patient pt = new Patient();
IIdType ptId = myPatientDao.create(pt).getId().toVersionless();
// Observation initially points at the Patient via subject
Observation obs = new Observation();
obs.setSubject(new Reference(ptId));
IIdType obsId = myObservationDao.create(obs).getId().toVersionless();
Practitioner pr = new Practitioner();
IIdType prId = myPractitionerDao.create(pr).getId().toVersionless();
// Replace the subject link with a performer link to the Practitioner
obs.setId(obsId);
obs.setSubject(null);
obs.addPerformer(new Reference(prId));
myCaptureQueriesListener.clear();
myObservationDao.update(obs);
// Exact query counts pin the indexing write behavior; a change here means
// the link-reindexing strategy changed
assertEquals(10, myCaptureQueriesListener.countSelectQueries());
assertEquals(5, myCaptureQueriesListener.countUpdateQueries());
assertEquals(1, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
String unformattedSql = myCaptureQueriesListener.getUpdateQueriesForCurrentThread().get(0).getSql(true, false);
assertThat(unformattedSql, stringContainsInOrder(
"SRC_PATH='Observation.performer'",
"SRC_RESOURCE_ID='" + obsId.getIdPart() + "'",
"TARGET_RESOURCE_ID='" + prId.getIdPart() + "'",
"TARGET_RESOURCE_TYPE='Practitioner'"
));
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
}
/**
 * Renders each element of the given list on its own line, prefixed with
 * a newline and " * ", for readable multi-line diagnostic output.
 *
 * @param theResults the items to render; each item's toString() is used
 * @return the concatenated multi-line representation (empty for an empty list)
 */
private String toStringMultiline(List<?> theResults) {
	StringBuilder builder = new StringBuilder();
	for (Object item : theResults) {
		builder.append('\n').append(" * ").append(item.toString());
	}
	return builder.toString();
}
}
|
package works.hop.javro.jdbc.sample.account;
/**
 * Accessor contract for a postal address consisting of a city, a state and a
 * ZIP code.
 */
public interface IAddress {
/** @return the city component of the address. */
String getCity();
/** @param city the city component of the address. */
void setCity(String city);
/** @return the state component of the address. */
String getState();
/** @param state the state component of the address. */
void setState(String state);
/** @return the ZIP code component of the address. */
String getZipCode();
/** @param zipCode the ZIP code component of the address. */
void setZipCode(String zipCode);
}
|
package extrabiomes.plugins.redpower;
import java.util.Random;
import net.minecraft.src.World;
import net.minecraft.src.WorldGenFlowers;
import extrabiomes.api.IBiomeDecoration;
import extrabiomes.plugins.PluginRedPower;
/**
 * Biome decoration that scatters RedPower plants around a chunk by running a
 * configurable number of {@link WorldGenFlowers} placement attempts.
 */
public class BiomeDecorationPlants implements IBiomeDecoration {

	/** Number of flower-generation attempts per decoration pass. */
	private final int iterations;

	/** Creates a decoration that performs a single generation attempt. */
	public BiomeDecorationPlants() {
		this(1);
	}

	/**
	 * @param iterations number of generation attempts per decoration pass;
	 *                   values of zero or less result in no placement
	 */
	public BiomeDecorationPlants(int iterations) {
		this.iterations = iterations;
	}

	@Override
	public void decorate(World world, Random rand, int chunkX, int chunkZ) {
		for (int a = 0; a < iterations; a++) {
			// Random position inside the chunk, offset by 8 blocks, with a
			// random height in [0, 128)
			int x = chunkX + rand.nextInt(16) + 8;
			int y = rand.nextInt(128);
			int z = chunkZ + rand.nextInt(16) + 8;
			new WorldGenFlowers(PluginRedPower.idPlants).generate(world, rand, x, y, z);
		}
	}
}
|
package com.mardous.concurrency.task;
import androidx.annotation.NonNull;
import com.mardous.concurrency.AsyncCallable;
import com.mardous.concurrency.ResultFilter;
import com.mardous.concurrency.Utils;
import com.mardous.concurrency.internal.Predicate;
/**
* The {@link TaskBuilder builder} used to create {@link ResultTask result tasks}.
*
* @author Christians Martínez Alvarado (mardous)
*/
public class ResultTaskBuilder<Result> extends TaskBuilder<ResultTask<Result>> {

	/** The action the built task will execute asynchronously. */
	private final AsyncCallable<Result> action;

	/** Accumulates the predicates used to validate the task's result. */
	private final ResultFilter<Result> resultFilter = new ResultFilter<>();

	/**
	 * Creates a builder for a task that executes the given action.
	 *
	 * @param action The action to execute asynchronously.
	 */
	public ResultTaskBuilder(@NonNull AsyncCallable<Result> action) {
		this.action = action;
	}

	/**
	 * Adds a {@link Predicate predicate} that will be used to
	 * test the result of this task.
	 * <p>All predicates are added to a {@link ResultFilter filter},
	 * so when the task finishes its execution, and there is a result,
	 * the filter will proceed to iterate through all predicates testing
	 * them on the result.
	 * <p>If a test is not passed, that result is declared as unacceptable,
	 * and therefore, the {@link AsyncCallable#onBadResult(Object)} method
	 * is called immediately.
	 *
	 * @param predicate The {@link Predicate predicate} that will be used
	 *                  to test the result of this task.
	 * @return This same builder.
	 */
	public ResultTaskBuilder<Result> addFilter(Predicate<Result> predicate) {
		this.resultFilter.add(predicate);
		return this;
	}

	/**
	 * Declares a value that must not be accepted as a result.
	 *
	 * @param not The value results will be compared against; results equal
	 *            to it are rejected.
	 * @return This same builder.
	 */
	public ResultTaskBuilder<Result> notAccepts(Result not) {
		return addFilter(result -> !Utils.equals(result, not));
	}

	/**
	 * Configures whether a {@code null} result is acceptable.
	 *
	 * @param acceptsNull {@code true} to accept {@code null} results,
	 *                    {@code false} to reject them.
	 * @return This same builder.
	 */
	public ResultTaskBuilder<Result> acceptsNull(boolean acceptsNull) {
		return addFilter(result -> acceptsNull || result != null);
	}

	@Override
	public ResultTask<Result> create() {
		// Diamond operator: the type argument is inferred from this builder
		return new ResultTask<>(this, action, resultFilter);
	}

	@Override
	public ResultTask<Result> execute() {
		return create().execute();
	}
}
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2018 Yegor Bugayenko
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.zold.api;
import org.cactoos.iterable.IterableOf;
import org.junit.Test;
import org.mockito.Mockito;
/**
* Test case for {@link Taxes}.
*
* @since 1.0
* @checkstyle JavadocMethodCheck (500 lines)
*/
public final class TaxesTest {
@Test(expected = UnsupportedOperationException.class)
public void payNotYetSupported() throws Exception {
// Per the test name, Taxes.exec is not yet supported and is expected to
// throw UnsupportedOperationException for any wallet
new Taxes(new IterableOf<>()).exec(Mockito.mock(Wallet.class));
}
}
|
/*
* Copyright (C) 2010 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.base;
import com.google.common.annotations.GwtCompatible;
/**
* Static methods pertaining to ASCII characters (those in the range of values
* {@code 0x00} through {@code 0x7F}), and to strings containing such
* characters.
*
* <p>ASCII utilities also exist in other classes of this package:
* <ul>
* <!-- TODO(kevinb): how can we make this not produce a warning when building gwt javadoc? -->
* <li>{@link Charsets#US_ASCII} specifies the {@code Charset} of ASCII characters.
* <li>{@link CharMatcher#ASCII} matches ASCII characters and provides text processing methods
* which operate only on the ASCII characters of a string.
* </ul>
*
* @author Craig Berry
* @author Gregory Kick
* @since 7.0
*/
@GwtCompatible
public final class Ascii {
  private Ascii() {}

  /* The ASCII control characters, per RFC 20. */

  /**
   * Null ('\0'): The all-zeros character which may serve to accomplish
   * time fill and media fill. Normally used as a C string terminator.
   * <p>Although RFC 20 names this as "Null", note that it is distinct
   * from the C/C++ "NULL" pointer.
   *
   * @since 8.0
   */
  public static final byte NUL = 0;

  /**
   * Start of Heading: A communication control character used at
   * the beginning of a sequence of characters which constitute a
   * machine-sensible address or routing information. Such a sequence is
   * referred to as the "heading." An STX character has the effect of
   * terminating a heading.
   *
   * @since 8.0
   */
  public static final byte SOH = 1;

  /**
   * Start of Text: A communication control character which
   * precedes a sequence of characters that is to be treated as an entity
   * and entirely transmitted through to the ultimate destination. Such a
   * sequence is referred to as "text." STX may be used to terminate a
   * sequence of characters started by SOH.
   *
   * @since 8.0
   */
  public static final byte STX = 2;

  /**
   * End of Text: A communication control character used to
   * terminate a sequence of characters started with STX and transmitted
   * as an entity.
   *
   * @since 8.0
   */
  public static final byte ETX = 3;

  /**
   * End of Transmission: A communication control character used
   * to indicate the conclusion of a transmission, which may have
   * contained one or more texts and any associated headings.
   *
   * @since 8.0
   */
  public static final byte EOT = 4;

  /**
   * Enquiry: A communication control character used in data
   * communication systems as a request for a response from a remote
   * station. It may be used as a "Who Are You" (WRU) to obtain
   * identification, or may be used to obtain station status, or both.
   *
   * @since 8.0
   */
  public static final byte ENQ = 5;

  /**
   * Acknowledge: A communication control character transmitted
   * by a receiver as an affirmative response to a sender.
   *
   * @since 8.0
   */
  public static final byte ACK = 6;

  /**
   * Bell ('\a'): A character for use when there is a need to call for
   * human attention. It may control alarm or attention devices.
   *
   * @since 8.0
   */
  public static final byte BEL = 7;

  /**
   * Backspace ('\b'): A format effector which controls the movement of
   * the printing position one printing space backward on the same
   * printing line. (Applicable also to display devices.)
   *
   * @since 8.0
   */
  public static final byte BS = 8;

  /**
   * Horizontal Tabulation ('\t'): A format effector which controls the
   * movement of the printing position to the next in a series of
   * predetermined positions along the printing line. (Applicable also to
   * display devices and the skip function on punched cards.)
   *
   * @since 8.0
   */
  public static final byte HT = 9;

  /**
   * Line Feed ('\n'): A format effector which controls the movement of
   * the printing position to the next printing line. (Applicable also to
   * display devices.) Where appropriate, this character may have the
   * meaning "New Line" (NL), a format effector which controls the
   * movement of the printing point to the first printing position on the
   * next printing line. Use of this convention requires agreement
   * between sender and recipient of data.
   *
   * @since 8.0
   */
  public static final byte LF = 10;

  /**
   * Alternate name for {@link #LF}. ({@code LF} is preferred.)
   *
   * @since 8.0
   */
  public static final byte NL = 10;

  /**
   * Vertical Tabulation ('\v'): A format effector which controls the
   * movement of the printing position to the next in a series of
   * predetermined printing lines. (Applicable also to display devices.)
   *
   * @since 8.0
   */
  public static final byte VT = 11;

  /**
   * Form Feed ('\f'): A format effector which controls the movement of
   * the printing position to the first pre-determined printing line on
   * the next form or page. (Applicable also to display devices.)
   *
   * @since 8.0
   */
  public static final byte FF = 12;

  /**
   * Carriage Return ('\r'): A format effector which controls the
   * movement of the printing position to the first printing position on
   * the same printing line. (Applicable also to display devices.)
   *
   * @since 8.0
   */
  public static final byte CR = 13;

  /**
   * Shift Out: A control character indicating that the code
   * combinations which follow shall be interpreted as outside of the
   * character set of the standard code table until a Shift In character
   * is reached.
   *
   * @since 8.0
   */
  public static final byte SO = 14;

  /**
   * Shift In: A control character indicating that the code
   * combinations which follow shall be interpreted according to the
   * standard code table.
   *
   * @since 8.0
   */
  public static final byte SI = 15;

  /**
   * Data Link Escape: A communication control character which
   * will change the meaning of a limited number of contiguously following
   * characters. It is used exclusively to provide supplementary controls
   * in data communication networks.
   *
   * @since 8.0
   */
  public static final byte DLE = 16;

  /**
   * Device Control 1. Characters for the control
   * of ancillary devices associated with data processing or
   * telecommunication systems, more especially switching devices "on" or
   * "off." (If a single "stop" control is required to interrupt or turn
   * off ancillary devices, DC4 is the preferred assignment.)
   *
   * @since 8.0
   */
  public static final byte DC1 = 17; // aka XON

  /**
   * Transmission On: Although originally defined as DC1, this ASCII
   * control character is now better known as the XON code used for software
   * flow control in serial communications. The main use is restarting
   * the transmission after the communication has been stopped by the XOFF
   * control code.
   *
   * @since 8.0
   */
  public static final byte XON = 17; // aka DC1

  /**
   * Device Control 2. Characters for the control
   * of ancillary devices associated with data processing or
   * telecommunication systems, more especially switching devices "on" or
   * "off." (If a single "stop" control is required to interrupt or turn
   * off ancillary devices, DC4 is the preferred assignment.)
   *
   * @since 8.0
   */
  public static final byte DC2 = 18;

  /**
   * Device Control 3. Characters for the control
   * of ancillary devices associated with data processing or
   * telecommunication systems, more especially switching devices "on" or
   * "off." (If a single "stop" control is required to interrupt or turn
   * off ancillary devices, DC4 is the preferred assignment.)
   *
   * @since 8.0
   */
  public static final byte DC3 = 19; // aka XOFF

  /**
   * Transmission off. See {@link #XON} for explanation.
   *
   * @since 8.0
   */
  public static final byte XOFF = 19; // aka DC3

  /**
   * Device Control 4. Characters for the control
   * of ancillary devices associated with data processing or
   * telecommunication systems, more especially switching devices "on" or
   * "off." (If a single "stop" control is required to interrupt or turn
   * off ancillary devices, DC4 is the preferred assignment.)
   *
   * @since 8.0
   */
  public static final byte DC4 = 20;

  /**
   * Negative Acknowledge: A communication control character
   * transmitted by a receiver as a negative response to the sender.
   *
   * @since 8.0
   */
  public static final byte NAK = 21;

  /**
   * Synchronous Idle: A communication control character used by
   * a synchronous transmission system in the absence of any other
   * character to provide a signal from which synchronism may be achieved
   * or retained.
   *
   * @since 8.0
   */
  public static final byte SYN = 22;

  /**
   * End of Transmission Block: A communication control character
   * used to indicate the end of a block of data for communication
   * purposes. ETB is used for blocking data where the block structure is
   * not necessarily related to the processing format.
   *
   * @since 8.0
   */
  public static final byte ETB = 23;

  /**
   * Cancel: A control character used to indicate that the data
   * with which it is sent is in error or is to be disregarded.
   *
   * @since 8.0
   */
  public static final byte CAN = 24;

  /**
   * End of Medium: A control character associated with the sent
   * data which may be used to identify the physical end of the medium, or
   * the end of the used, or wanted, portion of information recorded on a
   * medium. (The position of this character does not necessarily
   * correspond to the physical end of the medium.)
   *
   * @since 8.0
   */
  public static final byte EM = 25;

  /**
   * Substitute: A character that may be substituted for a
   * character which is determined to be invalid or in error.
   *
   * @since 8.0
   */
  public static final byte SUB = 26;

  /**
   * Escape: A control character intended to provide code
   * extension (supplementary characters) in general information
   * interchange. The Escape character itself is a prefix affecting the
   * interpretation of a limited number of contiguously following
   * characters.
   *
   * @since 8.0
   */
  public static final byte ESC = 27;

  /**
   * File Separator: These four information separators may be
   * used within data in optional fashion, except that their hierarchical
   * relationship shall be: FS is the most inclusive, then GS, then RS,
   * and US is least inclusive. (The content and length of a File, Group,
   * Record, or Unit are not specified.)
   *
   * @since 8.0
   */
  public static final byte FS = 28;

  /**
   * Group Separator: These four information separators may be
   * used within data in optional fashion, except that their hierarchical
   * relationship shall be: FS is the most inclusive, then GS, then RS,
   * and US is least inclusive. (The content and length of a File, Group,
   * Record, or Unit are not specified.)
   *
   * @since 8.0
   */
  public static final byte GS = 29;

  /**
   * Record Separator: These four information separators may be
   * used within data in optional fashion, except that their hierarchical
   * relationship shall be: FS is the most inclusive, then GS, then RS,
   * and US is least inclusive. (The content and length of a File, Group,
   * Record, or Unit are not specified.)
   *
   * @since 8.0
   */
  public static final byte RS = 30;

  /**
   * Unit Separator: These four information separators may be
   * used within data in optional fashion, except that their hierarchical
   * relationship shall be: FS is the most inclusive, then GS, then RS,
   * and US is least inclusive. (The content and length of a File, Group,
   * Record, or Unit are not specified.)
   *
   * @since 8.0
   */
  public static final byte US = 31;

  /**
   * Space: A normally non-printing graphic character used to
   * separate words. It is also a format effector which controls the
   * movement of the printing position, one printing position forward.
   * (Applicable also to display devices.)
   *
   * @since 8.0
   */
  public static final byte SP = 32;

  /**
   * Alternate name for {@link #SP}.
   *
   * @since 8.0
   */
  public static final byte SPACE = 32;

  /**
   * Delete: This character is used primarily to "erase" or
   * "obliterate" erroneous or unwanted characters in perforated tape.
   *
   * @since 8.0
   */
  public static final byte DEL = 127;

  /**
   * The minimum value of an ASCII character.
   *
   * @since 9.0 (was type {@code int} before 12.0)
   */
  public static final char MIN = 0;

  /**
   * The maximum value of an ASCII character.
   *
   * @since 9.0 (was type {@code int} before 12.0)
   */
  public static final char MAX = 127;

  /**
   * Returns a string in which all {@linkplain #isUpperCase(char) uppercase ASCII
   * characters} of the input have been converted to lowercase. All other characters are
   * copied without modification. If the input contains no uppercase ASCII characters,
   * the input string itself is returned without allocating a copy.
   */
  public static String toLowerCase(String string) {
    int length = string.length();
    // Fast path: scan for the first uppercase character; if none exists the
    // input can be returned as-is, avoiding a StringBuilder allocation.
    for (int i = 0; i < length; i++) {
      if (isUpperCase(string.charAt(i))) {
        char[] chars = string.toCharArray();
        for (; i < length; i++) {
          char c = chars[i];
          if (isUpperCase(c)) {
            // Flipping bit 0x20 maps 'A'-'Z' onto 'a'-'z'.
            chars[i] = (char) (c ^ 0x20);
          }
        }
        return String.valueOf(chars);
      }
    }
    return string;
  }

  /**
   * If the argument is an {@linkplain #isUpperCase(char) uppercase ASCII character} returns the
   * lowercase equivalent. Otherwise returns the argument.
   */
  public static char toLowerCase(char c) {
    return isUpperCase(c) ? (char) (c ^ 0x20) : c;
  }

  /**
   * Returns a string in which all {@linkplain #isLowerCase(char) lowercase ASCII
   * characters} of the input have been converted to uppercase. All other characters are
   * copied without modification. If the input contains no lowercase ASCII characters,
   * the input string itself is returned without allocating a copy.
   */
  public static String toUpperCase(String string) {
    int length = string.length();
    // Fast path mirrors toLowerCase(String): only copy when a conversion is needed.
    for (int i = 0; i < length; i++) {
      if (isLowerCase(string.charAt(i))) {
        char[] chars = string.toCharArray();
        for (; i < length; i++) {
          char c = chars[i];
          if (isLowerCase(c)) {
            // Clearing bit 0x20 (mask 0x5f) maps 'a'-'z' onto 'A'-'Z'.
            chars[i] = (char) (c & 0x5f);
          }
        }
        return String.valueOf(chars);
      }
    }
    return string;
  }

  /**
   * If the argument is a {@linkplain #isLowerCase(char) lowercase ASCII character} returns the
   * uppercase equivalent. Otherwise returns the argument.
   */
  public static char toUpperCase(char c) {
    return isLowerCase(c) ? (char) (c & 0x5f) : c;
  }

  /**
   * Indicates whether {@code c} is one of the twenty-six lowercase ASCII alphabetic characters
   * between {@code 'a'} and {@code 'z'} inclusive. All others (including non-ASCII characters)
   * return {@code false}.
   */
  public static boolean isLowerCase(char c) {
    return (c >= 'a') && (c <= 'z');
  }

  /**
   * Indicates whether {@code c} is one of the twenty-six uppercase ASCII alphabetic characters
   * between {@code 'A'} and {@code 'Z'} inclusive. All others (including non-ASCII characters)
   * return {@code false}.
   */
  public static boolean isUpperCase(char c) {
    return (c >= 'A') && (c <= 'Z');
  }
}
|
/*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb.internal.connection;
import com.mongodb.MongoException;
import com.mongodb.connection.BufferProvider;
import com.mongodb.connection.ConnectionDescription;
import com.mongodb.connection.ConnectionId;
import com.mongodb.connection.ServerDescription;
import com.mongodb.connection.ServerId;
import com.mongodb.connection.ServerType;
import com.mongodb.internal.async.SingleResultCallback;
import com.mongodb.internal.session.SessionContext;
import org.bson.BsonBinaryReader;
import org.bson.BsonDocument;
import org.bson.ByteBuf;
import org.bson.ByteBufNIO;
import org.bson.codecs.BsonDocumentCodec;
import org.bson.codecs.Decoder;
import org.bson.io.BsonInput;
import org.bson.io.ByteBufferBsonInput;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Collections;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;
import static com.mongodb.internal.connection.ProtocolHelper.getCommandFailureException;
import static com.mongodb.internal.connection.ProtocolHelper.isCommandOk;
/**
 * A scripted {@link InternalConnection} test double. Callers enqueue a sequence of
 * {@link Interaction}s (a canned reply, a send failure, or a receive failure) and the
 * connection plays them back in FIFO order, recording every message sent.
 */
class TestInternalConnection implements InternalConnection {

    /**
     * One scripted exchange. Exactly one field is expected to be set, selecting
     * whether the connection replies normally, throws on send, or throws on receive.
     */
    private static class Interaction {
        private ResponseBuffers responseBuffers;
        private RuntimeException receiveException;
        private RuntimeException sendException;
    }

    private final ConnectionDescription description;
    private final BufferProvider bufferProvider;
    // FIFO queue of scripted interactions; head is consumed by receiveMessage.
    private final Deque<Interaction> replies;
    // Raw BSON of every message successfully "sent", for later inspection by tests.
    private final List<BsonInput> sent;
    private boolean opened;
    private boolean closed;

    TestInternalConnection(final ServerId serverId) {
        this.description = new ConnectionDescription(new ConnectionId(serverId), 0, ServerType.UNKNOWN, 0, 0, 0,
                Collections.<String>emptyList());
        this.bufferProvider = new SimpleBufferProvider();
        this.replies = new LinkedList<Interaction>();
        this.sent = new LinkedList<BsonInput>();
    }

    /** Queues a canned reply to be returned by the next unmatched receiveMessage call. */
    public void enqueueReply(final ResponseBuffers responseBuffers) {
        Interaction interaction = new Interaction();
        interaction.responseBuffers = responseBuffers;
        replies.add(interaction);
    }

    /** Queues an exception to be thrown from the next matching sendMessage call. */
    public void enqueueSendMessageException(final RuntimeException e) {
        Interaction interaction = new Interaction();
        interaction.sendException = e;
        replies.add(interaction);
    }

    /** Queues an exception to be thrown from the next matching receiveMessage call. */
    public void enqueueReceiveMessageException(final RuntimeException e) {
        Interaction interaction = new Interaction();
        interaction.receiveException = e;
        replies.add(interaction);
    }

    /** Returns the recorded sent messages (live list, not a copy). */
    public List<BsonInput> getSent() {
        return sent;
    }

    @Override
    public ConnectionDescription getDescription() {
        return description;
    }

    @Override
    public ServerDescription getInitialServerDescription() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void open() {
        opened = true;
    }

    @Override
    public void openAsync(final SingleResultCallback<Void> callback) {
        opened = true;
        callback.onResult(null, null);
    }

    @Override
    public void close() {
        closed = true;
    }

    @Override
    public boolean opened() {
        return opened;
    }

    @Override
    public boolean isClosed() {
        return closed;
    }

    @Override
    public void sendMessage(final List<ByteBuf> byteBuffers, final int lastRequestId) {
        // Repackage all byte buffers into a single byte buffer so the whole
        // outgoing message can be recorded as one BsonInput.
        int totalSize = 0;
        for (ByteBuf buf : byteBuffers) {
            totalSize += buf.remaining();
        }
        ByteBuffer combined = ByteBuffer.allocate(totalSize);
        for (ByteBuf buf : byteBuffers) {
            combined.put(buf.array(), 0, buf.remaining());
        }
        ((Buffer) combined).flip();
        // Peek (do not remove) the head interaction: the reply stays queued so a
        // later receiveMessage call can consume it. Only its responseTo field is
        // rewritten here to correlate with this request id.
        Interaction interaction = replies.getFirst();
        if (interaction.responseBuffers != null) {
            ReplyHeader header = replaceResponseTo(interaction.responseBuffers.getReplyHeader(), lastRequestId);
            interaction.responseBuffers = (new ResponseBuffers(header, interaction.responseBuffers.getBodyByteBuffer()));
            sent.add(new ByteBufferBsonInput(new ByteBufNIO(combined)));
        } else if (interaction.sendException != null) {
            replies.removeFirst();
            throw interaction.sendException;
        }
        // NOTE(review): if the head interaction carries a receiveException, the send
        // is deliberately neither recorded nor failed -- the failure surfaces later
        // in receiveMessage.
    }

    @Override
    public <T> T sendAndReceive(final CommandMessage message, final Decoder<T> decoder, final SessionContext sessionContext) {
        ByteBufferBsonOutput bsonOutput = new ByteBufferBsonOutput(this);
        try {
            message.encode(bsonOutput, sessionContext);
            sendMessage(bsonOutput.getByteBuffers(), message.getId());
        } finally {
            bsonOutput.close();
        }
        ResponseBuffers responseBuffers = receiveMessage(message.getId());
        try {
            boolean commandOk = isCommandOk(new BsonBinaryReader(new ByteBufferBsonInput(responseBuffers.getBodyByteBuffer())));
            // Rewind so the body can be decoded again below.
            responseBuffers.reset();
            if (!commandOk) {
                throw getCommandFailureException(getResponseDocument(responseBuffers, message, new BsonDocumentCodec()),
                        description.getServerAddress());
            }
            return new ReplyMessage<T>(responseBuffers, decoder, message.getId()).getDocuments().get(0);
        } finally {
            responseBuffers.close();
        }
    }

    @Override
    public <T> void send(final CommandMessage message, final Decoder<T> decoder, final SessionContext sessionContext) {
        throw new UnsupportedOperationException();
    }

    @Override
    public <T> T receive(final Decoder<T> decoder, final SessionContext sessionContext) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean hasMoreToCome() {
        throw new UnsupportedOperationException();
    }

    /** Decodes the single reply document, rewinding the buffers afterwards. */
    private <T extends BsonDocument> T getResponseDocument(final ResponseBuffers responseBuffers,
                                                           final CommandMessage commandMessage, final Decoder<T> decoder) {
        ReplyMessage<T> replyMessage = new ReplyMessage<T>(responseBuffers, decoder, commandMessage.getId());
        responseBuffers.reset();
        return replyMessage.getDocuments().get(0);
    }

    @Override
    public <T> void sendAndReceiveAsync(final CommandMessage message, final Decoder<T> decoder,
                                        final SessionContext sessionContext, final SingleResultCallback<T> callback) {
        try {
            T result = sendAndReceive(message, decoder, sessionContext);
            callback.onResult(result, null);
        } catch (MongoException ex) {
            // NOTE(review): only MongoException is routed to the callback; any other
            // RuntimeException propagates to the caller -- presumably intentional for a
            // test double, confirm if broader handling is ever needed.
            callback.onResult(null, ex);
        }
    }

    /**
     * Rebuilds the 36-byte reply header with the given responseTo value so a queued
     * canned reply correlates with the request that was just "sent".
     */
    private ReplyHeader replaceResponseTo(final ReplyHeader header, final int responseTo) {
        ByteBuffer headerByteBuffer = ByteBuffer.allocate(36);
        headerByteBuffer.order(ByteOrder.LITTLE_ENDIAN);
        headerByteBuffer.putInt(header.getMessageLength());
        headerByteBuffer.putInt(header.getRequestId());
        headerByteBuffer.putInt(responseTo);
        headerByteBuffer.putInt(1);
        headerByteBuffer.putInt(header.getResponseFlags());
        headerByteBuffer.putLong(header.getCursorId());
        headerByteBuffer.putInt(header.getStartingFrom());
        headerByteBuffer.putInt(header.getNumberReturned());
        ((Buffer) headerByteBuffer).flip();
        ByteBufNIO buffer = new ByteBufNIO(headerByteBuffer);
        MessageHeader messageHeader = new MessageHeader(buffer, ConnectionDescription.getDefaultMaxMessageSize());
        return new ReplyHeader(buffer, messageHeader);
    }

    @Override
    public ResponseBuffers receiveMessage(final int responseTo) {
        if (this.replies.isEmpty()) {
            throw new MongoException("Test was not setup properly as too many calls to receiveMessage occurred.");
        }
        Interaction interaction = replies.removeFirst();
        if (interaction.responseBuffers != null) {
            return interaction.responseBuffers;
        } else {
            throw interaction.receiveException;
        }
    }

    @Override
    public void sendMessageAsync(final List<ByteBuf> byteBuffers, final int lastRequestId, final SingleResultCallback<Void> callback) {
        try {
            sendMessage(byteBuffers, lastRequestId);
            callback.onResult(null, null);
        } catch (RuntimeException e) {
            callback.onResult(null, e);
        }
    }

    @Override
    public void receiveMessageAsync(final int responseTo, final SingleResultCallback<ResponseBuffers> callback) {
        try {
            ResponseBuffers buffers = receiveMessage(responseTo);
            callback.onResult(buffers, null);
        } catch (MongoException ex) {
            callback.onResult(null, ex);
        }
    }

    @Override
    public ByteBuf getBuffer(final int size) {
        return this.bufferProvider.getBuffer(size);
    }
}
|
/*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.haulmont.cuba.gui.components;
import com.haulmont.chile.core.model.MetaClass;
/**
* The interface is implemented by generic filter components. It contains low-level methods
* that generally should not be used in client code
*/
public interface FilterImplementation {

    /**
     * @return the meta-class of the entity the filter component operates on
     */
    MetaClass getEntityMetaClass();

    /**
     * @return the alias under which the filtered entity is referenced
     *         (presumably in the underlying query -- confirm with implementations)
     */
    String getEntityAlias();

    /**
     * Loads the available filter definitions and applies the default one, if any.
     */
    void loadFiltersAndApplyDefault();
}
|
package tconstruct.items.blocks;
import java.util.List;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemStack;
import net.minecraft.util.MathHelper;
/**
 * ItemBlock wrapper for the slime grass block, exposing its two sub-block
 * variants ("blue" and "dirt") via item damage/metadata.
 */
public class SlimeGrassItemBlock extends ItemBlock
{
    /** Variant names, indexed by item damage; order must match the block's metadata. */
    public static final String[] blockType = { "blue", "dirt" };

    public SlimeGrassItemBlock(int id)
    {
        super(id);
        // Damage encodes the variant, not durability.
        setMaxDamage(0);
        setHasSubtypes(true);
    }

    /** Metadata maps 1:1 to item damage so each variant places its own sub-block. */
    public int getMetadata (int meta)
    {
        return meta;
    }

    /**
     * Builds the unlocalized name from the variant name, clamping out-of-range
     * damage values into the valid index range.
     */
    public String getUnlocalizedName (ItemStack itemstack)
    {
        int pos = MathHelper.clamp_int(itemstack.getItemDamage(), 0, blockType.length - 1);
        return "block.slime.grass." + blockType[pos];
    }
}
|
/*
* Copyright (C) 2016 Michigan State University Board of Trustees
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package edu.msu.cme.rdp.primerdesign.utils;
import edu.msu.cme.rdp.readseq.readers.Sequence;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
/**
*
* @author leotift
*/
public class LeastOccurTest {

    public LeastOccurTest() {
    }

    /**
     * Builds a base-position map from four short oligo sequences and prints the
     * per-position [highest %, lowest %] occurrence values for manual inspection.
     */
    @Test
    public void test() throws IOException {
        System.out.print("Position");
        System.out.print('\t');
        System.out.println("[Highest % Occur,Lowest % Occur]");
        Sequence oligo1 = new Sequence("seq1","","CGTTGA");
        Sequence oligo2 = new Sequence("seq1","","CGTTGA");
        Sequence oligo3 = new Sequence("seq1","","CATTGT");
        Sequence oligo4 = new Sequence("seq1","","GTATAT");
        // Diamond instead of the original raw ArrayList (unchecked warning).
        List<Sequence> allSeqs = new ArrayList<>();
        allSeqs.add(oligo1);
        allSeqs.add(oligo2);
        allSeqs.add(oligo3);
        allSeqs.add(oligo4);
        RefSetAnalysis analysis = new RefSetAnalysis();
        analysis.buildBasePositionMap(allSeqs);
        for(int position : analysis.getBasePositionMap().keySet()) {
            System.out.print(position);
            System.out.print('\t');
            System.out.print('\t');
            System.out.print('\t');
            System.out.println(analysis.getBasePositionMap().get(position));
        }
    }
}
|
/* DefaultTableModel.java --
Copyright (C) 2002, 2004, 2005, 2006, Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package javax.swing.table;
import java.io.Serializable;
import java.util.Vector;
import javax.swing.event.TableModelEvent;
/**
* A two dimensional data structure used to store <code>Object</code>
* instances, usually for display in a <code>JTable</code> component.
*
* @author Andrew Selkirk
*/
public class DefaultTableModel extends AbstractTableModel
implements Serializable
{
static final long serialVersionUID = 6680042567037222321L;
/**
* Storage for the rows in the table (each row is itself
* a <code>Vector</code>).
*/
protected Vector dataVector;
/**
* Storage for the column identifiers.
*/
protected Vector columnIdentifiers;
/**
* Creates an empty table with zero rows and zero columns.
*/
  public DefaultTableModel()
  {
    // Delegate to the (rows, columns) constructor with an empty 0x0 table.
    this(0, 0);
  }
/**
* Creates a new table with the specified number of rows and columns.
* All cells in the table are initially empty (set to <code>null</code>).
*
* @param numRows the number of rows.
* @param numColumns the number of columns.
*/
public DefaultTableModel(int numRows, int numColumns)
{
Vector defaultNames = new Vector(numColumns);
Vector data = new Vector(numRows);
for (int i = 0; i < numColumns; i++)
{
defaultNames.add(super.getColumnName(i));
}
for (int r = 0; r < numRows; r++)
{
Vector tmp = new Vector(numColumns);
tmp.setSize(numColumns);
data.add(tmp);
}
setDataVector(data, defaultNames);
}
/**
* Creates a new table with the specified column names and number of
* rows. The number of columns is determined by the number of column
* names supplied.
*
* @param columnNames the column names.
* @param numRows the number of rows.
*/
public DefaultTableModel(Vector columnNames, int numRows)
{
if (numRows < 0)
throw new IllegalArgumentException("numRows < 0");
Vector data = new Vector();
int numColumns = 0;
if (columnNames != null)
numColumns = columnNames.size();
while (0 < numRows--)
{
Vector rowData = new Vector();
rowData.setSize(numColumns);
data.add(rowData);
}
setDataVector(data, columnNames);
}
/**
* Creates a new table with the specified column names and row count.
*
* @param columnNames the column names.
* @param numRows the number of rows.
*/
  public DefaultTableModel(Object[] columnNames, int numRows)
  {
    // Convert the name array and delegate to the Vector-based constructor.
    this(convertToVector(columnNames), numRows);
  }
/**
* Creates a new table with the specified data values and column names.
*
* @param data the data values.
* @param columnNames the column names.
*/
  public DefaultTableModel(Vector data, Vector columnNames)
  {
    // Rows are padded/truncated to the column count inside setDataVector.
    setDataVector(data, columnNames);
  }
/**
* Creates a new table with the specified data values and column names.
*
* @param data the data values.
* @param columnNames the column names.
*/
  public DefaultTableModel(Object[][] data, Object[] columnNames)
  {
    // Convert both arrays to vectors and delegate.
    this(convertToVector(data), convertToVector(columnNames));
  }
/**
* Returns the vector containing the row data for the table.
*
* @return The data vector.
*/
  public Vector getDataVector()
  {
    // NOTE(review): returns the live internal vector, not a copy -- callers can
    // mutate the model's data directly without any change events being fired.
    return dataVector;
  }
/**
* Sets the data and column identifiers for the table. The data vector
* contains a <code>Vector</code> for each row in the table - if the
* number of objects in each row does not match the number of column
* names specified, the row data is truncated or expanded (by adding
* <code>null</code> values) as required.
*
* @param data the data for the table (a vector of row vectors).
* @param columnNames the column names.
*
* @throws NullPointerException if either argument is <code>null</code>.
*/
public void setDataVector(Vector data, Vector columnNames)
{
if (data == null)
dataVector = new Vector();
else
dataVector = data;
setColumnIdentifiers(columnNames);
}
/**
* Sets the data and column identifiers for the table.
*
* @param data the data for the table.
* @param columnNames the column names.
*
* @throws NullPointerException if either argument is <code>null</code>.
*/
  public void setDataVector(Object[][] data, Object[] columnNames)
  {
    // Convert both arrays to vectors and reuse the Vector-based setter.
    setDataVector(convertToVector(data),
                  convertToVector(columnNames));
  }
/**
* Sends the specified <code>event</code> to all registered listeners.
* This method is equivalent to
* {@link AbstractTableModel#fireTableChanged(TableModelEvent)}.
*
* @param event the event.
*/
  public void newDataAvailable(TableModelEvent event)
  {
    // Plain forwarder kept for API compatibility with the JDK class.
    fireTableChanged(event);
  }
/**
* Sends the specified <code>event</code> to all registered listeners.
* This method is equivalent to
* {@link AbstractTableModel#fireTableChanged(TableModelEvent)}.
*
* @param event the event.
*/
  public void newRowsAdded(TableModelEvent event)
  {
    // Plain forwarder kept for API compatibility with the JDK class.
    fireTableChanged(event);
  }
/**
* Sends the specified <code>event</code> to all registered listeners.
* This method is equivalent to
* {@link AbstractTableModel#fireTableChanged(TableModelEvent)}.
*
* @param event the event.
*/
  public void rowsRemoved(TableModelEvent event)
  {
    // Plain forwarder kept for API compatibility with the JDK class.
    fireTableChanged(event);
  }
/**
* Sets the column identifiers, updates the data rows (truncating
* or padding each row with <code>null</code> values) to match the
* number of columns, and sends a {@link TableModelEvent} to all
* registered listeners.
*
* @param columnIdentifiers the column identifiers.
*/
  public void setColumnIdentifiers(Vector columnIdentifiers)
  {
    this.columnIdentifiers = columnIdentifiers;
    // Resize every row to the new column count (null identifiers mean zero
    // columns); setColumnCount also fires the structure-changed event.
    setColumnCount(columnIdentifiers == null ? 0 : columnIdentifiers.size());
  }
/**
* Sets the column identifiers, updates the data rows (truncating
* or padding each row with <code>null</code> values) to match the
* number of columns, and sends a {@link TableModelEvent} to all
* registered listeners.
*
* @param columnIdentifiers the column identifiers.
*/
  public void setColumnIdentifiers(Object[] columnIdentifiers)
  {
    // Convert the array and delegate to the Vector-based overload.
    setColumnIdentifiers(convertToVector(columnIdentifiers));
  }
/**
 * This method is obsolete, use {@link #setRowCount(int)} instead.
 *
 * @param numRows  the number of rows.
 */
public void setNumRows(int numRows)
{
  // Retained for backwards compatibility; delegates directly.
  setRowCount(numRows);
}
/**
 * Sets the number of rows in the table.  If <code>rowCount</code> is less
 * than the current number of rows in the table, rows are discarded and a
 * rows-deleted event is fired.  If <code>rowCount</code> is greater than
 * the current number of rows, new (empty) rows are added and a
 * rows-inserted event is fired.  If the row count is unchanged, this is a
 * no-op and no event is fired.
 *
 * @param rowCount  the row count.
 */
public void setRowCount(int rowCount)
{
  int existingRowCount = dataVector.size();
  // Guard the no-op case explicitly: without it the "else" branch below
  // fired fireTableRowsInserted(existingRowCount, existingRowCount - 1),
  // a spurious event with an inverted index range.
  if (rowCount == existingRowCount)
    return;
  if (rowCount < existingRowCount)
    {
      // Vector.setSize() truncates, discarding the trailing rows.
      dataVector.setSize(rowCount);
      fireTableRowsDeleted(rowCount, existingRowCount - 1);
    }
  else
    {
      int rowsToAdd = rowCount - existingRowCount;
      addExtraRows(rowsToAdd, columnIdentifiers.size());
      fireTableRowsInserted(existingRowCount, rowCount - 1);
    }
}
/**
 * Sets the number of columns in the table.  Existing rows are truncated
 * or padded with <code>null</code> values to match the new column count.
 * A {@link TableModelEvent} is sent to all registered listeners.
 *
 * @param columnCount  the column count.
 */
public void setColumnCount(int columnCount)
{
  // Resize every row so it has exactly columnCount cells; Vector.setSize()
  // pads with null or truncates as required.
  for (Object row : dataVector)
    ((Vector) row).setSize(columnCount);
  if (columnIdentifiers != null)
    columnIdentifiers.setSize(columnCount);
  // The set of columns changed, so listeners must rebuild the structure.
  fireTableStructureChanged();
}
/**
 * Adds a column with the specified name to the table.  All cell values
 * for the column are initially set to <code>null</code>.
 *
 * @param columnName  the column name (<code>null</code> permitted).
 */
public void addColumn(Object columnName)
{
  // Delegate with no cell data; the array overload fills the new column
  // with nulls when columnData is null.
  addColumn(columnName, (Object[]) null);
}
/**
 * Adds a column with the specified name and data values to the table.
 *
 * @param columnName  the column name (<code>null</code> permitted).
 * @param columnData  the column data (<code>null</code> permitted).
 */
public void addColumn(Object columnName, Vector columnData)
{
  Object[] asArray;
  if (columnData == null)
    asArray = null;
  else
    {
      // Pad the incoming vector with nulls if it is shorter than the
      // current number of rows, then hand it off as an array.
      int rows = dataVector.size();
      if (columnData.size() < rows)
        columnData.setSize(rows);
      asArray = columnData.toArray();
    }
  addColumn(columnName, asArray);
}
/**
 * Adds a column with the specified name and data values to the table.
 * If there are more data values than rows, extra rows are created; if
 * there are fewer, the column is padded with <code>null</code>.
 *
 * @param columnName  the column name (<code>null</code> permitted).
 * @param columnData  the column data (<code>null</code> permitted).
 */
public void addColumn(Object columnName, Object[] columnData)
{
  if (columnData != null)
    {
      // Reconcile the data array length with the current row count.
      int rows = dataVector.size();
      if (columnData.length > rows)
        {
          // More cell values than rows: grow the table first.
          addExtraRows(columnData.length - rows, columnIdentifiers.size());
        }
      else if (columnData.length < rows)
        {
          // Fewer cell values than rows: pad the array with nulls.
          Object[] padded = new Object[rows];
          System.arraycopy(columnData, 0, padded, 0, columnData.length);
          columnData = padded;
        }
    }
  // Append one cell to every row (re-read the size: addExtraRows above
  // may have grown dataVector).
  for (int r = 0; r < dataVector.size(); ++r)
    {
      Vector row = (Vector) dataVector.get(r);
      row.add(columnData == null ? null : columnData[r]);
    }
  columnIdentifiers.add(columnName);
  fireTableStructureChanged();
}
/**
 * Adds a new row containing the specified data to the table and sends a
 * {@link TableModelEvent} to all registered listeners.
 *
 * @param rowData  the row data (<code>null</code> permitted).
 */
public void addRow(Vector rowData)
{
  dataVector.add(rowData);
  int newRow = dataVector.size() - 1;
  // Notify through newRowsAdded() so subclasses overriding that method
  // still see the insertion.
  newRowsAdded(new TableModelEvent(this, newRow, newRow, -1,
                                   TableModelEvent.INSERT));
}
/**
 * Adds a new row containing the specified data to the table and sends a
 * {@link TableModelEvent} to all registered listeners.
 *
 * @param rowData  the row data (<code>null</code> permitted).
 */
public void addRow(Object[] rowData)
{
  // Convert to a Vector and delegate to the Vector-based variant.
  addRow(convertToVector(rowData));
}
/**
 * Inserts a new row into the table and sends a {@link TableModelEvent}
 * to all registered listeners.
 *
 * @param row  the row index.
 * @param rowData  the row data (<code>null</code> permitted).
 */
public void insertRow(int row, Vector rowData)
{
  dataVector.add(row, rowData);
  fireTableRowsInserted(row, row);
}
/**
 * Inserts a new row into the table and sends a {@link TableModelEvent}
 * to all registered listeners.
 *
 * @param row  the row index.
 * @param rowData  the row data (<code>null</code> permitted).
 */
public void insertRow(int row, Object[] rowData)
{
  // Convert to a Vector and delegate to the Vector-based variant.
  insertRow(row, convertToVector(rowData));
}
/**
 * Moves the rows from <code>startIndex</code> to <code>endIndex</code>
 * (inclusive) to the specified row.
 *
 * @param startIndex  the first row of the block to move.
 * @param endIndex  the last row of the block to move.
 * @param toIndex  the index the block of rows is moved to.
 */
public void moveRow(int startIndex, int endIndex, int toIndex)
{
  // Remove from the end towards the start so earlier removals do not
  // shift the indices of rows still to be removed.  As a side effect,
  // 'removed' holds the rows in reverse order.
  Vector removed = new Vector();
  for (int i = endIndex; i >= startIndex; i--)
    {
      removed.add(this.dataVector.remove(i));
    }
  // Re-inserting every row at the same position reverses the order once
  // more, restoring the original ordering of the moved block.
  for (int i = 0; i <= endIndex - startIndex; i++)
    {
      dataVector.insertElementAt(removed.get(i), toIndex);
    }
  // Notify listeners with a range covering every row touched by the move.
  int firstRow = Math.min(startIndex, toIndex);
  int lastRow = Math.max(endIndex, toIndex + (endIndex - startIndex));
  fireTableRowsUpdated(firstRow, lastRow);
}
/**
 * Removes a row from the table and sends a {@link TableModelEvent} to
 * all registered listeners.
 *
 * @param row  the row index.
 */
public void removeRow(int row)
{
  dataVector.remove(row);
  fireTableRowsDeleted(row, row);
}
/**
 * Returns the number of rows in the model, i.e. the size of the backing
 * data vector.
 *
 * @return The row count.
 */
public int getRowCount()
{
  return dataVector.size();
}
/**
 * Returns the number of columns in the model, i.e. the number of column
 * identifiers (zero when no identifiers have been set).
 *
 * @return The column count.
 */
public int getColumnCount()
{
  return columnIdentifiers == null ? 0 : columnIdentifiers.size();
}
/**
 * Get the name of the column.  If the column has the column identifier
 * set, the return value is the result of the <code>toString()</code>
 * call on that identifier.  If the identifier is not explicitly set, the
 * returned value is calculated by
 * {@link AbstractTableModel#getColumnName(int)}.
 *
 * @param column  the column index.
 *
 * @return The column name.
 */
public String getColumnName(int column)
{
  // Fall back to the default spreadsheet-style name when there are no
  // identifiers or the index is beyond the identifier list.
  if (columnIdentifiers == null || column >= getColumnCount())
    return super.getColumnName(column);
  // Grow the identifier/data vectors if a subclass reports larger counts
  // than are currently stored.
  checkSize();
  Object id = columnIdentifiers.get(column);
  return id == null ? super.getColumnName(column) : id.toString();
}
/**
 * Returns <code>true</code> if the specified cell can be modified, and
 * <code>false</code> otherwise.  For this implementation, the method
 * always returns <code>true</code>.
 *
 * @param row  the row index.
 * @param column  the column index.
 *
 * @return <code>true</code> in all cases.
 */
public boolean isCellEditable(int row, int column)
{
  // Every cell in this model is editable; override to restrict editing.
  return true;
}
/**
 * Returns the value at the specified cell in the table.
 *
 * @param row  the row index.
 * @param column  the column index.
 *
 * @return The value (<code>Object</code>, possibly <code>null</code>) at
 *         the specified cell in the table.
 */
public Object getValueAt(int row, int column)
{
  // Each entry of dataVector is itself a Vector holding one row's cells.
  return ((Vector) dataVector.get(row)).get(column);
}
/**
 * Sets the value for the specified cell in the table and sends a
 * {@link TableModelEvent} to all registered listeners.
 *
 * @param value  the value (<code>Object</code>, <code>null</code> permitted).
 * @param row  the row index.
 * @param column  the column index.
 */
public void setValueAt(Object value, int row, int column)
{
  ((Vector) dataVector.get(row)).set(column, value);
  fireTableCellUpdated(row, column);
}
/**
 * Converts the data array to a <code>Vector</code>.
 *
 * @param data  the data array (<code>null</code> permitted).
 *
 * @return A vector (or <code>null</code> if the data array
 *         is <code>null</code>).
 */
protected static Vector convertToVector(Object[] data)
{
  if (data == null)
    return null;
  // Pre-size the vector to avoid intermediate growth.
  Vector result = new Vector(data.length);
  for (Object element : data)
    result.add(element);
  return result;
}
/**
 * Converts the two-dimensional data array to a <code>Vector</code> of
 * row vectors.
 *
 * @param data  the data array (<code>null</code> permitted).
 *
 * @return A vector of vectors (or <code>null</code> if the data array
 *         is <code>null</code>).
 */
protected static Vector convertToVector(Object[][] data)
{
  if (data == null)
    return null;
  Vector rows = new Vector(data.length);
  // Convert each row through the one-dimensional overload.
  for (Object[] row : data)
    rows.add(convertToVector(row));
  return rows;
}
/**
 * Appends <code>rowsToAdd</code> empty rows to <code>dataVector</code>.
 * Each new row is padded with <code>null</code> to <code>nbColumns</code>
 * cells.  No change event is fired; callers are responsible for that.
 *
 * @param rowsToAdd  number of rows to add.
 * @param nbColumns  size of the added rows.
 */
private void addExtraRows(int rowsToAdd, int nbColumns)
{
  for (int i = 0; i < rowsToAdd; i++)
    {
      Vector row = new Vector();
      // Use the nbColumns argument; previously it was ignored in favour
      // of columnIdentifiers.size(), which contradicted the documented
      // contract and would NPE if columnIdentifiers were null.  All
      // existing callers pass columnIdentifiers.size() here, so behavior
      // is unchanged for them.
      row.setSize(nbColumns);
      dataVector.add(row);
    }
}
/**
 * Checks the real column/row counts against the ones returned by
 * <code>getColumnCount()</code> and <code>getRowCount()</code>.
 * If the reported counts are bigger, <code>columnIdentifiers</code>
 * and <code>dataVector</code> are grown to the expected sizes.
 */
private void checkSize()
{
  int columnCount = getColumnCount();
  int rowCount = getRowCount();
  // These conditions can only hold when a subclass overrides
  // getColumnCount()/getRowCount() to report larger counts than the
  // backing vectors currently contain.
  if (columnCount > columnIdentifiers.size())
    columnIdentifiers.setSize(columnCount);
  if (rowCount > dataVector.size())
    {
      int rowsToAdd = rowCount - dataVector.size();
      addExtraRows(rowsToAdd, columnCount);
    }
}
}
|
/*
Copyright (c) 2010, Benjamin P. Wood and Adrian Sampson, University of Washington
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the University of Washington nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package oshajava.util.cache;
import java.util.concurrent.ConcurrentMap;
import oshajava.runtime.RuntimeMonitor;
import oshajava.util.count.Counter;
/**
* A THREAD-LOCAL direct-mapped cache front-end for a shared ConcurrentMap.
* Keys are hashed by System.identityHashCode.
* @author bpw
*
* @param <K> Type of keys
* @param <V> Type of values
*/
public class DirectMappedShadowCache<K,V> extends ShadowCache<K,V> {
	/** Whether hit/miss counters are maintained (tied to profiling mode). */
	public static final boolean COUNT = RuntimeMonitor.PROFILE;
	/**
	 * Bit mask used to reduce an identity hash to a slot index
	 * (size - 1, valid because size is a power of two).
	 */
	protected final int mask;
	/**
	 * Cached keys, compared by reference identity.
	 */
	protected final Object[] keys;
	/**
	 * Cached values; values[i] belongs to keys[i].
	 */
	protected final V[] values;
	/**
	 * Optional profiling counters for cache hits and misses
	 * (either may be null).
	 */
	protected final Counter hits, misses;
	/**
	 * Create a new cache of the given size as a front end to the given
	 * map, with no profiling counters.
	 *
	 * @param store  the shared backing map.
	 * @param size  cache capacity; must be a power of two.
	 */
	public DirectMappedShadowCache(final ConcurrentMap<K,V> store, int size) {
		this(store, size, null, null);
	}
	/**
	 * Create a new cache of the given size as a front end to the given
	 * map, optionally counting hits and misses.
	 *
	 * @param store  the shared backing map.
	 * @param size  cache capacity; must be a power of two.
	 * @param hits  hit counter (null permitted).
	 * @param misses  miss counter (null permitted).
	 */
	@SuppressWarnings("unchecked")
	public DirectMappedShadowCache(final ConcurrentMap<K,V> store, int size, Counter hits, Counter misses) {
		super(store);
		this.keys = new Object[size];
		this.values = (V[])new Object[size];
		this.hits = hits;
		this.misses = misses;
		this.mask = size - 1;
		// A power of two has no bits in common with (size - 1).
		if ((size & mask) != 0) throw new IllegalArgumentException("The size parameter must be a power of 2. (" + size + " is not.)");
	}
	/**
	 * Get the value for key, consulting the thread-local cache before the
	 * shared map.  May displace another entry from the cache.
	 *
	 * @param key  the lookup key (identity-compared).
	 * @return the mapped value, or null if absent.
	 */
	public V get(final K key) {
		final int idx = System.identityHashCode(key) & mask;
		// Fast path: identity hit in the local cache.
		if (keys[idx] == key) {
			if (COUNT && hits != null) {
				hits.inc();
			}
			return values[idx];
		}
		// NOTE(review): the null check deliberately follows the hit check,
		// preserving the original counting behavior for null keys.
		if (key == null) {
			return null;
		}
		if (COUNT && misses != null) {
			misses.inc();
		}
		final V fetched = store.get(key);
		if (fetched != null) { // TODO maybe set it anyway?
			// Cache only successful lookups, evicting whatever occupied
			// this slot before.
			keys[idx] = key;
			values[idx] = fetched;
		}
		return fetched;
	}
	/**
	 * Put the given key/value pair in the map if not there already.
	 * Assumed not in cache.  May displace another entry from the cache.
	 *
	 * @param key  the key (non-null).
	 * @param value  the value (non-null).
	 * @return the previously mapped value, or null if none.
	 */
	public V putIfAbsent(final K key, final V value) {
		if (key == null || value == null) {
			throw new NullPointerException();
		}
		final int idx = System.identityHashCode(key) & mask;
		if (keys[idx] == key) {
			return values[idx];
		}
		final V previous = store.putIfAbsent(key, value);
		keys[idx] = key;
		// Cache whichever value actually won the race in the shared map.
		values[idx] = (previous == null) ? value : previous;
		return previous;
	}
	/**
	 * Clear the cache (probably to allow gc...)
	 */
	public void flush() {
		final int n = keys.length;
		for (int i = 0; i < n; ++i) {
			keys[i] = null;
			values[i] = null;
		}
	}
}
|
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.model;
import java.io.File;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
import com.archimatetool.model.util.IDAdapter;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Model</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* </p>
* <ul>
* <li>{@link com.archimatetool.model.IArchimateModel#getPurpose <em>Purpose</em>}</li>
* <li>{@link com.archimatetool.model.IArchimateModel#getFile <em>File</em>}</li>
* <li>{@link com.archimatetool.model.IArchimateModel#getVersion <em>Version</em>}</li>
* <li>{@link com.archimatetool.model.IArchimateModel#getMetadata <em>Metadata</em>}</li>
* </ul>
*
* @see com.archimatetool.model.IArchimatePackage#getArchimateModel()
* @model extendedMetaData="name='model'"
* @generated
*/
public interface IArchimateModel extends IFolderContainer, IArchimateModelObject, IProperties {
    /**
     * Returns the value of the '<em><b>Purpose</b></em>' attribute.
     * The default value is <code>""</code>.
     * <!-- begin-user-doc -->
     * <p>
     * The free-text purpose/description of this model.
     * </p>
     * <!-- end-user-doc -->
     * @return the value of the '<em>Purpose</em>' attribute.
     * @see #setPurpose(String)
     * @see com.archimatetool.model.IArchimatePackage#getArchimateModel_Purpose()
     * @model default=""
     *        extendedMetaData="kind='element'"
     * @generated
     */
    String getPurpose();
    /**
     * Sets the value of the '{@link com.archimatetool.model.IArchimateModel#getPurpose <em>Purpose</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Purpose</em>' attribute.
     * @see #getPurpose()
     * @generated
     */
    void setPurpose(String value);
    /**
     * Returns the value of the '<em><b>File</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <p>
     * The file this model is persisted to; transient (not serialized with
     * the model) and may be <code>null</code> for an unsaved model.
     * </p>
     * <!-- end-user-doc -->
     * @return the value of the '<em>File</em>' attribute.
     * @see #setFile(File)
     * @see com.archimatetool.model.IArchimatePackage#getArchimateModel_File()
     * @model dataType="com.archimatetool.model.File" transient="true"
     * @generated
     */
    File getFile();
    /**
     * Sets the value of the '{@link com.archimatetool.model.IArchimateModel#getFile <em>File</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>File</em>' attribute.
     * @see #getFile()
     * @generated
     */
    void setFile(File value);
    /**
     * Returns the value of the '<em><b>Version</b></em>' attribute.
     * The default value is <code>""</code>.
     * <!-- begin-user-doc -->
     * <p>
     * The version string recorded for this model.
     * </p>
     * <!-- end-user-doc -->
     * @return the value of the '<em>Version</em>' attribute.
     * @see #setVersion(String)
     * @see com.archimatetool.model.IArchimatePackage#getArchimateModel_Version()
     * @model default=""
     * @generated
     */
    String getVersion();
    /**
     * Sets the value of the '{@link com.archimatetool.model.IArchimateModel#getVersion <em>Version</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Version</em>' attribute.
     * @see #getVersion()
     * @generated
     */
    void setVersion(String value);
    /**
     * Returns the value of the '<em><b>Metadata</b></em>' containment reference.
     * <!-- begin-user-doc -->
     * <p>
     * The metadata object owned (contained) by this model.
     * </p>
     * <!-- end-user-doc -->
     * @return the value of the '<em>Metadata</em>' containment reference.
     * @see #setMetadata(IMetadata)
     * @see com.archimatetool.model.IArchimatePackage#getArchimateModel_Metadata()
     * @model containment="true"
     * @generated
     */
    IMetadata getMetadata();
    /**
     * Sets the value of the '{@link com.archimatetool.model.IArchimateModel#getMetadata <em>Metadata</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Metadata</em>' containment reference.
     * @see #getMetadata()
     * @generated
     */
    void setMetadata(IMetadata value);
    /**
     * <!-- begin-user-doc -->
     * Initialises this model with default values (presumably the default
     * folder structure and attributes — confirm against the implementation).
     * <!-- end-user-doc -->
     * @model
     * @generated
     */
    void setDefaults();
    /**
     * <!-- begin-user-doc -->
     * Returns the folder that the given object should be placed in by
     * default (semantics defined by the implementation).
     * <!-- end-user-doc -->
     * @model objectRequired="true"
     * @generated
     */
    IFolder getDefaultFolderForObject(EObject object);
    /**
     * <!-- begin-user-doc -->
     * Returns this model's default (presumably first) diagram model, or
     * <code>null</code> if there are none — confirm against the
     * implementation.
     * <!-- end-user-doc -->
     * @model kind="operation"
     * @generated
     */
    IDiagramModel getDefaultDiagramModel();
    /**
     * <!-- begin-user-doc -->
     * Returns all diagram models contained in this model.
     * <!-- end-user-doc -->
     * @model kind="operation"
     * @generated
     */
    EList<IDiagramModel> getDiagramModels();
    /**
     * <!-- begin-user-doc -->
     * Returns the top-level folder of the given type.
     * <!-- end-user-doc -->
     * @model
     * @generated
     */
    IFolder getFolder(FolderType type);
    /**
     * @return The ID adapter for registering new child object IDs
     */
    IDAdapter getIDAdapter();
} // IArchimateModel
|
package midistreamer;
/**
 * Command-line entry point that starts a {@link MIDIStreamPlayer} on the
 * default receiver port.
 */
public class MIDIKeyboardStreamPlayer {
    // Made final: a public mutable static constant invited accidental
    // reassignment; the UPPER_SNAKE_CASE name already implies a constant.
    /**
     * Default port the MIDI stream receiver listens on
     * (assumed — confirm against MIDIStreamPlayer).
     */
    public static final int DEFAULT_RECEIVER_PORT = 3700;

    /**
     * Starts the stream player.
     *
     * @param args command-line arguments (ignored).
     */
    public static void main(String[] args) {
        // NOTE(review): the meaning of the first two constructor arguments
        // (0, 1) is defined by MIDIStreamPlayer — presumably device/channel
        // indices; confirm against that class.
        MIDIStreamPlayer msp = new MIDIStreamPlayer(0, 1, DEFAULT_RECEIVER_PORT);
        msp.start();
    }
}
|
/*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.web.servlet;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import org.springframework.lang.Nullable;
/**
* Interface to be implemented by objects that define a mapping between
* requests and handler objects.
*
* <p>This class can be implemented by application developers, although this is not
* necessary, as {@link org.springframework.web.servlet.handler.BeanNameUrlHandlerMapping}
* and {@link org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerMapping}
* are included in the framework. The former is the default if no
* HandlerMapping bean is registered in the application context.
*
* <p>HandlerMapping implementations can support mapped interceptors but do not
* have to. A handler will always be wrapped in a {@link HandlerExecutionChain}
* instance, optionally accompanied by some {@link HandlerInterceptor} instances.
* The DispatcherServlet will first call each HandlerInterceptor's
* {@code preHandle} method in the given order, finally invoking the handler
* itself if all {@code preHandle} methods have returned {@code true}.
*
* <p>The ability to parameterize this mapping is a powerful and unusual
* capability of this MVC framework. For example, it is possible to write
* a custom mapping based on session state, cookie state or many other
* variables. No other MVC framework seems to be equally flexible.
*
* <p>Note: Implementations can implement the {@link org.springframework.core.Ordered}
* interface to be able to specify a sorting order and thus a priority for getting
* applied by DispatcherServlet. Non-Ordered instances get treated as lowest priority.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @see org.springframework.core.Ordered
* @see org.springframework.web.servlet.handler.AbstractHandlerMapping
* @see org.springframework.web.servlet.handler.BeanNameUrlHandlerMapping
* @see org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerMapping
*
 * Role: finds the Handler matching the current request and wraps that Handler
 * together with its HandlerInterceptors into a HandlerExecutionChain, which is
 * returned to the DispatcherServlet (central dispatcher).
*/
public interface HandlerMapping {
	/**
	 * Name of the {@link HttpServletRequest} attribute that contains the mapped
	 * handler for the best matching pattern.
	 * @since 4.3.21
	 */
	String BEST_MATCHING_HANDLER_ATTRIBUTE = HandlerMapping.class.getName() + ".bestMatchingHandler";
	/**
	 * Name of the {@link HttpServletRequest} attribute that contains the path
	 * used to look up the matching handler, which depending on the configured
	 * {@link org.springframework.web.util.UrlPathHelper} could be the full path
	 * or without the context path, decoded or not, etc.
	 * @since 5.2
	 * @deprecated as of 5.3 in favor of
	 * {@link org.springframework.web.util.UrlPathHelper#PATH_ATTRIBUTE} and
	 * {@link org.springframework.web.util.ServletRequestPathUtils#PATH_ATTRIBUTE}.
	 * To access the cached path used for request mapping, use
	 * {@link org.springframework.web.util.ServletRequestPathUtils#getCachedPathValue(ServletRequest)}.
	 */
	@Deprecated
	String LOOKUP_PATH = HandlerMapping.class.getName() + ".lookupPath";
	/**
	 * Name of the {@link HttpServletRequest} attribute that contains the path
	 * within the handler mapping, in case of a pattern match, or the full
	 * relevant URI (typically within the DispatcherServlet's mapping) else.
	 * <p>Note: This attribute is not required to be supported by all
	 * HandlerMapping implementations. URL-based HandlerMappings will
	 * typically support it, but handlers should not necessarily expect
	 * this request attribute to be present in all scenarios.
	 */
	String PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE = HandlerMapping.class.getName() + ".pathWithinHandlerMapping";
	/**
	 * Name of the {@link HttpServletRequest} attribute that contains the
	 * best matching pattern within the handler mapping.
	 * <p>Note: This attribute is not required to be supported by all
	 * HandlerMapping implementations. URL-based HandlerMappings will
	 * typically support it, but handlers should not necessarily expect
	 * this request attribute to be present in all scenarios.
	 */
	String BEST_MATCHING_PATTERN_ATTRIBUTE = HandlerMapping.class.getName() + ".bestMatchingPattern";
	/**
	 * Name of the boolean {@link HttpServletRequest} attribute that indicates
	 * whether type-level mappings should be inspected.
	 * <p>Note: This attribute is not required to be supported by all
	 * HandlerMapping implementations.
	 */
	String INTROSPECT_TYPE_LEVEL_MAPPING = HandlerMapping.class.getName() + ".introspectTypeLevelMapping";
	/**
	 * Name of the {@link HttpServletRequest} attribute that contains the URI
	 * templates map, mapping variable names to values.
	 * <p>Note: This attribute is not required to be supported by all
	 * HandlerMapping implementations. URL-based HandlerMappings will
	 * typically support it, but handlers should not necessarily expect
	 * this request attribute to be present in all scenarios.
	 */
	String URI_TEMPLATE_VARIABLES_ATTRIBUTE = HandlerMapping.class.getName() + ".uriTemplateVariables";
	/**
	 * Name of the {@link HttpServletRequest} attribute that contains a map with
	 * URI variable names and a corresponding MultiValueMap of URI matrix
	 * variables for each.
	 * <p>Note: This attribute is not required to be supported by all
	 * HandlerMapping implementations and may also not be present depending on
	 * whether the HandlerMapping is configured to keep matrix variable content
	 * in the request URI.
	 */
	String MATRIX_VARIABLES_ATTRIBUTE = HandlerMapping.class.getName() + ".matrixVariables";
	/**
	 * Name of the {@link HttpServletRequest} attribute that contains the set of
	 * producible MediaTypes applicable to the mapped handler.
	 * <p>Note: This attribute is not required to be supported by all
	 * HandlerMapping implementations. Handlers should not necessarily expect
	 * this request attribute to be present in all scenarios.
	 */
	String PRODUCIBLE_MEDIA_TYPES_ATTRIBUTE = HandlerMapping.class.getName() + ".producibleMediaTypes";
	/**
	 * Whether this {@code HandlerMapping} instance has been enabled to use parsed
	 * {@link org.springframework.web.util.pattern.PathPattern}s in which case
	 * the {@link DispatcherServlet} automatically
	 * {@link org.springframework.web.util.ServletRequestPathUtils#parseAndCache parses}
	 * the {@code RequestPath} to make it available for
	 * {@link org.springframework.web.util.ServletRequestPathUtils#getParsedRequestPath
	 * access} in {@code HandlerMapping}s, {@code HandlerInterceptor}s, and
	 * other components.
	 * @since 5.3
	 */
	default boolean usesPathPatterns() {
		return false;
	}
	/**
	 * Return a handler and any interceptors for this request. The choice may be made
	 * on request URL, session state, or any factor the implementing class chooses.
	 * <p>The returned HandlerExecutionChain contains a handler Object, rather than
	 * even a tag interface, so that handlers are not constrained in any way.
	 * For example, a HandlerAdapter could be written to allow another framework's
	 * handler objects to be used.
	 * <p>Returns {@code null} if no match was found. This is not an error.
	 * The DispatcherServlet will query all registered HandlerMapping beans to find
	 * a match, and only decide there is an error if none can find a handler.
	 * @param request current HTTP request
	 * @return a HandlerExecutionChain instance containing handler object and
	 * any interceptors, or {@code null} if no mapping found
	 * @throws Exception if there is an internal error
	 */
	@Nullable
	HandlerExecutionChain getHandler(HttpServletRequest request) throws Exception;
}
|
package br.eti.souza.json;
import br.eti.souza.exception.SystemException;
import java.util.ArrayList;
import java.util.Collection;
/**
 * Represents a list created from a JSON string.
 * @author Alan Moraes Souza
 */
public class JSONList extends ArrayList<IJSON> implements IJSON {
    /** Default constructor; instances are created only by the factory methods. */
    private JSONList() {
    }
    /**
     * Creates a JSONList from the given JSON text.
     * @param json JSON to convert (must be a bracketed array).
     * @return the corresponding JSONList.
     * @throws SystemException if the JSON is not valid.
     */
    protected static JSONList fromJSON(String json) throws SystemException {
        if (json == null) {
            throw new SystemException("invalid.json.format");
        } else {
            json = json.trim();
            if (!json.startsWith("[") || !json.endsWith("]")) {
                throw new SystemException("invalid.json.format");
            }
        }
        // Strip the surrounding brackets and consume one element at a time.
        json = json.substring(1, json.length() - 1).trim();
        JSONList list = new JSONList();
        while (!json.isEmpty()) {
            // Each recogniser returns the matched prefix or null; on a
            // match we consume it, add the parsed element and either
            // continue past a comma or stop.
            String value = JSON.utilityGetStartAsString(json);
            if (value != null) {
                json = json.substring(value.length()).trim();
                list.add(JSONValue.fromJSON(value));
                if (json.startsWith(",")) {
                    json = json.substring(1).trim();
                    continue;
                } else {
                    break;
                }
            }
            value = JSON.utilityGetStartAsBoolean(json);
            if (value != null) {
                json = json.substring(value.length()).trim();
                list.add(JSONValue.fromJSON(value));
                if (json.startsWith(",")) {
                    json = json.substring(1).trim();
                    continue;
                } else {
                    break;
                }
            }
            value = JSON.utilityGetStartAsNumber(json);
            if (value != null) {
                json = json.substring(value.length()).trim();
                list.add(JSONValue.fromJSON(value));
                if (json.startsWith(",")) {
                    json = json.substring(1).trim();
                    continue;
                } else {
                    break;
                }
            }
            value = JSON.utilityGetStartAsNull(json);
            if (value != null) {
                json = json.substring(value.length()).trim();
                list.add(JSONValue.fromJSON(value));
                if (json.startsWith(",")) {
                    json = json.substring(1).trim();
                    continue;
                } else {
                    break;
                }
            }
            value = JSON.utilityGetStartAsList(json);
            if (value != null) {
                json = json.substring(value.length()).trim();
                list.add(JSONList.fromJSON(value));
                if (json.startsWith(",")) {
                    json = json.substring(1).trim();
                    continue;
                } else {
                    break;
                }
            }
            value = JSON.utilityGetStartAsObjeto(json);
            if (value != null) {
                json = json.substring(value.length()).trim();
                list.add(JSONObject.fromJSON(value));
                if (json.startsWith(",")) {
                    json = json.substring(1).trim();
                    continue;
                } else {
                    break;
                }
            }
            // BUG FIX: reaching this point means no recogniser matched, so
            // 'json' was not consumed.  Previously the loop spun forever on
            // such malformed input; fail fast instead.
            throw new SystemException("invalid.json.format");
        }
        // Leftover text after a value with no trailing comma is malformed.
        if (!json.isEmpty()) {
            throw new SystemException("invalid.json.format");
        }
        return list;
    }
    /**
     * Creates a JSONList from the given object (an array or a Collection).
     * @param object object to convert.
     * @return the corresponding JSONList.
     * @throws SystemException if the object is null or not a supported
     *         array/collection type.
     */
    protected static JSONList fromObject(Object object) throws SystemException {
        // Primitive array types must be handled individually: they are not
        // Object[] and each needs its own element type for iteration
        // (elements are boxed on the fly).
        if (object == null) {
            throw new SystemException("invalid.object.null");
        } else if (object instanceof byte[]) {
            JSONList result = new JSONList();
            byte[] list = (byte[]) object;
            for (Object item : list) {
                result.add(JSONValue.fromObject(item));
            }
            return result;
        } else if (object instanceof int[]) {
            JSONList result = new JSONList();
            int[] list = (int[]) object;
            for (Object item : list) {
                result.add(JSONValue.fromObject(item));
            }
            return result;
        } else if (object instanceof long[]) {
            JSONList result = new JSONList();
            long[] list = (long[]) object;
            for (Object item : list) {
                result.add(JSONValue.fromObject(item));
            }
            return result;
        } else if (object instanceof float[]) {
            JSONList result = new JSONList();
            float[] list = (float[]) object;
            for (Object item : list) {
                result.add(JSONValue.fromObject(item));
            }
            return result;
        } else if (object instanceof double[]) {
            JSONList result = new JSONList();
            double[] list = (double[]) object;
            for (Object item : list) {
                result.add(JSONValue.fromObject(item));
            }
            return result;
        } else if (object instanceof char[]) {
            JSONList result = new JSONList();
            char[] list = (char[]) object;
            for (Object item : list) {
                result.add(JSONValue.fromObject(item));
            }
            return result;
        } else if (object instanceof Object[]) {
            JSONList result = new JSONList();
            Object[] list = (Object[]) object;
            for (Object item : list) {
                result.add(JSON.recursiveFrom(item));
            }
            return result;
        } else if (object instanceof Collection) {
            JSONList result = new JSONList();
            // Wildcard instead of the raw type: we only read elements.
            Collection<?> list = (Collection<?>) object;
            for (Object item : list) {
                result.add(JSON.recursiveFrom(item));
            }
            return result;
        } else {
            throw new SystemException("invalid.object.unknow.list");
        }
    }
    /**
     * Returns the JSON text corresponding to this list.
     * @return JSON representation of this list.
     */
    @Override
    public String toJSON() {
        if (this.isEmpty()) {
            return "[]";
        } else {
            StringBuilder json = new StringBuilder("[ ").append(this.get(0).toJSON());
            for (int i = 1; i < this.size(); i++) {
                json.append(", ").append(this.get(i).toJSON());
            }
            return json.append(" ]").toString();
        }
    }
}
|
/*
* Copyright (c) 2015 Nokia Solutions and Networks. All rights reserved.
*/
package com.nsn.ood.cls.rest.resource.internal;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.commons.io.IOUtils;
import org.apache.felix.dm.annotation.api.Component;
import org.apache.felix.dm.annotation.api.ServiceDependency;
import com.nsn.ood.cls.core.service.internal.TestService;
import com.nsn.ood.cls.core.util.ApiVersionChooser.API_VERSION;
import com.nsn.ood.cls.model.CLSMediaType;
import com.nsn.ood.cls.model.internal.Tasks;
import com.nsn.ood.cls.rest.resource.CLSApplication;
import com.nsn.ood.cls.util.log.Loggable;
import com.nsn.ood.cls.util.log.Loggable.Level;
import io.swagger.v3.oas.annotations.Operation;
/**
* @author marynows
*
*/
@Component(provides = TestResource.class)
@Path(CLSApplication.INTERNAL + "/test")
@Produces(CLSMediaType.APPLICATION_CLS_JSON)
@Loggable(value = Level.WARNING, duration = true)
public class TestResource {
	/** URI of the upload test page served by {@link #upload()}. */
	public static final String UPLOAD_URI = CLSApplication.INTERNAL + "/test/upload";

	@ServiceDependency
	private TestService testService;

	/**
	 * Serves the static HTML upload test page bundled on the classpath.
	 *
	 * @return the page content, or the I/O error message on failure.
	 */
	@GET
	@Path("upload")
	@Produces(MediaType.TEXT_HTML)
	@Operation(hidden = true)
	public String upload() {
		// try-with-resources closes the stream (it was previously leaked),
		// and the explicit charset replaces the deprecated
		// platform-default-encoding IOUtils.toString(InputStream) overload.
		try (InputStream is = TestResource.class.getResourceAsStream("/test/upload.html")) {
			return IOUtils.toString(is, StandardCharsets.UTF_8);
		} catch (final IOException e) {
			return e.getMessage();
		}
	}

	/**
	 * Replaces the scheduled tasks with the given definitions.
	 *
	 * @param tasks the task definitions to load.
	 * @return 204 No Content.
	 */
	@PUT
	@Path("reloadTasks")
	@Consumes(MediaType.APPLICATION_JSON)
	@Operation(hidden = true)
	public Response reloadTasks(final Tasks tasks) {
		this.testService.reloadTasks(tasks);
		return Response.noContent().build();
	}

	/**
	 * Reloads the target id and returns the new value.
	 *
	 * @return 200 OK with the reloaded target id as the entity.
	 */
	@PUT
	@Path("reloadTargetId")
	@Consumes(MediaType.APPLICATION_JSON)
	@Operation(hidden = true)
	public Response reloadTargetId() {
		final String targetId = this.testService.reloadTargetId();
		return Response.ok().entity(targetId).build();
	}

	/**
	 * Runs the named task immediately.
	 *
	 * @param taskName name of the task to run.
	 * @return 204 No Content.
	 */
	@PUT
	@Path("runTask")
	@Consumes(MediaType.APPLICATION_JSON)
	@Operation(hidden = true)
	public Response runTask(final String taskName) {
		this.testService.runTask(taskName);
		return Response.noContent().build();
	}

	/**
	 * Switches the service to the given API version.
	 *
	 * @param apiVersion the API version to activate.
	 * @return 204 No Content.
	 */
	@PUT
	@Path("setAPIVersion")
	@Consumes(MediaType.APPLICATION_JSON)
	@Operation(hidden = true)
	public Response setAPIVersion(final API_VERSION apiVersion) {
		this.testService.setApiVersion(apiVersion);
		return Response.noContent().build();
	}
}
|
package com.webs.api;
import java.io.IOException;
import java.util.List;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.NameValuePair;
import org.apache.commons.httpclient.methods.DeleteMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.PutMethod;
import com.webs.api.exception.UsageErrorApiException;
import com.webs.api.http.AbstractHttpApiClientAware;
import com.webs.api.model.Site;
import com.webs.api.model.id.SiteId;
/**
* @author Patrick Carroll
*/
public class SiteApiImpl extends AbstractHttpApiClientAware implements SiteApi {
public SiteApiImpl() {
}
public Site getSite(final SiteId siteId) {
return httpApiClient.httpRequestMapper(
new GetMethod(httpApiClient.getApiPath() + "sites/"
+ siteId.toString()),
HttpStatus.SC_OK, new WebsApiModelMapper<Site>(Site.class));
}
public void updateSite(final Site site) {
String identifier;
if (site.getId() != null)
identifier = site.getId().toString();
else if (site.getUsername() != null)
identifier = site.getUsername().toString();
else
throw new UsageErrorApiException("updateSite requires either site.id or site.username to be set");
PutMethod put = new PutMethod(httpApiClient.getApiPath()
+ "sites/" + identifier);
try {
put.setRequestBody(jsonMapper.writeValueAsString(site));
} catch (IOException e) {
throw new UsageErrorApiException("Error mapping object");
}
httpApiClient.httpRequest(put, HttpStatus.SC_NO_CONTENT);
}
public void deleteSite(final SiteId siteId) {
httpApiClient.httpRequest(new DeleteMethod(httpApiClient.getApiPath() + "sites/" + siteId.toString()), HttpStatus.SC_NO_CONTENT);
}
}
|
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.opendistroforelasticsearch.sql.intgtest;
import com.amazon.opendistroforelasticsearch.sql.plugin.SearchDao;
import com.amazon.opendistroforelasticsearch.sql.exception.SqlParseException;
import com.amazon.opendistroforelasticsearch.sql.query.SqlElasticSearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.junit.Test;
import java.sql.SQLFeatureNotSupportedException;
import java.util.Map;
import java.util.Set;
import java.util.HashSet;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
public class DateFormatTest {

    private static final String SELECT_FROM =
            "SELECT insert_time " +
            "FROM " + TestsConstants.TEST_INDEX_ONLINE + "/online ";

    /**
     * Every test below passes 'UTC' as the date_format timezone because the indexed
     * data is stored in UTC; using any other zone would make the generated
     * Elasticsearch query disagree with the field values the assertions check.
     */
    @Test
    public void equalTo() {
        final Set<Object> dates =
                query(SELECT_FROM + "WHERE date_format(insert_time, 'YYYY-MM-dd', 'UTC') = '2014-08-17'");
        assertThat(dates, containsInAnyOrder("2014-08-17"));
    }

    @Test
    public void lessThan() {
        final Set<Object> dates =
                query(SELECT_FROM + "WHERE date_format(insert_time, 'YYYY-MM-dd', 'UTC') < '2014-08-18'");
        assertThat(dates, containsInAnyOrder("2014-08-17"));
    }

    @Test
    public void lessThanOrEqualTo() {
        final Set<Object> dates =
                query(SELECT_FROM + "WHERE date_format(insert_time, 'YYYY-MM-dd', 'UTC') <= '2014-08-18'");
        assertThat(dates, containsInAnyOrder("2014-08-17", "2014-08-18"));
    }

    @Test
    public void greaterThan() {
        final Set<Object> dates =
                query(SELECT_FROM + "WHERE date_format(insert_time, 'YYYY-MM-dd', 'UTC') > '2014-08-23'");
        assertThat(dates, containsInAnyOrder("2014-08-24"));
    }

    /**
     * The default result size is 200, which can silently drop expected rows; a large
     * LIMIT is appended where needed so every matching date reaches the assertion.
     */
    @Test
    public void greaterThanOrEqualTo() {
        final Set<Object> dates =
                query(SELECT_FROM + "WHERE date_format(insert_time, 'YYYY-MM-dd', 'UTC') >= '2014-08-23' LIMIT 1000");
        assertThat(dates, containsInAnyOrder("2014-08-23", "2014-08-24"));
    }

    @Test
    public void and() {
        final Set<Object> dates = query(SELECT_FROM +
                "WHERE date_format(insert_time, 'YYYY-MM-dd', 'UTC') >= '2014-08-21' " +
                "AND date_format(insert_time, 'YYYY-MM-dd', 'UTC') <= '2014-08-23' LIMIT 1000");
        assertThat(dates, containsInAnyOrder("2014-08-21", "2014-08-22", "2014-08-23"));
    }

    @Test
    public void or() {
        final Set<Object> dates = query(SELECT_FROM +
                "WHERE date_format(insert_time, 'YYYY-MM-dd', 'UTC') < '2014-08-18' " +
                "OR date_format(insert_time, 'YYYY-MM-dd', 'UTC') > '2014-08-23'");
        assertThat(dates, containsInAnyOrder("2014-08-17", "2014-08-24"));
    }

    /**
     * Runs the SQL through the plugin and collects the formatted insert_time values.
     * Checked parser/feature exceptions are rethrown unchecked so test methods stay clean.
     */
    private Set<Object> query(String query) {
        try {
            final SearchDao searchDao = MainTestSuite.getSearchDao();
            final SqlElasticSearchRequestBuilder select =
                    (SqlElasticSearchRequestBuilder) searchDao.explain(query).explain();
            final SearchResponse response = (SearchResponse) select.get();
            return getResult(response.getHits(), "insert_time");
        } catch (SQLFeatureNotSupportedException | SqlParseException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Extracts the named field from every hit and formats it (as a UTC date) with the
     * suite's simple date pattern, deduplicating via the returned set.
     */
    private Set<Object> getResult(SearchHits response, String fieldName) {
        final DateTimeFormatter formatter = DateTimeFormat.forPattern(TestsConstants.SIMPLE_DATE_FORMAT);
        final Set<Object> formattedDates = new HashSet<>();
        for (final SearchHit hit : response.getHits()) {
            final Map<String, Object> source = hit.getSourceAsMap();
            final DateTime date = new DateTime(source.get(fieldName), DateTimeZone.UTC);
            formattedDates.add(formatter.print(date));
        }
        return formattedDates;
    }
}
|
/*
* MIT License
*
* Copyright (c) 2021 MASES s.r.l.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
/**************************************************************************************
* <auto-generated>
* This code was generated from a template using JCOReflector
*
* Manual changes to this file may cause unexpected behavior in your application.
* Manual changes to this file will be overwritten if the code is regenerated.
* </auto-generated>
*************************************************************************************/
package system.windows.forms;
import org.mases.jcobridge.*;
import org.mases.jcobridge.netreflection.*;
import java.util.ArrayList;
// Import section
import system.windows.forms.DataGridViewCellCancelEventArgs;
import system.windows.forms.DataGridViewDataErrorContexts;
/**
* The base .NET class managing System.Windows.Forms.DataGridViewDataErrorEventArgs, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089. Extends {@link NetObject}.
* <p>
*
* See: <a href="https://docs.microsoft.com/en-us/dotnet/api/System.Windows.Forms.DataGridViewDataErrorEventArgs" target="_top">https://docs.microsoft.com/en-us/dotnet/api/System.Windows.Forms.DataGridViewDataErrorEventArgs</a>
*/
public class DataGridViewDataErrorEventArgs extends DataGridViewCellCancelEventArgs {
    // NOTE: this file is auto-generated by JCOReflector (see header); manual edits
    // other than comments will be overwritten on regeneration.

    /**
     * Fully assembly qualified name: System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
     */
    public static final String assemblyFullName = "System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089";
    /**
     * Assembly name: System.Windows.Forms
     */
    public static final String assemblyShortName = "System.Windows.Forms";
    /**
     * Qualified class name: System.Windows.Forms.DataGridViewDataErrorEventArgs
     */
    public static final String className = "System.Windows.Forms.DataGridViewDataErrorEventArgs";
    // Bridge bound to the containing .NET assembly; resolves types and adds references.
    static JCOBridge bridge = JCOBridgeInstance.getInstance(assemblyFullName);
    /**
     * The type managed from JCOBridge. See {@link JCType}
     */
    public static JCType classType = createType();
    static JCEnum enumInstance = null;
    // The wrapped .NET object instance; null until set via a constructor or setJCOInstance.
    JCObject classInstance = null;

    /**
     * Resolves the .NET type through the bridge. Returns null (after logging) on failure,
     * so a broken environment surfaces later as a null classType rather than at class load.
     */
    static JCType createType() {
        try {
            String classToCreate = className + ", "
                    + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Creating %s", classToCreate);
            JCType typeCreated = bridge.GetType(classToCreate);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Created: %s",
                        (typeCreated != null) ? typeCreated.toString() : "Returned null value");
            return typeCreated;
        } catch (JCException e) {
            JCOReflector.writeLog(e);
            return null;
        }
    }

    // Registers an additional assembly reference with the bridge, translating native errors.
    void addReference(String ref) throws Throwable {
        try {
            bridge.AddReference(ref);
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    /**
     * Wraps an existing bridged instance; the instance must be a {@link JCObject}.
     */
    public DataGridViewDataErrorEventArgs(Object instance) throws Throwable {
        super(instance);
        if (instance instanceof JCObject) {
            classInstance = (JCObject) instance;
        } else
            throw new Exception("Cannot manage object, it is not a JCObject");
    }

    public String getJCOAssemblyName() {
        return assemblyFullName;
    }

    public String getJCOClassName() {
        return className;
    }

    public String getJCOObjectName() {
        return className + ", " + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
    }

    public Object getJCOInstance() {
        return classInstance;
    }

    public void setJCOInstance(JCObject instance) {
        classInstance = instance;
        super.setJCOInstance(classInstance);
    }

    public JCType getJCOType() {
        return classType;
    }

    /**
     * Try to cast the {@link IJCOBridgeReflected} instance into {@link DataGridViewDataErrorEventArgs}, a cast assert is made to check if types are compatible.
     * @param from {@link IJCOBridgeReflected} instance to be casted
     * @return {@link DataGridViewDataErrorEventArgs} instance
     * @throws java.lang.Throwable in case of error during cast operation
     */
    public static DataGridViewDataErrorEventArgs cast(IJCOBridgeReflected from) throws Throwable {
        NetType.AssertCast(classType, from);
        return new DataGridViewDataErrorEventArgs(from.getJCOInstance());
    }

    // Constructors section

    // Generated no-op constructor: creates an unbound wrapper (no .NET instance attached).
    public DataGridViewDataErrorEventArgs() throws Throwable {
    }

    /**
     * Creates the .NET DataGridViewDataErrorEventArgs with the given exception,
     * cell coordinates, and error context, wrapping the new instance.
     */
    public DataGridViewDataErrorEventArgs(NetException exception, int columnIndex, int rowIndex, DataGridViewDataErrorContexts context) throws Throwable, system.ArgumentOutOfRangeException {
        try {
            // add reference to assemblyName.dll file
            addReference(JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
            setJCOInstance((JCObject)classType.NewObject(exception == null ? null : exception.getJCOInstance(), columnIndex, rowIndex, context == null ? null : context.getJCOInstance()));
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    // Methods section

    // Properties section

    /** Reads the .NET ThrowException property. */
    public boolean getThrowException() throws Throwable {
        if (classInstance == null)
            throw new UnsupportedOperationException("classInstance is null.");
        try {
            return (boolean)classInstance.Get("ThrowException");
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    /** Writes the .NET ThrowException property. */
    public void setThrowException(boolean ThrowException) throws Throwable, system.ArgumentNullException, system.ArgumentException, system.InvalidOperationException, system.MissingMethodException, system.reflection.TargetInvocationException, system.NotImplementedException, system.NotSupportedException, system.ArgumentOutOfRangeException, system.globalization.CultureNotFoundException, system.resources.MissingManifestResourceException, system.ObjectDisposedException {
        if (classInstance == null)
            throw new UnsupportedOperationException("classInstance is null.");
        try {
            classInstance.Set("ThrowException", ThrowException);
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    /** Reads the .NET Exception property, wrapped as a {@link NetException}. */
    public NetException getException() throws Throwable {
        if (classInstance == null)
            throw new UnsupportedOperationException("classInstance is null.");
        try {
            JCObject val = (JCObject)classInstance.Get("Exception");
            return new NetException(val);
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    /** Reads the .NET Context property, wrapped as a {@link DataGridViewDataErrorContexts}. */
    public DataGridViewDataErrorContexts getContext() throws Throwable {
        if (classInstance == null)
            throw new UnsupportedOperationException("classInstance is null.");
        try {
            JCObject val = (JCObject)classInstance.Get("Context");
            return new DataGridViewDataErrorContexts(val);
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    // Instance Events section
}
|
/*
* Copyright 2021 VMware, Inc.
* SPDX-License-Identifier: Apache License 2.0
*/
package com.vmware.avi.sdk.model;
import java.util.*;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
/**
 * SensitiveLogProfile is a POJO class used to configure masking rules for
 * sensitive fields in application logs.
*
* @version 1.0
* @since
*
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(JsonInclude.Include.NON_NULL)
public class SensitiveLogProfile {

    @JsonProperty("header_field_rules")
    private List<SensitiveFieldRule> headerFieldRules = null;

    @JsonProperty("uri_query_field_rules")
    private List<SensitiveFieldRule> uriQueryFieldRules = null;

    @JsonProperty("waf_field_rules")
    private List<SensitiveFieldRule> wafFieldRules = null;

    /**
     * This is the getter method this will return the attribute value.
     * Match sensitive header fields in http application log.
     * Field introduced in 17.2.10, 18.1.2.
     * Default value when not specified in API or module is interpreted by Avi Controller as null.
     * @return headerFieldRules
     */
    public List<SensitiveFieldRule> getHeaderFieldRules() {
        return headerFieldRules;
    }

    /**
     * This is the setter method. this will set the headerFieldRules
     * Match sensitive header fields in http application log.
     * Field introduced in 17.2.10, 18.1.2.
     * Default value when not specified in API or module is interpreted by Avi Controller as null.
     * @param headerFieldRules rules to install
     */
    public void setHeaderFieldRules(List<SensitiveFieldRule> headerFieldRules) {
        this.headerFieldRules = headerFieldRules;
    }

    /**
     * Appends a single rule, lazily creating the backing list on first use.
     * Match sensitive header fields in http application log.
     * Field introduced in 17.2.10, 18.1.2.
     * @return this, for chaining
     */
    public SensitiveLogProfile addHeaderFieldRulesItem(SensitiveFieldRule headerFieldRulesItem) {
        if (this.headerFieldRules == null) {
            this.headerFieldRules = new ArrayList<SensitiveFieldRule>();
        }
        this.headerFieldRules.add(headerFieldRulesItem);
        return this;
    }

    /**
     * This is the getter method this will return the attribute value.
     * Match sensitive uri query params in http application log.
     * Query params from the uri are extracted and checked for matching sensitive parameter names.
     * A successful match will mask the parameter values in accordance with this rule action.
     * Field introduced in 20.1.7, 21.1.2.
     * Default value when not specified in API or module is interpreted by Avi Controller as null.
     * @return uriQueryFieldRules
     */
    public List<SensitiveFieldRule> getUriQueryFieldRules() {
        return uriQueryFieldRules;
    }

    /**
     * This is the setter method. this will set the uriQueryFieldRules
     * Match sensitive uri query params in http application log.
     * Query params from the uri are extracted and checked for matching sensitive parameter names.
     * A successful match will mask the parameter values in accordance with this rule action.
     * Field introduced in 20.1.7, 21.1.2.
     * Default value when not specified in API or module is interpreted by Avi Controller as null.
     * @param uriQueryFieldRules rules to install
     */
    public void setUriQueryFieldRules(List<SensitiveFieldRule> uriQueryFieldRules) {
        this.uriQueryFieldRules = uriQueryFieldRules;
    }

    /**
     * Appends a single rule, lazily creating the backing list on first use.
     * Match sensitive uri query params in http application log.
     * Field introduced in 20.1.7, 21.1.2.
     * @return this, for chaining
     */
    public SensitiveLogProfile addUriQueryFieldRulesItem(SensitiveFieldRule uriQueryFieldRulesItem) {
        if (this.uriQueryFieldRules == null) {
            this.uriQueryFieldRules = new ArrayList<SensitiveFieldRule>();
        }
        this.uriQueryFieldRules.add(uriQueryFieldRulesItem);
        return this;
    }

    /**
     * This is the getter method this will return the attribute value.
     * Match sensitive waf log fields in http application log.
     * Field introduced in 17.2.13, 18.1.3.
     * Default value when not specified in API or module is interpreted by Avi Controller as null.
     * @return wafFieldRules
     */
    public List<SensitiveFieldRule> getWafFieldRules() {
        return wafFieldRules;
    }

    /**
     * This is the setter method. this will set the wafFieldRules
     * Match sensitive waf log fields in http application log.
     * Field introduced in 17.2.13, 18.1.3.
     * Default value when not specified in API or module is interpreted by Avi Controller as null.
     * @param wafFieldRules rules to install
     */
    public void setWafFieldRules(List<SensitiveFieldRule> wafFieldRules) {
        this.wafFieldRules = wafFieldRules;
    }

    /**
     * Appends a single rule, lazily creating the backing list on first use.
     * Match sensitive waf log fields in http application log.
     * Field introduced in 17.2.13, 18.1.3.
     * @return this, for chaining
     */
    public SensitiveLogProfile addWafFieldRulesItem(SensitiveFieldRule wafFieldRulesItem) {
        if (this.wafFieldRules == null) {
            this.wafFieldRules = new ArrayList<SensitiveFieldRule>();
        }
        this.wafFieldRules.add(wafFieldRulesItem);
        return this;
    }

    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        SensitiveLogProfile objSensitiveLogProfile = (SensitiveLogProfile) o;
        return Objects.equals(this.headerFieldRules, objSensitiveLogProfile.headerFieldRules)&&
                Objects.equals(this.wafFieldRules, objSensitiveLogProfile.wafFieldRules)&&
                Objects.equals(this.uriQueryFieldRules, objSensitiveLogProfile.uriQueryFieldRules);
    }

    /**
     * Required alongside the {@link #equals} override: equal instances must produce
     * equal hash codes for hash-based collections to behave correctly.
     */
    @Override
    public int hashCode() {
        return Objects.hash(headerFieldRules, uriQueryFieldRules, wafFieldRules);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class SensitiveLogProfile {\n");
        sb.append("    headerFieldRules: ").append(toIndentedString(headerFieldRules)).append("\n");
        sb.append("    uriQueryFieldRules: ").append(toIndentedString(uriQueryFieldRules)).append("\n");
        sb.append("    wafFieldRules: ").append(toIndentedString(wafFieldRules)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
|
/*
* Copyright 2019 Arcus Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.iris.agent.zw.events;
/**
 * Immutable event carrying the Z-Wave protocol version (major and minor parts)
 * reported by the controller.
 */
public class ZWProtocolVersionEvent implements ZWEvent {
    // Major and minor components of the reported protocol version.
    private final int protocolVersion;
    private final int protocolSubversion;

    public ZWProtocolVersionEvent(int version, int subversion) {
        this.protocolVersion = version;
        this.protocolSubversion = subversion;
    }

    /** @return the major protocol version. */
    public int getVersion() {
        return protocolVersion;
    }

    /** @return the minor protocol version. */
    public int getSubversion() {
        return protocolSubversion;
    }

    @Override
    public ZWEventType getType() {
        return ZWEventType.PROTOCOL_VERSION;
    }
}
|
/*
* MIT License
*
* Copyright (c) 2017-2019 nuls.io
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package io.nuls.contract.model.dto;
import io.nuls.base.basic.AddressTool;
import io.nuls.base.data.CoinTo;
import io.nuls.core.rpc.model.ApiModel;
import io.nuls.core.rpc.model.ApiModelProperty;
import static io.nuls.contract.util.ContractUtil.bigInteger2String;
/**
* @author: PierreLuo
* @date: 2019-03-14
*/
@ApiModel
public class OutputDto {

    @ApiModelProperty(description = "输出地址")
    private String address;

    @ApiModelProperty(description = "资产链ID")
    private int assetsChainId;

    @ApiModelProperty(description = "资产ID")
    private int assetsId;

    @ApiModelProperty(description = "输出金额")
    private String amount;

    @ApiModelProperty(description = "锁定时间")
    private long lockTime;

    /**
     * Builds the DTO from a {@link CoinTo} transaction output, converting the raw
     * address bytes to their string form and the amount to a decimal string.
     */
    public OutputDto(CoinTo to) {
        this.lockTime = to.getLockTime();
        this.amount = bigInteger2String(to.getAmount());
        this.assetsId = to.getAssetsId();
        this.assetsChainId = to.getAssetsChainId();
        this.address = AddressTool.getStringAddressByBytes(to.getAddress());
    }

    /** @return the output address in string form. */
    public String getAddress() {
        return address;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    /** @return chain id of the asset being output. */
    public int getAssetsChainId() {
        return assetsChainId;
    }

    public void setAssetsChainId(int assetsChainId) {
        this.assetsChainId = assetsChainId;
    }

    /** @return id of the asset being output. */
    public int getAssetsId() {
        return assetsId;
    }

    public void setAssetsId(int assetsId) {
        this.assetsId = assetsId;
    }

    /** @return the output amount as a decimal string. */
    public String getAmount() {
        return amount;
    }

    public void setAmount(String amount) {
        this.amount = amount;
    }

    /** @return the lock time of this output. */
    public long getLockTime() {
        return lockTime;
    }

    public void setLockTime(long lockTime) {
        this.lockTime = lockTime;
    }
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.beans;
/**
 * Simple bean representing a genre ("genero") with a numeric id and a name.
 *
 * @author andre
 */
public class Genero {

    private int id;
    private String nome;

    /** @return the genre's id. */
    public int getId() {
        return id;
    }

    /** @param id the genre's id. */
    public void setId(int id) {
        this.id = id;
    }

    /** @return the genre's display name. */
    public String getNome() {
        return nome;
    }

    /** @param nome the genre's display name. */
    public void setNome(String nome) {
        this.nome = nome;
    }

    /**
     * Returns the genre name so the bean renders usefully in UI widgets
     * (e.g. combo boxes) that call toString().
     */
    @Override
    public String toString() {
        return getNome();
    }
}
|
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.contextualsearch;
import android.net.Uri;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import org.chromium.chrome.browser.ChromeActivity;
import org.chromium.chrome.browser.compositor.bottombar.OverlayContentDelegate;
import org.chromium.chrome.browser.compositor.bottombar.OverlayContentProgressObserver;
import org.chromium.chrome.browser.compositor.bottombar.OverlayPanelContent;
import org.chromium.chrome.browser.compositor.bottombar.OverlayPanelContentFactory;
import org.chromium.content_public.browser.WebContents;
import org.chromium.content_public.browser.WebContentsObserver;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeoutException;
/**
* Implements a fake Contextual Search server, for testing purposes.
* TODO(donnd): add more functionality to this class once the overall approach has been validated.
* TODO(donnd): rename this class when we refactor and rename the interface it implements. Should
* be something like ContextualSearchFakeEnvironment.
*/
@VisibleForTesting
class ContextualSearchFakeServer
implements ContextualSearchNetworkCommunicator, OverlayPanelContentFactory {
static final long LOGGED_EVENT_ID = 1L << 50; // Arbitrary value larger than 32 bits.

// Test collaborators; presumably injected by the enclosing test setup — the
// constructor is not visible in this chunk, TODO confirm.
private final ContextualSearchPolicy mPolicy;
private final ContextualSearchManagerTest mManagerTest;
private final ContextualSearchNetworkCommunicator mBaseManager;
private final OverlayContentDelegate mContentDelegate;
private final OverlayContentProgressObserver mProgressObserver;
private final ChromeActivity mActivity;

// URLs recorded as removed by the fake server (for later assertions).
private final ArrayList<String> mRemovedUrls = new ArrayList<String>();

// Registered fake searches; presumably keyed by the DOM node id they simulate
// on — registration code not visible here, TODO confirm.
private final Map<String, FakeTapSearch> mFakeTapSearches = new HashMap<>();
private final Map<String, FakeLongPressSearch> mFakeLongPressSearches = new HashMap<>();
private final Map<String, FakeSlowResolveSearch> mFakeSlowResolveSearches = new HashMap<>();

// The tap search whose fake resolution is currently in flight, if any
// (set in FakeTapSearch#simulate, cleared when the resolution runnable fires).
private FakeTapSearch mActiveFakeTapSearch;

// State captured by the fake network layer for test assertions.
private String mLoadedUrl;
private int mLoadedUrlCount;
private boolean mUseInvalidLowPriorityPath;
private String mSearchTermRequested;
private boolean mShouldUseHttps;
private boolean mIsOnline = true;
private boolean mIsExactResolve;
private boolean mDidEverCallWebContentsOnShow;
private boolean mDidEverCallWebContentsOnShow;
/** Tracks visibility of the overlay panel's WebContents for test assertions. */
private class ContentsObserver extends WebContentsObserver {
    private boolean mIsVisible;

    private ContentsObserver(WebContents webContents) {
        super(webContents);
    }

    private boolean isVisible() {
        return mIsVisible;
    }

    @Override
    public void wasShown() {
        mIsVisible = true;
        // Record that onShow was observed at least once over the test's lifetime.
        mDidEverCallWebContentsOnShow = true;
    }

    @Override
    public void wasHidden() {
        mIsVisible = false;
    }
};

// The currently installed observer; presumably attached when overlay content is
// created (not visible in this chunk) — TODO confirm.
private ContentsObserver mContentsObserver;

/** @return whether the observed overlay content is currently visible. */
boolean isContentVisible() {
    return mContentsObserver.isVisible();
}
//============================================================================================
// FakeSearch
//============================================================================================
/**
 * Base class for a scripted ("fake") contextual search interaction driven from
 * a touch gesture on a named DOM node.
 */
public abstract class FakeSearch {
    // DOM node on which the simulated gesture is performed.
    private final String mSimulatedNodeId;

    /**
     * @param nodeId The id of the node where the touch event will be simulated.
     */
    FakeSearch(String nodeId) {
        mSimulatedNodeId = nodeId;
    }

    /**
     * Runs the scripted search gesture end to end.
     *
     * @throws InterruptedException
     * @throws TimeoutException
     */
    public abstract void simulate() throws InterruptedException, TimeoutException;

    /**
     * @return The search term that will be used in the contextual search.
     */
    public abstract String getSearchTerm();

    /**
     * @return The id of the node where the touch event will be simulated.
     */
    public String getNodeId() {
        return mSimulatedNodeId;
    }
}
//============================================================================================
// FakeLongPressSearch
//============================================================================================
/**
 * A scripted contextual search triggered by a long-press gesture.
 */
public class FakeLongPressSearch extends FakeSearch {
    // Text expected to be selected once the long-press lands on the node.
    private final String mExpectedSelection;

    /**
     * @param nodeId The id of the node where the touch event will be simulated.
     * @param searchTerm The expected text that the node should contain.
     */
    FakeLongPressSearch(String nodeId, String searchTerm) {
        super(nodeId);
        mExpectedSelection = searchTerm;
    }

    @Override
    public String getSearchTerm() {
        return mExpectedSelection;
    }

    @Override
    public void simulate() throws InterruptedException, TimeoutException {
        // Long-press the node, then block until the selection matches the expected term.
        mManagerTest.longPressNode(getNodeId());
        mManagerTest.waitForSelectionToBe(mExpectedSelection);
    }
}
//============================================================================================
// FakeTapSearch
//============================================================================================
/**
 * Class that represents a fake tap triggered contextual search.
 * The simulation hand-shakes with the real resolution machinery: it waits for the
 * system to request a Search Term Resolution, then injects a canned response on
 * the main thread and waits for that response to be consumed.
 */
public class FakeTapSearch extends FakeSearch {
    // Canned server response that determines what the fake resolution returns.
    protected final ResolvedSearchTerm mResolvedSearchTerm;
    // Handshake flags, read by the test's wait loops and written here.
    boolean mDidStartResolution;
    boolean mDidFinishResolution;

    /**
     * @param nodeId The id of the node where the touch event will be simulated.
     * @param resolvedSearchTerm The details of the server's Resolve request response, which
     *                           tells us what to search for.
     */
    FakeTapSearch(String nodeId, ResolvedSearchTerm resolvedSearchTerm) {
        super(nodeId);
        mResolvedSearchTerm = resolvedSearchTerm;
    }

    /**
     * @param nodeId The id of the node where the touch event will be simulated.
     * @param isNetworkUnavailable Whether the network is unavailable.
     * @param responseCode The HTTP response code of the resolution.
     * @param searchTerm The resolved search term.
     * @param displayText The display text.
     */
    FakeTapSearch(String nodeId, boolean isNetworkUnavailable, int responseCode,
            String searchTerm, String displayText) {
        this(nodeId,
                new ResolvedSearchTerm
                        .Builder(isNetworkUnavailable, responseCode, searchTerm, displayText)
                        .build());
    }

    @Override
    public void simulate() throws InterruptedException, TimeoutException {
        // Mark this search as the one whose resolution is in flight.
        mActiveFakeTapSearch = this;
        // When a resolution is needed, the simulation does not start until the system
        // requests one, and it does not finish until the simulated resolution happens.
        mDidStartResolution = false;
        mDidFinishResolution = false;
        mManagerTest.clickNode(getNodeId());
        mManagerTest.waitForSelectionToBe(getSearchTerm());
        if (mPolicy.shouldPreviousGestureResolve()) {
            // Now wait for the Search Term Resolution to start.
            mManagerTest.waitForSearchTermResolutionToStart(this);
            // Simulate a Search Term Resolution.
            simulateSearchTermResolution();
            // Now wait for the simulated Search Term Resolution to finish.
            mManagerTest.waitForSearchTermResolutionToFinish(this);
        } else {
            // No resolution expected for this gesture; consider the search complete.
            mDidFinishResolution = true;
        }
    }

    @Override
    public String getSearchTerm() {
        return mResolvedSearchTerm.searchTerm();
    }

    /**
     * Notifies that a Search Term Resolution has started.
     */
    public void notifySearchTermResolutionStarted() {
        mDidStartResolution = true;
    }

    /**
     * @return Whether the Search Term Resolution has started.
     */
    public boolean didStartSearchTermResolution() {
        return mDidStartResolution;
    }

    /**
     * @return Whether the Search Term Resolution has finished.
     */
    public boolean didFinishSearchTermResolution() {
        return mDidFinishResolution;
    }

    /**
     * Simulates a Search Term Resolution.
     */
    protected void simulateSearchTermResolution() {
        // The response must be delivered on the main thread, matching production behavior.
        mManagerTest.runOnMainSync(getRunnable());
    }

    /**
     * @return A Runnable to handle the fake Search Term Resolution.
     */
    private Runnable getRunnable() {
        return new Runnable() {
            @Override
            public void run() {
                // Guard against double delivery: only respond if no response landed yet.
                if (!mDidFinishResolution) {
                    handleSearchTermResolutionResponse(mResolvedSearchTerm);
                    mActiveFakeTapSearch = null;
                    mDidFinishResolution = true;
                }
            }
        };
    }

    ResolvedSearchTerm getResolvedSearchTerm() {
        return mResolvedSearchTerm;
    }
}
//============================================================================================
// FakeSlowResolveSearch
//============================================================================================
/**
 * A fake tap-triggered contextual search whose server resolution stays pending
 * until the test explicitly calls {@link #finishResolve()}.
 */
public class FakeSlowResolveSearch extends FakeTapSearch {
    /**
     * @param nodeId The id of the node where the touch event will be simulated.
     * @param resolvedSearchTerm The details of the server's Resolve request response,
     *        which tells us what to search for.
     */
    FakeSlowResolveSearch(String nodeId, ResolvedSearchTerm resolvedSearchTerm) {
        super(nodeId, resolvedSearchTerm);
    }

    /**
     * Convenience constructor that builds the {@link ResolvedSearchTerm} in place.
     * @param nodeId The id of the node where the touch event will be simulated.
     * @param isNetworkUnavailable Whether the network is unavailable.
     * @param responseCode The HTTP response code of the resolution.
     * @param searchTerm The resolved search term.
     * @param displayText The display text.
     */
    FakeSlowResolveSearch(String nodeId, boolean isNetworkUnavailable, int responseCode,
            String searchTerm, String displayText) {
        this(nodeId,
                new ResolvedSearchTerm
                        .Builder(isNetworkUnavailable, responseCode, searchTerm, displayText)
                        .build());
    }

    @Override
    public void simulate() throws InterruptedException, TimeoutException {
        mActiveFakeTapSearch = this;
        // Resolution starts when the system requests it, and only finishes when
        // the test later calls finishResolve().
        mDidStartResolution = false;
        mDidFinishResolution = false;
        mManagerTest.clickNode(getNodeId());
        mManagerTest.waitForSelectionToBe(getSearchTerm());
        if (!mPolicy.shouldPreviousGestureResolve()) {
            throw new RuntimeException(
                    "Tried to simulate a slow resolving search when not resolving!");
        }
        // Stop once the Resolve request has been issued; the response stays pending.
        mManagerTest.waitForSearchTermResolutionToStart(this);
    }

    /**
     * Completes the pending resolution by faking the server response and waiting
     * for it to be handled.
     * @throws InterruptedException If interrupted while waiting.
     * @throws TimeoutException If the resolution does not finish in time.
     */
    void finishResolve() throws InterruptedException, TimeoutException {
        simulateSearchTermResolution();
        mManagerTest.waitForSearchTermResolutionToFinish(this);
    }
}
//============================================================================================
// OverlayPanelContentWrapper
//============================================================================================
/**
 * A test double for OverlayPanelContent that records each loaded URL, can redirect
 * low-priority loads onto an invalid path (to exercise failover), and stubs out
 * history removal so no native code is reached.
 */
public class OverlayPanelContentWrapper extends OverlayPanelContent {
    OverlayPanelContentWrapper(OverlayContentDelegate contentDelegate,
            OverlayContentProgressObserver progressObserver, ChromeActivity activity,
            float barHeight) {
        super(contentDelegate, progressObserver, activity, false, barHeight);
    }

    @Override
    public void loadUrl(String url, boolean shouldLoadImmediately) {
        // Optionally corrupt low-priority URLs so tests can observe the failover load.
        String urlToLoad = (mUseInvalidLowPriorityPath && isLowPriorityUrl(url))
                ? makeInvalidUrl(url)
                : url;
        mLoadedUrl = urlToLoad;
        mLoadedUrlCount++;
        super.loadUrl(urlToLoad, shouldLoadImmediately);
        mContentsObserver = new ContentsObserver(getWebContents());
    }

    @Override
    public void removeLastHistoryEntry(String url, long timeInMs) {
        // Record the removal instead of calling into native code.
        mRemovedUrls.add(url);
    }

    /**
     * @param baseUrl The URL to build upon / modify.
     * @return The same URL but with an extra "invalid" path segment appended.
     */
    private String makeInvalidUrl(String baseUrl) {
        return Uri.parse(baseUrl).buildUpon().appendPath("invalid").build().toString();
    }

    /** @return Whether the given URL is a low-priority (prefetch) URL. */
    private boolean isLowPriorityUrl(String url) {
        return url.contains("&pf=c");
    }
}
//============================================================================================
// ContextualSearchFakeServer
//============================================================================================
/**
* Constructs a fake Contextual Search server that will callback to the given baseManager.
* @param baseManager The manager to call back to for server responses.
*/
@VisibleForTesting
ContextualSearchFakeServer(ContextualSearchPolicy policy,
ContextualSearchManagerTest managerTest,
ContextualSearchNetworkCommunicator baseManager,
OverlayContentDelegate contentDelegate,
OverlayContentProgressObserver progressObserver,
ChromeActivity activity) {
// Plain field assignments; no work happens until a fake search is simulated.
mPolicy = policy;
mManagerTest = managerTest;
mBaseManager = baseManager;
mContentDelegate = contentDelegate;
mProgressObserver = progressObserver;
mActivity = activity;
}
@Override
public OverlayPanelContent createNewOverlayPanelContent() {
// Hand out the instrumented wrapper so tests can observe URL loads.
return new OverlayPanelContentWrapper(mContentDelegate, mProgressObserver, mActivity,
mManagerTest.getPanel().getBarHeight());
}
/**
 * @return The search term requested, or {@code null} if no search term was requested.
 */
@VisibleForTesting
String getSearchTermRequested() {
return mSearchTermRequested;
}
/**
 * @return The loaded search result page URL, or {@code null} if none was requested.
 */
@VisibleForTesting
String getLoadedUrl() {
return mLoadedUrl;
}
/**
 * @return The number of times we loaded a URL in the Content View.
 */
@VisibleForTesting
int getLoadedUrlCount() {
return mLoadedUrlCount;
}
/**
 * Sets whether to return an HTTPS URL instead of HTTP, from {@link #getBasePageUrl}.
 */
@VisibleForTesting
void setShouldUseHttps(boolean setting) {
mShouldUseHttps = setting;
}
/**
 * @return Whether onShow() was ever called for the current {@code WebContents}.
 */
@VisibleForTesting
boolean didEverCallWebContentsOnShow() {
return mDidEverCallWebContentsOnShow;
}
/**
 * Sets whether the device is currently online or not, as reported by {@link #isOnline}.
 */
@VisibleForTesting
void setIsOnline(boolean isOnline) {
mIsOnline = isOnline;
}
/**
 * Resets the fake server's member data to its default state.
 * Note that {@code mLoadedUrl} becomes {@code null} after this call.
 */
@VisibleForTesting
void reset() {
mLoadedUrl = null;
mSearchTermRequested = null;
mShouldUseHttps = false;
mIsOnline = true;
mLoadedUrlCount = 0;
mUseInvalidLowPriorityPath = false;
mIsExactResolve = false;
}
/**
 * Sets a flag to build low-priority paths that are invalid in order to test failover.
 */
@VisibleForTesting
void setLowPriorityPathInvalid() {
mUseInvalidLowPriorityPath = true;
}
/**
 * @return Whether the most recent loadUrl was attempted on an invalid low-priority path.
 *         Returns {@code false} when no URL has been loaded yet.
 */
@VisibleForTesting
boolean didAttemptLoadInvalidUrl() {
    // Guard against NPE: mLoadedUrl is null after reset() and after
    // startSearchTermResolutionRequest() until the next load happens.
    return mUseInvalidLowPriorityPath && mLoadedUrl != null && mLoadedUrl.contains("invalid");
}
/**
 * @return Whether the last {@link #startSearchTermResolutionRequest} asked for an
 *         exact-term resolve.
 */
@VisibleForTesting
boolean getIsExactResolve() {
return mIsExactResolve;
}
//============================================================================================
// History Removal Helpers
//============================================================================================
/**
 * @param url The URL to be checked.
 * @return Whether the given URL was removed from history.
 */
public boolean hasRemovedUrl(String url) {
return mRemovedUrls.contains(url);
}
//============================================================================================
// ContextualSearchNetworkCommunicator
//============================================================================================
@Override
public void startSearchTermResolutionRequest(String selection, boolean isExactResolve) {
// Just record the request; the response is faked later by the active fake tap search.
mLoadedUrl = null;
mSearchTermRequested = selection;
mIsExactResolve = isExactResolve;
if (mActiveFakeTapSearch != null) {
mActiveFakeTapSearch.notifySearchTermResolutionStarted();
}
}
@Override
public void handleSearchTermResolutionResponse(ResolvedSearchTerm resolvedSearchTerm) {
// Forward the (fake) server response to the real manager under test.
mBaseManager.handleSearchTermResolutionResponse(resolvedSearchTerm);
}
@Override
public boolean isOnline() {
// Reports the test-controlled connectivity set via setIsOnline().
return mIsOnline;
}
@Override
public void stopPanelContentsNavigation() {
// Stub out stop() of the WebContents.
// Navigation of the content in the overlay may have been faked in tests,
// so stopping the WebContents navigation is unsafe.
}
@Override
@Nullable
public URL getBasePageUrl() {
    URL baseUrl = mBaseManager.getBasePageUrl();
    // Optionally present the base page as secure by rewriting the scheme.
    if (!mShouldUseHttps || baseUrl == null) return baseUrl;
    try {
        return new URL(baseUrl.toString().replace("http://", "https://"));
    } catch (MalformedURLException e) {
        // Not expected for a URL that only had its scheme rewritten; fall back
        // to the original URL (same as the pre-existing behavior).
        e.printStackTrace();
        return baseUrl;
    }
}
//============================================================================================
// Fake Searches Helpers
//============================================================================================
/**
 * Register fake searches that can be used in tests. Each fake search takes a node ID, which
 * represents the DOM node that will be touched. The node ID is also used as an ID for the
 * fake search of a given type (LongPress or Tap). This means that if you need different
 * behaviors you need to add new DOM nodes with different IDs in the test's HTML file.
 */
public void registerFakeSearches() {
registerFakeLongPressSearch(new FakeLongPressSearch("search", "Search"));
registerFakeLongPressSearch(new FakeLongPressSearch("term", "Term"));
registerFakeLongPressSearch(new FakeLongPressSearch("resolution", "Resolution"));
registerFakeTapSearch(new FakeTapSearch("search", false, 200, "Search", "Search"));
registerFakeTapSearch(new FakeTapSearch("term", false, 200, "Term", "Term"));
registerFakeTapSearch(
new FakeTapSearch("resolution", false, 200, "Resolution", "Resolution"));
// A tap search whose resolved context language is German.
ResolvedSearchTerm germanSearchTerm =
new ResolvedSearchTerm.Builder(false, 200, "Deutsche", "Deutsche")
.setContextLanguage("de")
.build();
FakeTapSearch germanFakeTapSearch = new FakeTapSearch("german", germanSearchTerm);
registerFakeTapSearch(germanFakeTapSearch);
registerFakeTapSearch(
new FakeTapSearch("intelligence", false, 200, "Intelligence", "Intelligence"));
// Register a fake tap search that will fake a logged event ID from the server, when
// a fake tap is done on the intelligence-logged-event-id element in the test file.
ResolvedSearchTerm searchTermWithId =
new ResolvedSearchTerm.Builder(false, 200, "Intelligence", "Intelligence")
.setLoggedEventId(LOGGED_EVENT_ID)
.build();
FakeTapSearch loggedIdFakeTapSearch =
new FakeTapSearch("intelligence-logged-event-id", searchTermWithId);
registerFakeTapSearch(loggedIdFakeTapSearch);
// Register a resolving search of "States" that expands to "United States".
ResolvedSearchTerm searchTermWithStartAdjust =
new ResolvedSearchTerm.Builder(false, 200, "States", "States")
.setSelectionStartAdjust(-7)
.build();
FakeSlowResolveSearch expandingStatesTapSearch =
new FakeSlowResolveSearch("states", searchTermWithStartAdjust);
registerFakeSlowResolveSearch(expandingStatesTapSearch);
registerFakeSlowResolveSearch(
new FakeSlowResolveSearch("search", false, 200, "Search", "Search"));
}
/**
 * @param id The ID of the FakeLongPressSearch (its DOM node id).
 * @return The FakeLongPressSearch with the given ID.
 */
public FakeLongPressSearch getFakeLongPressSearch(String id) {
return mFakeLongPressSearches.get(id);
}
/**
 * @param id The ID of the FakeTapSearch (its DOM node id).
 * @return The FakeTapSearch with the given ID.
 */
public FakeTapSearch getFakeTapSearch(String id) {
return mFakeTapSearches.get(id);
}
/**
 * @param id The ID of the FakeSlowResolveSearch (its DOM node id).
 * @return The {@code FakeSlowResolveSearch} with the given ID.
 */
public FakeSlowResolveSearch getFakeSlowResolveSearch(String id) {
return mFakeSlowResolveSearches.get(id);
}
/**
 * Register the FakeLongPressSearch, keyed by its node ID.
 * @param fakeSearch The FakeLongPressSearch to be registered.
 */
private void registerFakeLongPressSearch(FakeLongPressSearch fakeSearch) {
mFakeLongPressSearches.put(fakeSearch.getNodeId(), fakeSearch);
}
/**
 * Register the FakeTapSearch, keyed by its node ID.
 * @param fakeSearch The FakeTapSearch to be registered.
 */
private void registerFakeTapSearch(FakeTapSearch fakeSearch) {
mFakeTapSearches.put(fakeSearch.getNodeId(), fakeSearch);
}
/**
 * Register the FakeSlowResolveSearch, keyed by its node ID.
 * @param fakeSlowResolveSearch The {@code FakeSlowResolveSearch} to be registered.
 */
private void registerFakeSlowResolveSearch(FakeSlowResolveSearch fakeSlowResolveSearch) {
mFakeSlowResolveSearches.put(fakeSlowResolveSearch.getNodeId(), fakeSlowResolveSearch);
}
}
|
/* Generated By:JJTree&JavaCC: Do not edit this line. ExpressionParserTokenManager.java */
/*****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
****************************************************************/
package org.apache.cayenne.exp.parser;
import java.io.*;
import java.util.*;
import java.math.*;
import org.apache.cayenne.exp.*;
/** Token Manager. */
public class ExpressionParserTokenManager implements ExpressionParserConstants
{
/** Holds the last value computed by a constant token. */
Object literalValue;
/** Holds the last string literal parsed. */
private StringBuffer stringBuffer;
/**
 * Converts the escape sequence at the end of {@code image} into a character value.
 * Handles the named escapes (n, r, t, b, f, backslash, quote, double-quote);
 * anything else is treated as an octal escape.
 * NOTE: machine-generated by JJTree/JavaCC (see file header); do not hand-edit.
 */
private char escapeChar()
{
int ofs = image.length() - 1;
switch ( image.charAt(ofs) ) {
case 'n': return '\u005cn';
case 'r': return '\u005cr';
case 't': return '\u005ct';
case 'b': return '\u005cb';
case 'f': return '\u005cf';
case '\u005c\u005c': return '\u005c\u005c';
case '\u005c'': return '\u005c'';
case '\u005c"': return '\u005c"';
}
// Otherwise, it's an octal number.  Find the backslash and convert.
while ( image.charAt(--ofs) != '\u005c\u005c' )
{}
int value = 0;
while ( ++ofs < image.length() )
value = (value << 3) | (image.charAt(ofs) - '0');
return (char) value;
}
/**
 * Parses the current token image as an integer literal.
 * Base is inferred from the prefix ("0x"/"0X" = hex, other leading '0' = octal,
 * else decimal); suffix 'l'/'L' yields a Long and 'h'/'H' a BigInteger,
 * otherwise an Integer.  Machine-generated; do not hand-edit.
 */
private Object makeInt()
{
Object result;
String s = image.toString();
int base = 10;
if ( s.charAt(0) == '0' )
base = (s.length() > 1 && (s.charAt(1) == 'x' || s.charAt(1) == 'X'))? 16 : 8;
if ( base == 16 )
s = s.substring(2); // Trim the 0x off the front
switch ( s.charAt(s.length()-1) ) {
case 'l': case 'L':
result = Long.valueOf( s.substring(0,s.length()-1), base );
break;
case 'h': case 'H':
result = new BigInteger( s.substring(0,s.length()-1), base );
break;
default:
result = Integer.valueOf( s, base );
break;
}
return result;
}
/**
 * Parses the current token image as a floating-point literal: suffix 'f'/'F'
 * yields a Float, 'b'/'B' a BigDecimal (suffix stripped), anything else
 * (including 'd'/'D') a Double.  Machine-generated; do not hand-edit.
 */
private Object makeFloat()
{
String s = image.toString();
switch ( s.charAt(s.length()-1) ) {
case 'f': case 'F':
return Float.valueOf( s );
case 'b': case 'B':
return new BigDecimal( s.substring(0,s.length()-1) );
case 'd': case 'D':
default:
return Double.valueOf( s );
}
}
/** Debug output. */
public PrintStream debugStream = System.out;
/** Set debug output. */
public void setDebugStream(PrintStream ds) { debugStream = ds; }
// Records a literal match of `kind` ending at `pos` and returns the next position.
// Machine-generated by JavaCC; do not hand-edit.
private int jjStopAtPos(int pos, int kind)
{
jjmatchedKind = kind;
jjmatchedPos = pos;
return pos + 1;
}
/**
 * Machine-generated (JJTree/JavaCC) string-literal DFA step for the first input
 * character.  Single-character tokens record their kind in jjmatchedKind and fall
 * to the NFA; longer literals pass a bitmask of still-viable literals to Dfa1.
 * Do not hand-edit; regenerate from the grammar instead.
 */
private int jjMoveStringLiteralDfa0_0()
{
switch(curChar)
{
case 9:
jjmatchedKind = 31;
return jjMoveNfa_0(3, 0);
case 10:
jjmatchedKind = 32;
return jjMoveNfa_0(3, 0);
case 13:
jjmatchedKind = 33;
return jjMoveNfa_0(3, 0);
case 32:
jjmatchedKind = 30;
return jjMoveNfa_0(3, 0);
case 33:
jjmatchedKind = 4;
return jjMoveStringLiteralDfa1_0(0x80L);
case 34:
jjmatchedKind = 64;
return jjMoveNfa_0(3, 0);
case 36:
jjmatchedKind = 55;
return jjMoveNfa_0(3, 0);
case 38:
jjmatchedKind = 22;
return jjMoveNfa_0(3, 0);
case 39:
jjmatchedKind = 63;
return jjMoveNfa_0(3, 0);
case 40:
jjmatchedKind = 16;
return jjMoveNfa_0(3, 0);
case 41:
jjmatchedKind = 17;
return jjMoveNfa_0(3, 0);
case 42:
jjmatchedKind = 27;
return jjMoveNfa_0(3, 0);
case 43:
jjmatchedKind = 25;
return jjMoveNfa_0(3, 0);
case 44:
jjmatchedKind = 19;
return jjMoveNfa_0(3, 0);
case 45:
jjmatchedKind = 26;
return jjMoveNfa_0(3, 0);
case 47:
jjmatchedKind = 28;
return jjMoveNfa_0(3, 0);
case 60:
jjmatchedKind = 10;
return jjMoveStringLiteralDfa1_0(0x800300L);
case 61:
jjmatchedKind = 5;
return jjMoveStringLiteralDfa1_0(0x40L);
case 62:
jjmatchedKind = 11;
return jjMoveStringLiteralDfa1_0(0x1001000L);
case 65:
return jjMoveStringLiteralDfa1_0(0x2002000000000L);
case 67:
return jjMoveStringLiteralDfa1_0(0x70060000000000L);
case 76:
return jjMoveStringLiteralDfa1_0(0x1a00000000000L);
case 77:
return jjMoveStringLiteralDfa1_0(0x800c000000000L);
case 83:
return jjMoveStringLiteralDfa1_0(0x4090000000000L);
case 84:
return jjMoveStringLiteralDfa1_0(0x100000000000L);
case 85:
return jjMoveStringLiteralDfa1_0(0x400000000000L);
case 94:
jjmatchedKind = 21;
return jjMoveNfa_0(3, 0);
case 97:
return jjMoveStringLiteralDfa1_0(0x2002000000004L);
case 98:
return jjMoveStringLiteralDfa1_0(0x40000L);
case 99:
return jjMoveStringLiteralDfa1_0(0x70060000000000L);
case 100:
return jjMoveStringLiteralDfa1_0(0x200000000000000L);
case 101:
return jjMoveStringLiteralDfa1_0(0x400000000000000L);
case 105:
return jjMoveStringLiteralDfa1_0(0x8000L);
case 108:
return jjMoveStringLiteralDfa1_0(0x1a00000006000L);
case 109:
return jjMoveStringLiteralDfa1_0(0x800c000000000L);
case 110:
return jjMoveStringLiteralDfa1_0(0x8L);
case 111:
return jjMoveStringLiteralDfa1_0(0x100000000000002L);
case 115:
return jjMoveStringLiteralDfa1_0(0x4090000000000L);
case 116:
return jjMoveStringLiteralDfa1_0(0x100000000000L);
case 117:
return jjMoveStringLiteralDfa1_0(0x400000000000L);
case 124:
jjmatchedKind = 20;
return jjMoveNfa_0(3, 0);
case 126:
jjmatchedKind = 29;
return jjMoveNfa_0(3, 0);
default :
return jjMoveNfa_0(3, 0);
}
}
// Generated literal-match DFA step for character position 1; bits of active0
// track which string literals are still viable.  Do not hand-edit (see file header).
private int jjMoveStringLiteralDfa1_0(long active0)
{
try { curChar = input_stream.readChar(); }
catch(IOException e) {
return jjMoveNfa_0(3, 0);
}
switch(curChar)
{
case 60:
if ((active0 & 0x800000L) != 0L)
{
jjmatchedKind = 23;
jjmatchedPos = 1;
}
break;
case 61:
if ((active0 & 0x40L) != 0L)
{
jjmatchedKind = 6;
jjmatchedPos = 1;
}
else if ((active0 & 0x80L) != 0L)
{
jjmatchedKind = 7;
jjmatchedPos = 1;
}
else if ((active0 & 0x200L) != 0L)
{
jjmatchedKind = 9;
jjmatchedPos = 1;
}
else if ((active0 & 0x1000L) != 0L)
{
jjmatchedKind = 12;
jjmatchedPos = 1;
}
break;
case 62:
if ((active0 & 0x100L) != 0L)
{
jjmatchedKind = 8;
jjmatchedPos = 1;
}
else if ((active0 & 0x1000000L) != 0L)
{
jjmatchedKind = 24;
jjmatchedPos = 1;
}
break;
case 65:
return jjMoveStringLiteralDfa2_0(active0, 0x8000000000L);
case 66:
return jjMoveStringLiteralDfa2_0(active0, 0x2000000000000L);
case 69:
return jjMoveStringLiteralDfa2_0(active0, 0x800000000000L);
case 73:
return jjMoveStringLiteralDfa2_0(active0, 0x4000000000L);
case 79:
return jjMoveStringLiteralDfa2_0(active0, 0x9260000000000L);
case 80:
return jjMoveStringLiteralDfa2_0(active0, 0x400000000000L);
case 81:
return jjMoveStringLiteralDfa2_0(active0, 0x4000000000000L);
case 82:
return jjMoveStringLiteralDfa2_0(active0, 0x100000000000L);
case 85:
return jjMoveStringLiteralDfa2_0(active0, 0x70090000000000L);
case 86:
return jjMoveStringLiteralDfa2_0(active0, 0x2000000000L);
case 97:
return jjMoveStringLiteralDfa2_0(active0, 0x8000000000L);
case 98:
return jjMoveStringLiteralDfa2_0(active0, 0x302000000000000L);
case 101:
return jjMoveStringLiteralDfa2_0(active0, 0x800000040000L);
case 105:
return jjMoveStringLiteralDfa2_0(active0, 0x4000006000L);
case 110:
if ((active0 & 0x8000L) != 0L)
{
jjmatchedKind = 15;
jjmatchedPos = 1;
}
return jjMoveStringLiteralDfa2_0(active0, 0x400000000000004L);
case 111:
return jjMoveStringLiteralDfa2_0(active0, 0x9260000000008L);
case 112:
return jjMoveStringLiteralDfa2_0(active0, 0x400000000000L);
case 113:
return jjMoveStringLiteralDfa2_0(active0, 0x4000000000000L);
case 114:
if ((active0 & 0x2L) != 0L)
{
jjmatchedKind = 1;
jjmatchedPos = 1;
}
return jjMoveStringLiteralDfa2_0(active0, 0x100000000000L);
case 117:
return jjMoveStringLiteralDfa2_0(active0, 0x70090000000000L);
case 118:
return jjMoveStringLiteralDfa2_0(active0, 0x2000000000L);
default :
break;
}
return jjMoveNfa_0(3, 1);
}
// Generated literal-match DFA step for character position 2.  Do not hand-edit
// (see file header); regenerate from the grammar instead.
private int jjMoveStringLiteralDfa2_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjMoveNfa_0(3, 1);
try { curChar = input_stream.readChar(); }
catch(IOException e) {
return jjMoveNfa_0(3, 1);
}
switch(curChar)
{
case 58:
if ((active0 & 0x200000000000000L) != 0L)
{
jjmatchedKind = 57;
jjmatchedPos = 2;
}
break;
case 66:
return jjMoveStringLiteralDfa3_0(active0, 0x80000000000L);
case 67:
return jjMoveStringLiteralDfa3_0(active0, 0x1000000000000L);
case 68:
if ((active0 & 0x8000000000000L) != 0L)
{
jjmatchedKind = 51;
jjmatchedPos = 2;
}
break;
case 71:
if ((active0 & 0x2000000000L) != 0L)
{
jjmatchedKind = 37;
jjmatchedPos = 2;
}
break;
case 73:
return jjMoveStringLiteralDfa3_0(active0, 0x100000000000L);
case 77:
if ((active0 & 0x10000000000L) != 0L)
{
jjmatchedKind = 40;
jjmatchedPos = 2;
}
break;
case 78:
if ((active0 & 0x4000000000L) != 0L)
{
jjmatchedKind = 38;
jjmatchedPos = 2;
}
return jjMoveStringLiteralDfa3_0(active0, 0x840000000000L);
case 80:
return jjMoveStringLiteralDfa3_0(active0, 0x400000000000L);
case 82:
return jjMoveStringLiteralDfa3_0(active0, 0x74000000000000L);
case 83:
if ((active0 & 0x2000000000000L) != 0L)
{
jjmatchedKind = 49;
jjmatchedPos = 2;
}
break;
case 85:
return jjMoveStringLiteralDfa3_0(active0, 0x20000000000L);
case 87:
return jjMoveStringLiteralDfa3_0(active0, 0x200000000000L);
case 88:
if ((active0 & 0x8000000000L) != 0L)
{
jjmatchedKind = 39;
jjmatchedPos = 2;
}
break;
case 98:
return jjMoveStringLiteralDfa3_0(active0, 0x80000000000L);
case 99:
return jjMoveStringLiteralDfa3_0(active0, 0x1000000000000L);
case 100:
if ((active0 & 0x4L) != 0L)
{
jjmatchedKind = 2;
jjmatchedPos = 2;
}
else if ((active0 & 0x8000000000000L) != 0L)
{
jjmatchedKind = 51;
jjmatchedPos = 2;
}
break;
case 103:
if ((active0 & 0x2000000000L) != 0L)
{
jjmatchedKind = 37;
jjmatchedPos = 2;
}
break;
case 105:
return jjMoveStringLiteralDfa3_0(active0, 0x100000000000L);
case 106:
return jjMoveStringLiteralDfa3_0(active0, 0x100000000000000L);
case 107:
return jjMoveStringLiteralDfa3_0(active0, 0x6000L);
case 109:
if ((active0 & 0x10000000000L) != 0L)
{
jjmatchedKind = 40;
jjmatchedPos = 2;
}
break;
case 110:
if ((active0 & 0x4000000000L) != 0L)
{
jjmatchedKind = 38;
jjmatchedPos = 2;
}
return jjMoveStringLiteralDfa3_0(active0, 0x840000000000L);
case 112:
return jjMoveStringLiteralDfa3_0(active0, 0x400000000000L);
case 114:
return jjMoveStringLiteralDfa3_0(active0, 0x74000000000000L);
case 115:
if ((active0 & 0x2000000000000L) != 0L)
{
jjmatchedKind = 49;
jjmatchedPos = 2;
}
break;
case 116:
if ((active0 & 0x8L) != 0L)
{
jjmatchedKind = 3;
jjmatchedPos = 2;
}
return jjMoveStringLiteralDfa3_0(active0, 0x40000L);
case 117:
return jjMoveStringLiteralDfa3_0(active0, 0x400020000000000L);
case 119:
return jjMoveStringLiteralDfa3_0(active0, 0x200000000000L);
case 120:
if ((active0 & 0x8000000000L) != 0L)
{
jjmatchedKind = 39;
jjmatchedPos = 2;
}
break;
default :
break;
}
return jjMoveNfa_0(3, 2);
}
// Generated literal-match DFA step for character position 3.  Do not hand-edit
// (see file header); regenerate from the grammar instead.
private int jjMoveStringLiteralDfa3_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjMoveNfa_0(3, 2);
try { curChar = input_stream.readChar(); }
catch(IOException e) {
return jjMoveNfa_0(3, 2);
}
switch(curChar)
{
case 58:
if ((active0 & 0x100000000000000L) != 0L)
{
jjmatchedKind = 56;
jjmatchedPos = 3;
}
break;
case 65:
return jjMoveStringLiteralDfa4_0(active0, 0x1000000000000L);
case 67:
return jjMoveStringLiteralDfa4_0(active0, 0x40000000000L);
case 69:
return jjMoveStringLiteralDfa4_0(active0, 0x600000000000L);
case 71:
return jjMoveStringLiteralDfa4_0(active0, 0x800000000000L);
case 77:
if ((active0 & 0x100000000000L) != 0L)
{
jjmatchedKind = 44;
jjmatchedPos = 3;
}
break;
case 78:
return jjMoveStringLiteralDfa4_0(active0, 0x20000000000L);
case 82:
return jjMoveStringLiteralDfa4_0(active0, 0x70000000000000L);
case 83:
return jjMoveStringLiteralDfa4_0(active0, 0x80000000000L);
case 84:
if ((active0 & 0x4000000000000L) != 0L)
{
jjmatchedKind = 50;
jjmatchedPos = 3;
}
break;
case 97:
return jjMoveStringLiteralDfa4_0(active0, 0x1000000000000L);
case 99:
return jjMoveStringLiteralDfa4_0(active0, 0x40000000000L);
case 101:
if ((active0 & 0x2000L) != 0L)
{
jjmatchedKind = 13;
jjmatchedPos = 3;
}
return jjMoveStringLiteralDfa4_0(active0, 0x600000004000L);
case 103:
return jjMoveStringLiteralDfa4_0(active0, 0x800000000000L);
case 109:
if ((active0 & 0x100000000000L) != 0L)
{
jjmatchedKind = 44;
jjmatchedPos = 3;
}
return jjMoveStringLiteralDfa4_0(active0, 0x400000000000000L);
case 110:
return jjMoveStringLiteralDfa4_0(active0, 0x20000000000L);
case 114:
return jjMoveStringLiteralDfa4_0(active0, 0x70000000000000L);
case 115:
return jjMoveStringLiteralDfa4_0(active0, 0x80000000000L);
case 116:
if ((active0 & 0x4000000000000L) != 0L)
{
jjmatchedKind = 50;
jjmatchedPos = 3;
}
break;
case 119:
return jjMoveStringLiteralDfa4_0(active0, 0x40000L);
default :
break;
}
return jjMoveNfa_0(3, 3);
}
// Generated literal-match DFA step for character position 4.  Do not hand-edit
// (see file header); regenerate from the grammar instead.
private int jjMoveStringLiteralDfa4_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjMoveNfa_0(3, 3);
try { curChar = input_stream.readChar(); }
catch(IOException e) {
return jjMoveNfa_0(3, 3);
}
switch(curChar)
{
case 58:
if ((active0 & 0x400000000000000L) != 0L)
{
jjmatchedKind = 58;
jjmatchedPos = 4;
}
break;
case 65:
return jjMoveStringLiteralDfa5_0(active0, 0x40000000000L);
case 69:
return jjMoveStringLiteralDfa5_0(active0, 0x70000000000000L);
case 73:
return jjMoveStringLiteralDfa5_0(active0, 0x4000L);
case 82:
if ((active0 & 0x200000000000L) != 0L)
{
jjmatchedKind = 45;
jjmatchedPos = 4;
}
else if ((active0 & 0x400000000000L) != 0L)
{
jjmatchedKind = 46;
jjmatchedPos = 4;
}
break;
case 84:
if ((active0 & 0x20000000000L) != 0L)
{
jjmatchedKind = 41;
jjmatchedPos = 4;
}
return jjMoveStringLiteralDfa5_0(active0, 0x1880000000000L);
case 97:
return jjMoveStringLiteralDfa5_0(active0, 0x40000000000L);
case 101:
return jjMoveStringLiteralDfa5_0(active0, 0x70000000040000L);
case 114:
if ((active0 & 0x200000000000L) != 0L)
{
jjmatchedKind = 45;
jjmatchedPos = 4;
}
else if ((active0 & 0x400000000000L) != 0L)
{
jjmatchedKind = 46;
jjmatchedPos = 4;
}
break;
case 116:
if ((active0 & 0x20000000000L) != 0L)
{
jjmatchedKind = 41;
jjmatchedPos = 4;
}
return jjMoveStringLiteralDfa5_0(active0, 0x1880000000000L);
default :
break;
}
return jjMoveNfa_0(3, 4);
}
// Generated literal-match DFA step for character position 5.  Do not hand-edit
// (see file header); regenerate from the grammar instead.
private int jjMoveStringLiteralDfa5_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjMoveNfa_0(3, 4);
try { curChar = input_stream.readChar(); }
catch(IOException e) {
return jjMoveNfa_0(3, 4);
}
switch(curChar)
{
case 69:
if ((active0 & 0x1000000000000L) != 0L)
{
jjmatchedKind = 48;
jjmatchedPos = 5;
}
break;
case 72:
if ((active0 & 0x800000000000L) != 0L)
{
jjmatchedKind = 47;
jjmatchedPos = 5;
}
break;
case 78:
return jjMoveStringLiteralDfa6_0(active0, 0x70000000000000L);
case 82:
return jjMoveStringLiteralDfa6_0(active0, 0x80000000000L);
case 84:
if ((active0 & 0x40000000000L) != 0L)
{
jjmatchedKind = 42;
jjmatchedPos = 5;
}
break;
case 101:
if ((active0 & 0x1000000000000L) != 0L)
{
jjmatchedKind = 48;
jjmatchedPos = 5;
}
return jjMoveStringLiteralDfa6_0(active0, 0x40000L);
case 103:
return jjMoveStringLiteralDfa6_0(active0, 0x4000L);
case 104:
if ((active0 & 0x800000000000L) != 0L)
{
jjmatchedKind = 47;
jjmatchedPos = 5;
}
break;
case 110:
return jjMoveStringLiteralDfa6_0(active0, 0x70000000000000L);
case 114:
return jjMoveStringLiteralDfa6_0(active0, 0x80000000000L);
case 116:
if ((active0 & 0x40000000000L) != 0L)
{
jjmatchedKind = 42;
jjmatchedPos = 5;
}
break;
default :
break;
}
return jjMoveNfa_0(3, 5);
}
// Generated literal-match DFA step for character position 6.  Do not hand-edit
// (see file header); regenerate from the grammar instead.
private int jjMoveStringLiteralDfa6_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjMoveNfa_0(3, 5);
try { curChar = input_stream.readChar(); }
catch(IOException e) {
return jjMoveNfa_0(3, 5);
}
switch(curChar)
{
case 73:
return jjMoveStringLiteralDfa7_0(active0, 0x80000000000L);
case 84:
return jjMoveStringLiteralDfa7_0(active0, 0x70000000000000L);
case 105:
return jjMoveStringLiteralDfa7_0(active0, 0x80000000000L);
case 110:
if ((active0 & 0x40000L) != 0L)
{
jjmatchedKind = 18;
jjmatchedPos = 6;
}
return jjMoveStringLiteralDfa7_0(active0, 0x4000L);
case 116:
return jjMoveStringLiteralDfa7_0(active0, 0x70000000000000L);
default :
break;
}
return jjMoveNfa_0(3, 6);
}
// Generated literal-match DFA step for character position 7.  Do not hand-edit
// (see file header); regenerate from the grammar instead.
private int jjMoveStringLiteralDfa7_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjMoveNfa_0(3, 6);
try { curChar = input_stream.readChar(); }
catch(IOException e) {
return jjMoveNfa_0(3, 6);
}
switch(curChar)
{
case 78:
return jjMoveStringLiteralDfa8_0(active0, 0x80000000000L);
case 95:
return jjMoveStringLiteralDfa8_0(active0, 0x70000000000000L);
case 110:
return jjMoveStringLiteralDfa8_0(active0, 0x80000000000L);
case 111:
return jjMoveStringLiteralDfa8_0(active0, 0x4000L);
default :
break;
}
return jjMoveNfa_0(3, 7);
}
// Generated literal-match DFA step for character position 8.  Do not hand-edit
// (see file header); regenerate from the grammar instead.
private int jjMoveStringLiteralDfa8_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjMoveNfa_0(3, 7);
try { curChar = input_stream.readChar(); }
catch(IOException e) {
return jjMoveNfa_0(3, 7);
}
switch(curChar)
{
case 68:
return jjMoveStringLiteralDfa9_0(active0, 0x10000000000000L);
case 71:
if ((active0 & 0x80000000000L) != 0L)
{
jjmatchedKind = 43;
jjmatchedPos = 8;
}
break;
case 84:
return jjMoveStringLiteralDfa9_0(active0, 0x60000000000000L);
case 100:
return jjMoveStringLiteralDfa9_0(active0, 0x10000000000000L);
case 103:
if ((active0 & 0x80000000000L) != 0L)
{
jjmatchedKind = 43;
jjmatchedPos = 8;
}
break;
case 114:
return jjMoveStringLiteralDfa9_0(active0, 0x4000L);
case 116:
return jjMoveStringLiteralDfa9_0(active0, 0x60000000000000L);
default :
break;
}
return jjMoveNfa_0(3, 8);
}
// Generated literal-match DFA step for character position 9.  Do not hand-edit
// (see file header); regenerate from the grammar instead.
private int jjMoveStringLiteralDfa9_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjMoveNfa_0(3, 8);
try { curChar = input_stream.readChar(); }
catch(IOException e) {
return jjMoveNfa_0(3, 8);
}
switch(curChar)
{
case 65:
return jjMoveStringLiteralDfa10_0(active0, 0x10000000000000L);
case 73:
return jjMoveStringLiteralDfa10_0(active0, 0x60000000000000L);
case 97:
return jjMoveStringLiteralDfa10_0(active0, 0x10000000000000L);
case 101:
return jjMoveStringLiteralDfa10_0(active0, 0x4000L);
case 105:
return jjMoveStringLiteralDfa10_0(active0, 0x60000000000000L);
default :
break;
}
return jjMoveNfa_0(3, 9);
}
/** Literal-match DFA, position 10 in lexical state 0. */
private int jjMoveStringLiteralDfa10_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 9);
   try { curChar = input_stream.readChar(); }
   catch(IOException e) {
   return jjMoveNfa_0(3, 9);
   }
   switch(curChar)
   {
      case 67: // 'C'
         return jjMoveStringLiteralDfa11_0(active0, 0x4000L);
      case 77: // 'M'
         return jjMoveStringLiteralDfa11_0(active0, 0x60000000000000L);
      case 84: // 'T'
         return jjMoveStringLiteralDfa11_0(active0, 0x10000000000000L);
      case 109: // 'm'
         return jjMoveStringLiteralDfa11_0(active0, 0x60000000000000L);
      case 116: // 't'
         return jjMoveStringLiteralDfa11_0(active0, 0x10000000000000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 10);
}
/**
 * Literal-match DFA, position 11 in lexical state 0. 'E'/'e' can complete
 * the 12-character literals of token kinds 52 and 53 while still extending
 * a longer candidate (kind 54, bit 0x40000000000000L).
 */
private int jjMoveStringLiteralDfa11_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 10);
   try { curChar = input_stream.readChar(); }
   catch(IOException e) {
   return jjMoveNfa_0(3, 10);
   }
   switch(curChar)
   {
      case 69: // 'E'
         if ((active0 & 0x10000000000000L) != 0L)
         {
            jjmatchedKind = 52;
            jjmatchedPos = 11;
         }
         else if ((active0 & 0x20000000000000L) != 0L)
         {
            jjmatchedKind = 53;
            jjmatchedPos = 11;
         }
         return jjMoveStringLiteralDfa12_0(active0, 0x40000000000000L);
      case 97: // 'a'
         return jjMoveStringLiteralDfa12_0(active0, 0x4000L);
      case 101: // 'e'
         if ((active0 & 0x10000000000000L) != 0L)
         {
            jjmatchedKind = 52;
            jjmatchedPos = 11;
         }
         else if ((active0 & 0x20000000000000L) != 0L)
         {
            jjmatchedKind = 53;
            jjmatchedPos = 11;
         }
         return jjMoveStringLiteralDfa12_0(active0, 0x40000000000000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 11);
}
/** Literal-match DFA, position 12 in lexical state 0. */
private int jjMoveStringLiteralDfa12_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 11);
   try { curChar = input_stream.readChar(); }
   catch(IOException e) {
   return jjMoveNfa_0(3, 11);
   }
   switch(curChar)
   {
      case 83: // 'S'
         return jjMoveStringLiteralDfa13_0(active0, 0x40000000000000L);
      case 115: // 's' -- also keeps the kind-14 candidate (bit 0x4000) alive
         return jjMoveStringLiteralDfa13_0(active0, 0x40000000004000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 12);
}
/**
 * Literal-match DFA, position 13 in lexical state 0. 'e' completes token
 * kind 14 ("likeIgnoreCase", 14 characters).
 */
private int jjMoveStringLiteralDfa13_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 12);
   try { curChar = input_stream.readChar(); }
   catch(IOException e) {
   return jjMoveNfa_0(3, 12);
   }
   switch(curChar)
   {
      case 84: // 'T'
         return jjMoveStringLiteralDfa14_0(active0, 0x40000000000000L);
      case 101: // 'e' -- terminal: completes token kind 14
         if ((active0 & 0x4000L) != 0L)
         {
            jjmatchedKind = 14;
            jjmatchedPos = 13;
         }
         break;
      case 116: // 't'
         return jjMoveStringLiteralDfa14_0(active0, 0x40000000000000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 13);
}
/** Literal-match DFA, position 14 in lexical state 0. */
private int jjMoveStringLiteralDfa14_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 13);
   try { curChar = input_stream.readChar(); }
   catch(IOException e) {
   return jjMoveNfa_0(3, 13);
   }
   switch(curChar)
   {
      case 65: // 'A'
         return jjMoveStringLiteralDfa15_0(active0, 0x40000000000000L);
      case 97: // 'a'
         return jjMoveStringLiteralDfa15_0(active0, 0x40000000000000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 14);
}
/** Literal-match DFA, position 15 in lexical state 0. */
private int jjMoveStringLiteralDfa15_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 14);
   try { curChar = input_stream.readChar(); }
   catch(IOException e) {
   return jjMoveNfa_0(3, 14);
   }
   switch(curChar)
   {
      case 77: // 'M'
         return jjMoveStringLiteralDfa16_0(active0, 0x40000000000000L);
      case 109: // 'm'
         return jjMoveStringLiteralDfa16_0(active0, 0x40000000000000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 15);
}
/**
 * Literal-match DFA, position 16 (final) in lexical state 0. 'P'/'p'
 * completes the 17-character literal of token kind 54.
 */
private int jjMoveStringLiteralDfa16_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 15);
   try { curChar = input_stream.readChar(); }
   catch(IOException e) {
   return jjMoveNfa_0(3, 15);
   }
   switch(curChar)
   {
      case 80: // 'P' -- terminal: completes token kind 54
         if ((active0 & 0x40000000000000L) != 0L)
         {
            jjmatchedKind = 54;
            jjmatchedPos = 16;
         }
         break;
      case 112: // 'p' -- terminal: completes token kind 54
         if ((active0 & 0x40000000000000L) != 0L)
         {
            jjmatchedKind = 54;
            jjmatchedPos = 16;
         }
         break;
      default :
         break;
   }
   return jjMoveNfa_0(3, 16);
}
/**
 * NFA simulation for lexical state 0 (identifiers, numbers, and the
 * word-like tokens null/true/false in both cases). Entered after the literal
 * DFA gives up: the stream is first backed up by {@code curPos + 1}
 * characters so the NFA rescans from the token start, then the NFA result is
 * reconciled against the literal match recorded in jjmatchedKind/jjmatchedPos.
 */
private int jjMoveNfa_0(int startState, int curPos)
{
   int strKind = jjmatchedKind;
   int strPos = jjmatchedPos;
   int seenUpto;
   // Rewind to the token start; the literal DFA consumed curPos + 1 chars.
   input_stream.backup(seenUpto = curPos + 1);
   try { curChar = input_stream.readChar(); }
   catch(IOException e) { throw new Error("Internal Error"); }
   curPos = 0;
   int startsAt = 0;
   jjnewStateCnt = 56;
   int i = 1;
   jjstateSet[0] = startState;
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      // Branch 1: ASCII 0..63 (digits, '.', '+', '-', punctuation).
      if (curChar < 64)
      {
         long l = 1L << curChar;
         do
         {
            switch(jjstateSet[--i])
            {
               case 3:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjCheckNAddStates(0, 5);
                  else if (curChar == 46)
                     jjCheckNAdd(30);
                  if ((0x3fe000000000000L & l) != 0L)
                  {
                     if (kind > 71)
                        kind = 71;
                     jjCheckNAddTwoStates(27, 28);
                  }
                  else if (curChar == 48)
                  {
                     if (kind > 71)
                        kind = 71;
                     jjCheckNAddStates(6, 8);
                  }
                  break;
               case 26:
                  if ((0x3fe000000000000L & l) == 0L)
                     break;
                  if (kind > 71)
                     kind = 71;
                  jjCheckNAddTwoStates(27, 28);
                  break;
               case 27:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 71)
                     kind = 71;
                  jjCheckNAddTwoStates(27, 28);
                  break;
               case 29:
                  if (curChar == 46)
                     jjCheckNAdd(30);
                  break;
               case 30:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 72)
                     kind = 72;
                  jjCheckNAddStates(9, 11);
                  break;
               case 32:
                  if ((0x280000000000L & l) != 0L)
                     jjCheckNAdd(33);
                  break;
               case 33:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 72)
                     kind = 72;
                  jjCheckNAddTwoStates(33, 34);
                  break;
               case 35:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjCheckNAddStates(0, 5);
                  break;
               case 36:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjCheckNAddTwoStates(36, 37);
                  break;
               case 37:
                  if (curChar != 46)
                     break;
                  if (kind > 72)
                     kind = 72;
                  jjCheckNAddStates(12, 14);
                  break;
               case 38:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 72)
                     kind = 72;
                  jjCheckNAddStates(12, 14);
                  break;
               case 39:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjCheckNAddTwoStates(39, 40);
                  break;
               case 41:
                  if ((0x280000000000L & l) != 0L)
                     jjCheckNAdd(42);
                  break;
               case 42:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 72)
                     kind = 72;
                  jjCheckNAddTwoStates(42, 34);
                  break;
               case 43:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjCheckNAddTwoStates(43, 34);
                  break;
               case 44:
                  if (curChar != 48)
                     break;
                  if (kind > 71)
                     kind = 71;
                  jjCheckNAddStates(6, 8);
                  break;
               case 45:
                  if ((0xff000000000000L & l) == 0L)
                     break;
                  if (kind > 71)
                     kind = 71;
                  jjCheckNAddTwoStates(45, 28);
                  break;
               case 47:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 71)
                     kind = 71;
                  jjCheckNAddTwoStates(47, 28);
                  break;
               case 49:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 59)
                     kind = 59;
                  jjCheckNAddStates(15, 17);
                  break;
               case 50:
                  if (curChar != 43)
                     break;
                  if (kind > 59)
                     kind = 59;
                  jjCheckNAdd(51);
                  break;
               case 51:
                  if (curChar == 46)
                     jjstateSet[jjnewStateCnt++] = 52;
                  break;
               case 53:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 59)
                     kind = 59;
                  jjCheckNAddStates(18, 20);
                  break;
               case 54:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 60)
                     kind = 60;
                  jjAddStates(21, 22);
                  break;
               case 55:
                  if (curChar == 43 && kind > 60)
                     kind = 60;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      // Branch 2: ASCII 64..127 (letters, '_'; word tokens spelled out below).
      else if (curChar < 128)
      {
         long l = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 3:
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 59)
                        kind = 59;
                     jjCheckNAddStates(23, 27);
                  }
                  if (curChar == 70)
                     jjstateSet[jjnewStateCnt++] = 24;
                  else if (curChar == 102)
                     jjstateSet[jjnewStateCnt++] = 19;
                  else if (curChar == 84)
                     jjstateSet[jjnewStateCnt++] = 14;
                  else if (curChar == 116)
                     jjstateSet[jjnewStateCnt++] = 10;
                  else if (curChar == 78)
                     jjstateSet[jjnewStateCnt++] = 6;
                  else if (curChar == 110)
                     jjstateSet[jjnewStateCnt++] = 2;
                  break;
               case 0:
                  if (curChar == 108 && kind > 34)
                     kind = 34;
                  break;
               case 1:
                  if (curChar == 108)
                     jjstateSet[jjnewStateCnt++] = 0;
                  break;
               case 2:
                  if (curChar == 117)
                     jjstateSet[jjnewStateCnt++] = 1;
                  break;
               case 4:
                  if (curChar == 76 && kind > 34)
                     kind = 34;
                  break;
               case 5:
                  if (curChar == 76)
                     jjstateSet[jjnewStateCnt++] = 4;
                  break;
               case 6:
                  if (curChar == 85)
                     jjstateSet[jjnewStateCnt++] = 5;
                  break;
               case 7:
                  if (curChar == 78)
                     jjstateSet[jjnewStateCnt++] = 6;
                  break;
               case 8:
                  if (curChar == 101 && kind > 35)
                     kind = 35;
                  break;
               case 9:
                  if (curChar == 117)
                     jjstateSet[jjnewStateCnt++] = 8;
                  break;
               case 10:
                  if (curChar == 114)
                     jjstateSet[jjnewStateCnt++] = 9;
                  break;
               case 11:
                  if (curChar == 116)
                     jjstateSet[jjnewStateCnt++] = 10;
                  break;
               case 12:
                  if (curChar == 69 && kind > 35)
                     kind = 35;
                  break;
               case 13:
                  if (curChar == 85)
                     jjstateSet[jjnewStateCnt++] = 12;
                  break;
               case 14:
                  if (curChar == 82)
                     jjstateSet[jjnewStateCnt++] = 13;
                  break;
               case 15:
                  if (curChar == 84)
                     jjstateSet[jjnewStateCnt++] = 14;
                  break;
               case 16:
                  if (curChar == 101 && kind > 36)
                     kind = 36;
                  break;
               case 17:
                  if (curChar == 115)
                     jjstateSet[jjnewStateCnt++] = 16;
                  break;
               case 18:
                  if (curChar == 108)
                     jjstateSet[jjnewStateCnt++] = 17;
                  break;
               case 19:
                  if (curChar == 97)
                     jjstateSet[jjnewStateCnt++] = 18;
                  break;
               case 20:
                  if (curChar == 102)
                     jjstateSet[jjnewStateCnt++] = 19;
                  break;
               case 21:
                  if (curChar == 69 && kind > 36)
                     kind = 36;
                  break;
               case 22:
                  if (curChar == 83)
                     jjstateSet[jjnewStateCnt++] = 21;
                  break;
               case 23:
                  if (curChar == 76)
                     jjstateSet[jjnewStateCnt++] = 22;
                  break;
               case 24:
                  if (curChar == 65)
                     jjstateSet[jjnewStateCnt++] = 23;
                  break;
               case 25:
                  if (curChar == 70)
                     jjstateSet[jjnewStateCnt++] = 24;
                  break;
               case 28:
                  if ((0x110000001100L & l) != 0L && kind > 71)
                     kind = 71;
                  break;
               case 31:
                  if ((0x2000000020L & l) != 0L)
                     jjAddStates(28, 29);
                  break;
               case 34:
                  if ((0x5400000054L & l) != 0L && kind > 72)
                     kind = 72;
                  break;
               case 40:
                  if ((0x2000000020L & l) != 0L)
                     jjAddStates(30, 31);
                  break;
               case 46:
                  if ((0x100000001000000L & l) != 0L)
                     jjCheckNAdd(47);
                  break;
               case 47:
                  if ((0x7e0000007eL & l) == 0L)
                     break;
                  if (kind > 71)
                     kind = 71;
                  jjCheckNAddTwoStates(47, 28);
                  break;
               case 48:
                  if ((0x7fffffe87fffffeL & l) == 0L)
                     break;
                  if (kind > 59)
                     kind = 59;
                  jjCheckNAddStates(23, 27);
                  break;
               case 49:
                  if ((0x7fffffe87fffffeL & l) == 0L)
                     break;
                  if (kind > 59)
                     kind = 59;
                  jjCheckNAddStates(15, 17);
                  break;
               case 52:
               case 53:
                  if ((0x7fffffe87fffffeL & l) == 0L)
                     break;
                  if (kind > 59)
                     kind = 59;
                  jjCheckNAddStates(18, 20);
                  break;
               case 54:
                  if ((0x7fffffe87fffffeL & l) == 0L)
                     break;
                  if (kind > 60)
                     kind = 60;
                  jjCheckNAddTwoStates(54, 55);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      // Branch 3: non-ASCII; no state 0 transitions accept these characters.
      else
      {
         int hiByte = (int)(curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               default : break;
            }
         } while(i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      // Swap the two halves of jjstateSet; stop when the next round is empty.
      if ((i = jjnewStateCnt) == (startsAt = 56 - (jjnewStateCnt = startsAt)))
         break;
      try { curChar = input_stream.readChar(); }
      catch(IOException e) { break; }
   }
   if (jjmatchedPos > strPos)
      return curPos;
   // NFA match is not longer than the literal one: resync the stream to the
   // furthest point seen and prefer the literal match on ties (lower kind wins).
   int toRet = Math.max(curPos, seenUpto);
   if (curPos < toRet)
      for (i = toRet - Math.min(curPos, seenUpto); i-- > 0; )
         try { curChar = input_stream.readChar(); }
         catch(IOException e) { throw new Error("Internal Error : Please send a bug report."); }
   if (jjmatchedPos < strPos)
   {
      jjmatchedKind = strKind;
      jjmatchedPos = strPos;
   }
   else if (jjmatchedPos == strPos && jjmatchedKind > strKind)
      jjmatchedKind = strKind;
   return toRet;
}
/**
 * Literal-DFA stop helper for lexical state 1. No multi-character string
 * literals are tracked in this state, so every position maps to "no special
 * NFA start state" (-1), regardless of the active-token masks.
 */
private final int jjStopStringLiteralDfa_1(int pos, long active0, long active1)
{
   return -1;
}
/**
 * Hands control to the NFA for lexical state 1 once the literal DFA has
 * stopped at {@code pos}; the NFA resumes at the following character.
 */
private final int jjStartNfa_1(int pos, long active0, long active1)
{
   int nfaStartState = jjStopStringLiteralDfa_1(pos, active0, active1);
   return jjMoveNfa_1(nfaStartState, pos + 1);
}
/**
 * Literal dispatch for lexical state 1 (inside a single-quoted literal):
 * a single quote closes the literal as token kind 67; every other character
 * is handled by the state-1 NFA.
 */
private int jjMoveStringLiteralDfa0_1()
{
   // 39 == '\''
   if (curChar == 39)
      return jjStopAtPos(0, 67);
   return jjMoveNfa_1(0, 0);
}
// Unicode membership bitmaps consulted by jjCanMove_0 for characters >= 128:
// jjbitVec2 is used when hiByte == 0 (it covers the 0x80..0xFF quarter of
// that page); jjbitVec0 is the catch-all for every other page.
static final long[] jjbitVec0 = {
   0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
};
static final long[] jjbitVec2 = {
   0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
};
/**
 * NFA simulation for lexical state 1 (body of a single-quoted literal).
 * Kind 66 is an ordinary literal character; kind 65 is a backslash escape
 * (simple escape or up-to-3-digit octal escape, states 1-4).
 */
private int jjMoveNfa_1(int startState, int curPos)
{
   int startsAt = 0;
   jjnewStateCnt = 6;
   int i = 1;
   jjstateSet[0] = startState;
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      // ASCII 0..63.
      if (curChar < 64)
      {
         long l = 1L << curChar;
         do
         {
            switch(jjstateSet[--i])
            {
               case 0:
                  if ((0xffffff7fffffffffL & l) != 0L && kind > 66)
                     kind = 66;
                  break;
               case 1:
                  if ((0x8400000000L & l) != 0L && kind > 65)
                     kind = 65;
                  break;
               case 2:
                  // First octal digit 0-3 may start a 3-digit escape.
                  if ((0xf000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 3;
                  break;
               case 3:
                  if ((0xff000000000000L & l) == 0L)
                     break;
                  if (kind > 65)
                     kind = 65;
                  jjstateSet[jjnewStateCnt++] = 4;
                  break;
               case 4:
                  if ((0xff000000000000L & l) != 0L && kind > 65)
                     kind = 65;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      // ASCII 64..127; backslash (92) launches the escape sub-machine.
      else if (curChar < 128)
      {
         long l = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 0:
                  if ((0xffffffffefffffffL & l) != 0L)
                  {
                     if (kind > 66)
                        kind = 66;
                  }
                  else if (curChar == 92)
                     jjAddStates(32, 34);
                  break;
               case 1:
                  if ((0x14404510000000L & l) != 0L && kind > 65)
                     kind = 65;
                  break;
               case 5:
                  if ((0xffffffffefffffffL & l) != 0L && kind > 66)
                     kind = 66;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      // Non-ASCII: any movable character counts as a literal character (66).
      else
      {
         int hiByte = (int)(curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 0:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2) && kind > 66)
                     kind = 66;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      if ((i = jjnewStateCnt) == (startsAt = 6 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch(IOException e) { return curPos; }
   }
}
/**
 * Literal-DFA stop helper for lexical state 2. As in state 1, no
 * multi-character literals are tracked, so the answer is always -1.
 */
private final int jjStopStringLiteralDfa_2(int pos, long active0, long active1)
{
   return -1;
}
/**
 * Hands control to the NFA for lexical state 2 once the literal DFA has
 * stopped at {@code pos}; the NFA resumes at the following character.
 */
private final int jjStartNfa_2(int pos, long active0, long active1)
{
   int nfaStartState = jjStopStringLiteralDfa_2(pos, active0, active1);
   return jjMoveNfa_2(nfaStartState, pos + 1);
}
/**
 * Literal dispatch for lexical state 2 (inside a double-quoted literal):
 * a double quote closes the literal as token kind 70; every other character
 * is handled by the state-2 NFA.
 */
private int jjMoveStringLiteralDfa0_2()
{
   // 34 == '"'
   if (curChar == 34)
      return jjStopAtPos(0, 70);
   return jjMoveNfa_2(0, 0);
}
/**
 * NFA simulation for lexical state 2 (body of a double-quoted literal).
 * Mirrors jjMoveNfa_1: kind 69 is an ordinary literal character, kind 68 a
 * backslash escape (simple escape or up-to-3-digit octal escape).
 */
private int jjMoveNfa_2(int startState, int curPos)
{
   int startsAt = 0;
   jjnewStateCnt = 6;
   int i = 1;
   jjstateSet[0] = startState;
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      // ASCII 0..63.
      if (curChar < 64)
      {
         long l = 1L << curChar;
         do
         {
            switch(jjstateSet[--i])
            {
               case 0:
                  if ((0xfffffffbffffffffL & l) != 0L && kind > 69)
                     kind = 69;
                  break;
               case 1:
                  if ((0x8400000000L & l) != 0L && kind > 68)
                     kind = 68;
                  break;
               case 2:
                  // First octal digit 0-3 may start a 3-digit escape.
                  if ((0xf000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 3;
                  break;
               case 3:
                  if ((0xff000000000000L & l) == 0L)
                     break;
                  if (kind > 68)
                     kind = 68;
                  jjstateSet[jjnewStateCnt++] = 4;
                  break;
               case 4:
                  if ((0xff000000000000L & l) != 0L && kind > 68)
                     kind = 68;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      // ASCII 64..127; backslash (92) launches the escape sub-machine.
      else if (curChar < 128)
      {
         long l = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 0:
                  if ((0xffffffffefffffffL & l) != 0L)
                  {
                     if (kind > 69)
                        kind = 69;
                  }
                  else if (curChar == 92)
                     jjAddStates(32, 34);
                  break;
               case 1:
                  if ((0x14404510000000L & l) != 0L && kind > 68)
                     kind = 68;
                  break;
               case 5:
                  if ((0xffffffffefffffffL & l) != 0L && kind > 69)
                     kind = 69;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      // Non-ASCII: any movable character counts as a literal character (69).
      else
      {
         int hiByte = (int)(curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 0:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2) && kind > 69)
                     kind = 69;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      if ((i = jjnewStateCnt) == (startsAt = 6 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch(IOException e) { return curPos; }
   }
}
// Shared successor-state table: jjAddStates/jjCheckNAddStates reference
// inclusive (start, end) index pairs into this array.
static final int[] jjnextStates = {
   36, 37, 39, 40, 43, 34, 45, 46, 28, 30, 31, 34, 38, 31, 34, 49,
   50, 51, 51, 53, 50, 54, 55, 49, 50, 51, 54, 55, 32, 33, 41, 42,
   1, 2, 3,
};
/**
 * Tests whether a character >= 128 can take NFA transition class 0.
 * Characters whose high byte is 0 (U+0080..U+00FF) are looked up in
 * jjbitVec2 by their low byte; all other pages are looked up in jjbitVec0
 * by their high byte.
 */
private static final boolean jjCanMove_0(int hiByte, int i1, int i2, long l1, long l2)
{
   if (hiByte == 0)
      return (jjbitVec2[i2] & l2) != 0L;
   return (jjbitVec0[i1] & l1) != 0L;
}
/** Token literal values. */
// Images are emitted by JavaCC as octal-escaped strings, e.g. kind 1 = "or",
// kind 2 = "and", kind 13 = "like", kind 14 = "likeIgnoreCase"; null entries
// are tokens with no single fixed image (identifiers, numbers, literals).
public static final String[] jjstrLiteralImages = {
"", "\157\162", "\141\156\144", "\156\157\164", "\41", "\75", "\75\75",
"\41\75", "\74\76", "\74\75", "\74", "\76", "\76\75", "\154\151\153\145",
"\154\151\153\145\111\147\156\157\162\145\103\141\163\145", "\151\156", "\50", "\51", "\142\145\164\167\145\145\156", "\54", "\174",
"\136", "\46", "\74\74", "\76\76", "\53", "\55", "\52", "\57", "\176", null, null,
null, null, null, null, null, null, null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null, null, "\44", "\157\142\152\72",
"\144\142\72", "\145\156\165\155\72", null, null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null, null, };
/** Lexer state names. */
// Index matches curLexState: 0 = normal scanning, 1/2 = inside a quoted literal.
public static final String[] lexStateNames = {
   "DEFAULT",
   "WithinSingleQuoteLiteral",
   "WithinDoubleQuoteLiteral",
};
/** Lex State array. */
// Per token kind: -1 = stay in the current lexical state; otherwise switch
// after matching (kind 63 -> state 1, kind 64 -> state 2, kinds 67/70 -> 0).
public static final int[] jjnewLexState = {
   -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
   -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
   -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 2, -1, -1, 0, -1, -1, 0, -1, -1, -1, -1,
   -1, -1,
};
// Classification bitmaps indexed by token kind (bit kind%64 of word kind/64):
// TOKEN kinds are returned to the parser, SKIP kinds are discarded, and MORE
// kinds accumulate into the current image and keep scanning.
static final long[] jjtoToken = {
   0x1ffffffc3fffffffL, 0x1c8L,
};
static final long[] jjtoSkip = {
   0x3c0000000L, 0x0L,
};
static final long[] jjtoMore = {
   0x8000000000000000L, 0x37L,
};
protected JavaCharStream input_stream;
// Round stamp per NFA state; a state is queued at most once per round.
private final int[] jjrounds = new int[56];
// Two 56-entry halves swapped between rounds by jjMoveNfa_* (current/next).
private final int[] jjstateSet = new int[112];
private final StringBuilder jjimage = new StringBuilder();
// Accumulated raw image of the token being scanned (aliases jjimage).
private StringBuilder image = jjimage;
private int jjimageLen;
private int lengthOfMatch;
protected char curChar;
/** Constructor. Requires a non-static JavaCharStream; static streams are rejected. */
public ExpressionParserTokenManager(JavaCharStream stream){
   if (JavaCharStream.staticFlag)
      throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
   input_stream = stream;
}
/** Constructor that also selects the initial lexical state. */
public ExpressionParserTokenManager(JavaCharStream stream, int lexState){
   this(stream);
   SwitchTo(lexState);
}
/** Reinitialise parser: rescan a fresh stream from the default lexical state. */
public void ReInit(JavaCharStream stream)
{
   jjmatchedPos = jjnewStateCnt = 0;
   curLexState = defaultLexState;
   input_stream = stream;
   ReInitRounds();
}
/**
 * Resets the round counter and stamps every per-state round marker with a
 * value that can never equal a live round, so no stale entry survives.
 */
private void ReInitRounds()
{
   jjround = 0x80000001;
   for (int state = 0; state < 56; state++)
      jjrounds[state] = 0x80000000;
}
/** Reinitialise parser with an explicit initial lexical state. */
public void ReInit(JavaCharStream stream, int lexState)
{
   ReInit(stream);
   SwitchTo(lexState);
}
/**
 * Switch to the specified lex state.
 *
 * @throws TokenMgrError if {@code lexState} is not one of the 3 states
 *         declared in {@link #lexStateNames}
 */
public void SwitchTo(int lexState)
{
   // Only states 0..2 exist; reject anything outside that range up front.
   if (lexState < 0 || lexState >= 3)
      throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
   curLexState = lexState;
}
/**
 * Builds a Token for the current match: fixed-image kinds use their literal
 * image, everything else uses the raw text consumed from the stream; line
 * and column bounds are copied from the input stream.
 */
protected Token jjFillToken()
{
   final Token t;
   final String curTokenImage;
   final int beginLine;
   final int endLine;
   final int beginColumn;
   final int endColumn;
   String im = jjstrLiteralImages[jjmatchedKind];
   // null image => variable-text token (identifier, number, literal).
   curTokenImage = (im == null) ? input_stream.GetImage() : im;
   beginLine = input_stream.getBeginLine();
   beginColumn = input_stream.getBeginColumn();
   endLine = input_stream.getEndLine();
   endColumn = input_stream.getEndColumn();
   t = Token.newToken(jjmatchedKind, curTokenImage);
   t.beginLine = beginLine;
   t.endLine = endLine;
   t.beginColumn = beginColumn;
   t.endColumn = endColumn;
   return t;
}
int curLexState = 0;    // current lexical state (index into lexStateNames)
int defaultLexState = 0;
int jjnewStateCnt;      // fill pointer into the pending half of jjstateSet
int jjround;            // current NFA round stamp (see jjrounds)
int jjmatchedPos;       // offset of the last character of the longest match
int jjmatchedKind;      // token kind of the longest match so far
/**
 * Get the next Token. Drives the per-state literal DFA / NFA scanners and
 * then classifies the match as TOKEN (returned), SKIP (discarded), or MORE
 * (image accumulated, scanning continues). Throws TokenMgrError when no
 * rule matches the input at the current position.
 */
public Token getNextToken()
{
  Token matchedToken;
  int curPos = 0;
  EOFLoop :
  for (;;)
  {
   try
   {
      curChar = input_stream.BeginToken();
   }
   catch(IOException e)
   {
      // EOF: kind 0 is the EOF token.
      jjmatchedKind = 0;
      matchedToken = jjFillToken();
      return matchedToken;
   }
   image = jjimage;
   image.setLength(0);
   jjimageLen = 0;
   for (;;)
   {
     switch(curLexState)
     {
       case 0:
         jjmatchedKind = 0x7fffffff;
         jjmatchedPos = 0;
         curPos = jjMoveStringLiteralDfa0_0();
         break;
       case 1:
         jjmatchedKind = 0x7fffffff;
         jjmatchedPos = 0;
         curPos = jjMoveStringLiteralDfa0_1();
         break;
       case 2:
         jjmatchedKind = 0x7fffffff;
         jjmatchedPos = 0;
         curPos = jjMoveStringLiteralDfa0_2();
         break;
     }
     if (jjmatchedKind != 0x7fffffff)
     {
        // Push back characters read beyond the end of the match.
        if (jjmatchedPos + 1 < curPos)
           input_stream.backup(curPos - jjmatchedPos - 1);
        if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
        {
           matchedToken = jjFillToken();
           TokenLexicalActions(matchedToken);
           if (jjnewLexState[jjmatchedKind] != -1)
             curLexState = jjnewLexState[jjmatchedKind];
           return matchedToken;
        }
        else if ((jjtoSkip[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
        {
           // SKIP token (e.g. whitespace): restart scanning at the next char.
           if (jjnewLexState[jjmatchedKind] != -1)
             curLexState = jjnewLexState[jjmatchedKind];
           continue EOFLoop;
        }
        // MORE token: fold the match into the image and keep scanning.
        MoreLexicalActions();
        if (jjnewLexState[jjmatchedKind] != -1)
          curLexState = jjnewLexState[jjmatchedKind];
        curPos = 0;
        jjmatchedKind = 0x7fffffff;
        try {
           curChar = input_stream.readChar();
           continue;
        }
        catch (IOException e1) { }
     }
     // Nothing matched: gather position info and report a lexical error.
     int error_line = input_stream.getEndLine();
     int error_column = input_stream.getEndColumn();
     String error_after = null;
     boolean EOFSeen = false;
     try { input_stream.readChar(); input_stream.backup(1); }
     catch (IOException e1) {
        EOFSeen = true;
        error_after = curPos <= 1 ? "" : input_stream.GetImage();
        if (curChar == '\n' || curChar == '\r') {
           error_line++;
           error_column = 0;
        }
        else
           error_column++;
     }
     if (!EOFSeen) {
        input_stream.backup(1);
        error_after = curPos <= 1 ? "" : input_stream.GetImage();
     }
     throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
   }
  }
}
/**
 * Lexical actions for MORE tokens (string-literal assembly). Kinds 63/64
 * are the opening quotes, 65/68 escape sequences, 66/69 plain characters;
 * each pair behaves identically, so the cases share a body via fall-through.
 */
void MoreLexicalActions()
{
   jjimageLen += (lengthOfMatch = jjmatchedPos + 1);
   switch(jjmatchedKind)
   {
      case 63 :
      case 64 :
         // Opening quote: capture the raw image and start a fresh buffer.
         image.append(input_stream.GetSuffix(jjimageLen));
         jjimageLen = 0;
         stringBuffer = new StringBuffer();
         break;
      case 65 :
      case 68 :
         // Escape sequence inside the literal: decode and append it.
         image.append(input_stream.GetSuffix(jjimageLen));
         jjimageLen = 0;
         stringBuffer.append( escapeChar() );
         break;
      case 66 :
      case 69 :
         // Ordinary literal character: append it verbatim.
         image.append(input_stream.GetSuffix(jjimageLen));
         jjimageLen = 0;
         stringBuffer.append( image.charAt(image.length()-1) );
         break;
      default :
         break;
   }
}
/**
 * Lexical actions run when a complete TOKEN is matched: closing quotes
 * (kinds 67/70, identical handling) publish the assembled string, and
 * numeric literals (71/72) are converted to their runtime values.
 */
void TokenLexicalActions(Token matchedToken)
{
   switch(jjmatchedKind)
   {
      case 67 :
      case 70 :
         // Closing quote of a single/double-quoted literal.
         image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
         literalValue = stringBuffer.toString();
         break;
      case 71 :
         // Integer literal.
         image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
         literalValue = makeInt();
         break;
      case 72 :
         // Floating-point literal.
         image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
         literalValue = makeFloat();
         break;
      default :
         break;
   }
}
/**
 * Queues {@code state} for the next NFA round unless it was already queued
 * during this round (tracked via the per-state round stamp).
 */
private void jjCheckNAdd(int state)
{
   // Already stamped for this round: nothing to do.
   if (jjrounds[state] == jjround)
      return;
   jjstateSet[jjnewStateCnt++] = state;
   jjrounds[state] = jjround;
}
/**
 * Appends jjnextStates[start..end] (inclusive) to the pending state set
 * WITHOUT duplicate suppression. The do/while executes at least once, so
 * callers must guarantee start <= end.
 */
private void jjAddStates(int start, int end)
{
   do {
      jjstateSet[jjnewStateCnt++] = jjnextStates[start];
   } while (start++ != end);
}
/** Queues two states for the next round, each with duplicate suppression. */
private void jjCheckNAddTwoStates(int state1, int state2)
{
   jjCheckNAdd(state1);
   jjCheckNAdd(state2);
}
/**
 * Queues jjnextStates[start..end] (inclusive) with duplicate suppression.
 * Like jjAddStates, the loop body runs at least once (start <= end assumed).
 */
private void jjCheckNAddStates(int start, int end)
{
   do {
      jjCheckNAdd(jjnextStates[start]);
   } while (start++ != end);
}
}
|
package com.bbs.po;
/**
 * Mutable bean holding one user's education history: one (enrollment time,
 * school name) pair per education level from primary school through college.
 * All times are kept as free-form strings exactly as supplied.
 */
public class EducationInformation {
    private Integer educateId;          // primary key of this record
    private Integer userId;             // owning user's id
    private String username;
    private String collegeRecTime;      // college enrollment time
    private String collegeName;
    private String highSchoolRecTime;
    private String highSchoolName;
    private String careerSchoolRecTime; // vocational school
    private String careerSchoolName;
    private String midSchoolRecTime;    // middle school
    private String midSchoolName;
    private String priSchoolRecTime;    // primary school
    private String priSchoolName;

    /** @return the record's primary key, or null if not yet persisted */
    public Integer getEducateId() {
        return educateId;
    }

    public void setEducateId(Integer educateId) {
        this.educateId = educateId;
    }

    /** @return the owning user's id */
    public Integer getUserId() {
        return userId;
    }

    public void setUserId(Integer userId) {
        this.userId = userId;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getCollegeRecTime() {
        return collegeRecTime;
    }

    public void setCollegeRecTime(String collegeRecTime) {
        this.collegeRecTime = collegeRecTime;
    }

    public String getCollegeName() {
        return collegeName;
    }

    public void setCollegeName(String collegeName) {
        this.collegeName = collegeName;
    }

    public String getHighSchoolRecTime() {
        return highSchoolRecTime;
    }

    public void setHighSchoolRecTime(String highSchoolRecTime) {
        this.highSchoolRecTime = highSchoolRecTime;
    }

    public String getHighSchoolName() {
        return highSchoolName;
    }

    public void setHighSchoolName(String highSchoolName) {
        this.highSchoolName = highSchoolName;
    }

    public String getCareerSchoolRecTime() {
        return careerSchoolRecTime;
    }

    public void setCareerSchoolRecTime(String careerSchoolRecTime) {
        this.careerSchoolRecTime = careerSchoolRecTime;
    }

    public String getCareerSchoolName() {
        return careerSchoolName;
    }

    public void setCareerSchoolName(String careerSchoolName) {
        this.careerSchoolName = careerSchoolName;
    }

    public String getMidSchoolRecTime() {
        return midSchoolRecTime;
    }

    public void setMidSchoolRecTime(String midSchoolRecTime) {
        this.midSchoolRecTime = midSchoolRecTime;
    }

    public String getMidSchoolName() {
        return midSchoolName;
    }

    public void setMidSchoolName(String midSchoolName) {
        this.midSchoolName = midSchoolName;
    }

    public String getPriSchoolRecTime() {
        return priSchoolRecTime;
    }

    public void setPriSchoolRecTime(String priSchoolRecTime) {
        this.priSchoolRecTime = priSchoolRecTime;
    }

    public String getPriSchoolName() {
        return priSchoolName;
    }

    public void setPriSchoolName(String priSchoolName) {
        this.priSchoolName = priSchoolName;
    }

    /** Diagnostic dump of every field; format is stable for logging. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("EducationInformation [educateId=");
        sb.append(educateId).append(", userId=").append(userId);
        sb.append(", username=").append(username);
        sb.append(", collegeRecTime=").append(collegeRecTime);
        sb.append(", collegeName=").append(collegeName);
        sb.append(", highSchoolRecTime=").append(highSchoolRecTime);
        sb.append(", highSchoolName=").append(highSchoolName);
        sb.append(", careerSchoolRecTime=").append(careerSchoolRecTime);
        sb.append(", careerSchoolName=").append(careerSchoolName);
        sb.append(", midSchoolRecTime=").append(midSchoolRecTime);
        sb.append(", midSchoolName=").append(midSchoolName);
        sb.append(", priSchoolRecTime=").append(priSchoolRecTime);
        sb.append(", priSchoolName=").append(priSchoolName);
        sb.append("]");
        return sb.toString();
    }
}
|
/*
* Copyright 2014 Pierre Chabardes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package Libraries;
import android.opengl.EGLContext;
import android.util.Log;
import com.github.nkzawa.emitter.Emitter;
import com.github.nkzawa.socketio.client.IO;
import com.github.nkzawa.socketio.client.Socket;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.AudioSource;
import org.webrtc.DataChannel;
import org.webrtc.IceCandidate;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SdpObserver;
import org.webrtc.SessionDescription;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoCapturerAndroid;
import org.webrtc.VideoSource;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.LinkedList;
public class WebRtcClient {
private final static String TAG = WebRtcClient.class.getCanonicalName();
private final static int MAX_PEER = 2;
private boolean[] endPoints = new boolean[MAX_PEER];
private PeerConnectionFactory factory;
private HashMap<String, Peer> peers = new HashMap<>();
private LinkedList<PeerConnection.IceServer> iceServers = new LinkedList<>();
private PeerConnectionParameters pcParams;
private MediaConstraints pcConstraints = new MediaConstraints();
private MediaStream localMS;
private VideoSource videoSource;
private RtcListener mListener;
private Socket client;
    /**
     * Implement this interface to be notified of events.
     */
    public interface RtcListener{
        /** Fired once the signaling server has assigned this client an id. */
        void onCallReady(String callId);
        /** Fired when the connection status changes (e.g. "DISCONNECTED"). */
        void onStatusChanged(String newStatus);
        /** Fired when the local media stream is available for display. */
        void onLocalStream(MediaStream localStream);
        /** Fired when a remote peer's stream arrives; endPoint >= 1 (0 is local). */
        void onAddRemoteStream(MediaStream remoteStream, int endPoint);
        /** Fired when a remote peer's stream goes away. */
        void onRemoveRemoteStream(int endPoint);
    }
    /** One handler per signaling message type (see MessageHandler's commandMap). */
    private interface Command{
        void execute(String peerId, JSONObject payload) throws JSONException;
    }
    /** Handles "init": starts negotiation by creating an SDP offer for the peer. */
    private class CreateOfferCommand implements Command{
        public void execute(String peerId, JSONObject payload) throws JSONException {
            Log.d(TAG,"CreateOfferCommand");
            Peer peer = peers.get(peerId);
            // The Peer itself is the SdpObserver; the offer is sent from onCreateSuccess.
            peer.pc.createOffer(peer, pcConstraints);
        }
    }
    /** Handles "offer": installs the remote offer SDP and creates an answer. */
    private class CreateAnswerCommand implements Command{
        public void execute(String peerId, JSONObject payload) throws JSONException {
            Log.d(TAG,"CreateAnswerCommand");
            Peer peer = peers.get(peerId);
            SessionDescription sdp = new SessionDescription(
                    SessionDescription.Type.fromCanonicalForm(payload.getString("type")),
                    payload.getString("sdp")
            );
            peer.pc.setRemoteDescription(peer, sdp);
            peer.pc.createAnswer(peer, pcConstraints);
        }
    }
    /** Handles "answer": installs the remote answer SDP on the peer connection. */
    private class SetRemoteSDPCommand implements Command{
        public void execute(String peerId, JSONObject payload) throws JSONException {
            Log.d(TAG,"SetRemoteSDPCommand");
            Peer peer = peers.get(peerId);
            SessionDescription sdp = new SessionDescription(
                    SessionDescription.Type.fromCanonicalForm(payload.getString("type")),
                    payload.getString("sdp")
            );
            peer.pc.setRemoteDescription(peer, sdp);
        }
    }
    /**
     * Handles "candidate": adds a remote ICE candidate. Candidates arriving
     * before the remote description is set are silently dropped.
     */
    private class AddIceCandidateCommand implements Command{
        public void execute(String peerId, JSONObject payload) throws JSONException {
            Log.d(TAG,"AddIceCandidateCommand");
            PeerConnection pc = peers.get(peerId).pc;
            if (pc.getRemoteDescription() != null) {
                IceCandidate candidate = new IceCandidate(
                        payload.getString("id"),
                        payload.getInt("label"),
                        payload.getString("candidate")
                );
                pc.addIceCandidate(candidate);
            }
        }
    }
    /**
     * Send a message through the signaling server as a socket.io "message"
     * event with the envelope {to, type, payload}.
     *
     * @param to id of recipient
     * @param type type of message (e.g. "offer", "answer", "candidate")
     * @param payload payload of message
     * @throws org.json.JSONException if the envelope cannot be built
     */
    public void sendMessage(String to, String type, JSONObject payload) throws JSONException {
        JSONObject message = new JSONObject();
        message.put("to", to);
        message.put("type", type);
        message.put("payload", payload);
        client.emit("message", message);
    }
private class MessageHandler {
private HashMap<String, Command> commandMap;
private MessageHandler() {
this.commandMap = new HashMap<>();
commandMap.put("init", new CreateOfferCommand());
commandMap.put("offer", new CreateAnswerCommand());
commandMap.put("answer", new SetRemoteSDPCommand());
commandMap.put("candidate", new AddIceCandidateCommand());
}
private Emitter.Listener onMessage = new Emitter.Listener() {
@Override
public void call(Object... args) {
JSONObject data = (JSONObject) args[0];
try {
String from = data.getString("from");
String type = data.getString("type");
JSONObject payload = null;
if(!type.equals("init")) {
payload = data.getJSONObject("payload");
}
// if peer is unknown, try to add him
if(!peers.containsKey(from)) {
// if MAX_PEER is reach, ignore the call
int endPoint = findEndPoint();
if(endPoint != MAX_PEER) {
Peer peer = addPeer(from, endPoint);
peer.pc.addStream(localMS);
commandMap.get(type).execute(from, payload);
}
} else {
commandMap.get(type).execute(from, payload);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
};
private Emitter.Listener onId = new Emitter.Listener() {
@Override
public void call(Object... args) {
String id = (String) args[0];
mListener.onCallReady(id);
}
};
}
/**
 * One remote participant: owns the PeerConnection and implements both the
 * SDP observer (local offer/answer creation) and the connection observer
 * (ICE / remote stream lifecycle) callbacks for it.
 */
private class Peer implements SdpObserver, PeerConnection.Observer{
// The RTC connection to this remote peer.
private PeerConnection pc;
// Signaling id of the remote peer; key into the outer 'peers' map.
private String id;
// Slot claimed in 'endPoints'; remote video renders at index endPoint+1.
private int endPoint;
@Override
public void onCreateSuccess(final SessionDescription sdp) {
// TODO: modify sdp to use pcParams prefered codecs
try {
JSONObject payload = new JSONObject();
payload.put("type", sdp.type.canonicalForm());
payload.put("sdp", sdp.description);
// NOTE(review): the SDP is signaled to the remote side before
// setLocalDescription completes — confirm the signaling flow
// tolerates this ordering.
sendMessage(id, sdp.type.canonicalForm(), payload);
pc.setLocalDescription(Peer.this, sdp);
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onSetSuccess() {}
@Override
public void onCreateFailure(String s) {}
@Override
public void onSetFailure(String s) {}
@Override
public void onSignalingChange(PeerConnection.SignalingState signalingState) {}
@Override
public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) {
// Tear the peer down as soon as ICE reports a disconnect.
if(iceConnectionState == PeerConnection.IceConnectionState.DISCONNECTED) {
mListener.onStatusChanged("DISCONNECTED");
removePeer(id);
}
}
@Override
public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {}
@Override
public void onIceCandidate(final IceCandidate candidate) {
// Forward each locally gathered ICE candidate to the remote peer.
try {
JSONObject payload = new JSONObject();
payload.put("label", candidate.sdpMLineIndex);
payload.put("id", candidate.sdpMid);
payload.put("candidate", candidate.sdp);
sendMessage(id, "candidate", payload);
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onAddStream(MediaStream mediaStream) {
Log.d(TAG,"onAddStream "+mediaStream.label());
// remote streams are displayed from 1 to MAX_PEER (0 is localStream)
mListener.onAddRemoteStream(mediaStream, endPoint+1);
}
@Override
public void onRemoveStream(MediaStream mediaStream) {
Log.d(TAG,"onRemoveStream "+mediaStream.label());
// NOTE(review): removePeer may already have run via the DISCONNECTED
// branch of onIceConnectionChange — verify double removal is handled.
removePeer(id);
}
@Override
public void onDataChannel(DataChannel dataChannel) {}
@Override
public void onRenegotiationNeeded() {
}
/**
 * Creates the underlying PeerConnection, attaches the local media stream
 * and reports CONNECTING to the listener.
 *
 * @param id       signaling id of the remote peer
 * @param endPoint endpoint slot reserved for this peer
 */
public Peer(String id, int endPoint) {
Log.d(TAG,"new Peer: "+id + " " + endPoint);
this.pc = factory.createPeerConnection(iceServers, pcConstraints, this);
this.id = id;
this.endPoint = endPoint;
pc.addStream(localMS); //, new MediaConstraints()
mListener.onStatusChanged("CONNECTING");
}
}
/** Create and register a Peer for the given id, claiming its endpoint slot. */
private Peer addPeer(String id, int endPoint) {
    final Peer newPeer = new Peer(id, endPoint);
    endPoints[endPoint] = true;
    peers.put(id, newPeer);
    return newPeer;
}
/**
 * Tears down the connection to a peer and frees its endpoint slot.
 * Safe to call more than once for the same id: both
 * onIceConnectionChange(DISCONNECTED) and onRemoveStream invoke it.
 *
 * @param id id of the peer to remove
 */
private void removePeer(String id) {
    Peer peer = peers.get(id);
    if (peer == null) {
        // Fix: already removed (e.g. second callback for the same peer);
        // previously this dereferenced null and crashed.
        return;
    }
    mListener.onRemoveRemoteStream(peer.endPoint);
    peer.pc.close();
    peers.remove(peer.id);
    endPoints[peer.endPoint] = false;
}
/**
 * Builds the client: initializes the native WebRTC globals, connects to the
 * signaling server, and prepares default STUN servers / media constraints.
 *
 * @param listener    callback sink for call and stream lifecycle events
 * @param host        signaling server URI (socket.io endpoint)
 * @param params      peer-connection / media configuration
 * @param mEGLcontext EGL context used for hardware video acceleration
 */
public WebRtcClient(RtcListener listener, String host, PeerConnectionParameters params, EGLContext mEGLcontext) {
mListener = listener;
pcParams = params;
// NOTE(review): 'listener' is passed where initializeAndroidGlobals
// expects a Context-like first argument — confirm this is intentional.
PeerConnectionFactory.initializeAndroidGlobals(listener, true, true,
params.videoCodecHwAcceleration, mEGLcontext);
factory = new PeerConnectionFactory();
MessageHandler messageHandler = new MessageHandler();
try {
client = IO.socket(host);
} catch (URISyntaxException e) {
// NOTE(review): on a malformed host 'client' stays null and the
// client.on(...) calls below will NPE — consider failing fast.
e.printStackTrace();
}
client.on("id", messageHandler.onId);
client.on("message", messageHandler.onMessage);
client.connect();
// Public STUN servers used for ICE candidate gathering.
iceServers.add(new PeerConnection.IceServer("stun:23.21.150.121"));
iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));
pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
pcConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
}
/**
 * Pauses local video capture. Call this method in Activity.onPause().
 */
public void onPause() {
    if (videoSource == null) {
        return; // audio-only call: no video source to pause
    }
    videoSource.stop();
}
/**
 * Resumes local video capture. Call this method in Activity.onResume().
 */
public void onResume() {
    if (videoSource == null) {
        return; // audio-only call: no video source to restart
    }
    videoSource.restart();
}
/**
 * Call this method in Activity.onDestroy().
 * Closes every peer connection and releases native and socket resources.
 */
public void onDestroy() {
    for (Peer peer : peers.values()) {
        peer.pc.dispose();
    }
    if (videoSource != null) {
        // Fix: videoSource is only created when video calls are enabled
        // (see setCamera); the unguarded dispose() crashed audio-only
        // sessions. onPause/onResume already guard this the same way.
        videoSource.dispose();
    }
    factory.dispose();
    client.disconnect();
    client.close();
}
/** Returns the first free endpoint slot, or MAX_PEER when all are taken. */
private int findEndPoint() {
    int slot = 0;
    while (slot < MAX_PEER && endPoints[slot]) {
        slot++;
    }
    return slot;
}
/**
 * Start the client.
 *
 * Sets up the local stream and announces readiness to the signaling
 * server. Call this method after onCallReady.
 *
 * @param name client name
 */
public void start(String name) {
    setCamera();
    try {
        client.emit("readyToStream", new JSONObject().put("name", name));
    } catch (JSONException e) {
        e.printStackTrace();
    }
}
/**
 * Creates the local media stream: a video track constrained by pcParams
 * (only when video calls are enabled) plus an always-present audio track,
 * then hands the stream to the listener for local rendering.
 */
private void setCamera(){
localMS = factory.createLocalMediaStream("ARDAMS");
if(pcParams.videoCallEnabled){
MediaConstraints videoConstraints = new MediaConstraints();
// Cap resolution and pin the frame rate to the configured values.
videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", Integer.toString(pcParams.videoHeight)));
videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", Integer.toString(pcParams.videoWidth)));
videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxFrameRate", Integer.toString(pcParams.videoFps)));
videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("minFrameRate", Integer.toString(pcParams.videoFps)));
// videoSource stays null for audio-only calls.
videoSource = factory.createVideoSource(getVideoCapturer(), videoConstraints);
localMS.addTrack(factory.createVideoTrack("ARDAMSv0", videoSource));
}
AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
localMS.addTrack(factory.createAudioTrack("ARDAMSa0", audioSource));
mListener.onLocalStream(localMS);
}
/** Opens a capturer on the device's front-facing camera. */
private VideoCapturer getVideoCapturer() {
    return VideoCapturerAndroid.create(
            VideoCapturerAndroid.getNameOfFrontFacingDevice());
}
}
|
package types.linkedlist;
import util.adt.LinkNode;
/**
 * Iteratively merges two sorted linked lists into a new sorted list.
 * The input lists are never modified; every node in the result is a
 * freshly allocated copy.
 */
public class MergeTwoSortedListIterative {

    /**
     * Merge two ascending-sorted lists.
     *
     * @param n1 head of the first sorted list (may be null)
     * @param n2 head of the second sorted list (may be null)
     * @return head of the newly built merged list, or null if both inputs are empty
     */
    public LinkNode merge(LinkNode n1, LinkNode n2) {
        // Dummy head simplifies appending; the result starts at rs.getNext().
        LinkNode rs = new LinkNode(-1, null);
        LinkNode cur = rs;
        while (n1 != null && n2 != null) {
            if (n1.getData().compareTo(n2.getData()) < 0) {
                cur.setNext(new LinkNode(n1.getData(), null));
                n1 = n1.getNext();
            } else {
                cur.setNext(new LinkNode(n2.getData(), null));
                n2 = n2.getNext();
            }
            cur = cur.getNext();
        }
        // At most one list still has nodes; copy whichever remains.
        // (Replaces two duplicated tail-copy loops with one.)
        LinkNode rest = (n1 != null) ? n1 : n2;
        while (rest != null) {
            cur.setNext(new LinkNode(rest.getData(), null));
            rest = rest.getNext();
            cur = cur.getNext();
        }
        return rs.getNext();
    }
}
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: Quota.proto
package org.apache.hadoop.hbase.shaded.protobuf.generated;
@javax.annotation.Generated("proto") public final class QuotaProtos {
private QuotaProtos() {}
// Quota.proto declares no extensions, so this generated hook is a no-op.
public static void registerAllExtensions(
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite registry) {
}
// Generated overload; delegates to the ExtensionRegistryLite variant above.
public static void registerAllExtensions(
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite) registry);
}
/**
 * Protobuf enum {@code hbase.pb.QuotaScope}
 */
// NOTE(review): protoc-generated code — change Quota.proto and regenerate
// rather than editing this enum by hand.
public enum QuotaScope
implements org.apache.hbase.thirdparty.com.google.protobuf.ProtocolMessageEnum {
/**
* <code>CLUSTER = 1;</code>
*/
CLUSTER(1),
/**
* <code>MACHINE = 2;</code>
*/
MACHINE(2),
;
/**
* <code>CLUSTER = 1;</code>
*/
public static final int CLUSTER_VALUE = 1;
/**
* <code>MACHINE = 2;</code>
*/
public static final int MACHINE_VALUE = 2;
// Wire (proto) number backing this constant.
public final int getNumber() {
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static QuotaScope valueOf(int value) {
return forNumber(value);
}
// Returns null (not an exception) for unknown wire values.
public static QuotaScope forNumber(int value) {
switch (value) {
case 1: return CLUSTER;
case 2: return MACHINE;
default: return null;
}
}
public static org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<QuotaScope>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<
QuotaScope> internalValueMap =
new org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<QuotaScope>() {
public QuotaScope findValueByNumber(int number) {
return QuotaScope.forNumber(number);
}
};
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.getDescriptor().getEnumTypes().get(0);
}
private static final QuotaScope[] VALUES = values();
public static QuotaScope valueOf(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private QuotaScope(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.QuotaScope)
}
/**
 * Protobuf enum {@code hbase.pb.ThrottleType}
 */
// NOTE(review): protoc-generated code — change Quota.proto and regenerate
// rather than editing this enum by hand.
public enum ThrottleType
implements org.apache.hbase.thirdparty.com.google.protobuf.ProtocolMessageEnum {
/**
* <code>REQUEST_NUMBER = 1;</code>
*/
REQUEST_NUMBER(1),
/**
* <code>REQUEST_SIZE = 2;</code>
*/
REQUEST_SIZE(2),
/**
* <code>WRITE_NUMBER = 3;</code>
*/
WRITE_NUMBER(3),
/**
* <code>WRITE_SIZE = 4;</code>
*/
WRITE_SIZE(4),
/**
* <code>READ_NUMBER = 5;</code>
*/
READ_NUMBER(5),
/**
* <code>READ_SIZE = 6;</code>
*/
READ_SIZE(6),
/**
* <code>REQUEST_CAPACITY_UNIT = 7;</code>
*/
REQUEST_CAPACITY_UNIT(7),
/**
* <code>WRITE_CAPACITY_UNIT = 8;</code>
*/
WRITE_CAPACITY_UNIT(8),
/**
* <code>READ_CAPACITY_UNIT = 9;</code>
*/
READ_CAPACITY_UNIT(9),
;
/**
* <code>REQUEST_NUMBER = 1;</code>
*/
public static final int REQUEST_NUMBER_VALUE = 1;
/**
* <code>REQUEST_SIZE = 2;</code>
*/
public static final int REQUEST_SIZE_VALUE = 2;
/**
* <code>WRITE_NUMBER = 3;</code>
*/
public static final int WRITE_NUMBER_VALUE = 3;
/**
* <code>WRITE_SIZE = 4;</code>
*/
public static final int WRITE_SIZE_VALUE = 4;
/**
* <code>READ_NUMBER = 5;</code>
*/
public static final int READ_NUMBER_VALUE = 5;
/**
* <code>READ_SIZE = 6;</code>
*/
public static final int READ_SIZE_VALUE = 6;
/**
* <code>REQUEST_CAPACITY_UNIT = 7;</code>
*/
public static final int REQUEST_CAPACITY_UNIT_VALUE = 7;
/**
* <code>WRITE_CAPACITY_UNIT = 8;</code>
*/
public static final int WRITE_CAPACITY_UNIT_VALUE = 8;
/**
* <code>READ_CAPACITY_UNIT = 9;</code>
*/
public static final int READ_CAPACITY_UNIT_VALUE = 9;
// Wire (proto) number backing this constant.
public final int getNumber() {
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static ThrottleType valueOf(int value) {
return forNumber(value);
}
// Returns null (not an exception) for unknown wire values.
public static ThrottleType forNumber(int value) {
switch (value) {
case 1: return REQUEST_NUMBER;
case 2: return REQUEST_SIZE;
case 3: return WRITE_NUMBER;
case 4: return WRITE_SIZE;
case 5: return READ_NUMBER;
case 6: return READ_SIZE;
case 7: return REQUEST_CAPACITY_UNIT;
case 8: return WRITE_CAPACITY_UNIT;
case 9: return READ_CAPACITY_UNIT;
default: return null;
}
}
public static org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<ThrottleType>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<
ThrottleType> internalValueMap =
new org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<ThrottleType>() {
public ThrottleType findValueByNumber(int number) {
return ThrottleType.forNumber(number);
}
};
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.getDescriptor().getEnumTypes().get(1);
}
private static final ThrottleType[] VALUES = values();
public static ThrottleType valueOf(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private ThrottleType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.ThrottleType)
}
/**
 * Protobuf enum {@code hbase.pb.QuotaType}
 */
// NOTE(review): protoc-generated code — change Quota.proto and regenerate
// rather than editing this enum by hand.
public enum QuotaType
implements org.apache.hbase.thirdparty.com.google.protobuf.ProtocolMessageEnum {
/**
* <code>THROTTLE = 1;</code>
*/
THROTTLE(1),
/**
* <code>SPACE = 2;</code>
*/
SPACE(2),
;
/**
* <code>THROTTLE = 1;</code>
*/
public static final int THROTTLE_VALUE = 1;
/**
* <code>SPACE = 2;</code>
*/
public static final int SPACE_VALUE = 2;
// Wire (proto) number backing this constant.
public final int getNumber() {
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static QuotaType valueOf(int value) {
return forNumber(value);
}
// Returns null (not an exception) for unknown wire values.
public static QuotaType forNumber(int value) {
switch (value) {
case 1: return THROTTLE;
case 2: return SPACE;
default: return null;
}
}
public static org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<QuotaType>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<
QuotaType> internalValueMap =
new org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<QuotaType>() {
public QuotaType findValueByNumber(int number) {
return QuotaType.forNumber(number);
}
};
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.getDescriptor().getEnumTypes().get(2);
}
private static final QuotaType[] VALUES = values();
public static QuotaType valueOf(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private QuotaType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.QuotaType)
}
/**
 * <pre>
 * Defines what action should be taken when the SpaceQuota is violated
 * </pre>
 *
 * Protobuf enum {@code hbase.pb.SpaceViolationPolicy}
 */
// NOTE(review): protoc-generated code — change Quota.proto and regenerate
// rather than editing this enum by hand.
public enum SpaceViolationPolicy
implements org.apache.hbase.thirdparty.com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Disable the table(s)
* </pre>
*
* <code>DISABLE = 1;</code>
*/
DISABLE(1),
/**
* <pre>
* No writes, bulk-loads, or compactions
* </pre>
*
* <code>NO_WRITES_COMPACTIONS = 2;</code>
*/
NO_WRITES_COMPACTIONS(2),
/**
* <pre>
* No writes or bulk-loads
* </pre>
*
* <code>NO_WRITES = 3;</code>
*/
NO_WRITES(3),
/**
* <pre>
* No puts or bulk-loads, but deletes are allowed
* </pre>
*
* <code>NO_INSERTS = 4;</code>
*/
NO_INSERTS(4),
;
/**
* <pre>
* Disable the table(s)
* </pre>
*
* <code>DISABLE = 1;</code>
*/
public static final int DISABLE_VALUE = 1;
/**
* <pre>
* No writes, bulk-loads, or compactions
* </pre>
*
* <code>NO_WRITES_COMPACTIONS = 2;</code>
*/
public static final int NO_WRITES_COMPACTIONS_VALUE = 2;
/**
* <pre>
* No writes or bulk-loads
* </pre>
*
* <code>NO_WRITES = 3;</code>
*/
public static final int NO_WRITES_VALUE = 3;
/**
* <pre>
* No puts or bulk-loads, but deletes are allowed
* </pre>
*
* <code>NO_INSERTS = 4;</code>
*/
public static final int NO_INSERTS_VALUE = 4;
// Wire (proto) number backing this constant.
public final int getNumber() {
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static SpaceViolationPolicy valueOf(int value) {
return forNumber(value);
}
// Returns null (not an exception) for unknown wire values.
public static SpaceViolationPolicy forNumber(int value) {
switch (value) {
case 1: return DISABLE;
case 2: return NO_WRITES_COMPACTIONS;
case 3: return NO_WRITES;
case 4: return NO_INSERTS;
default: return null;
}
}
public static org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<SpaceViolationPolicy>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<
SpaceViolationPolicy> internalValueMap =
new org.apache.hbase.thirdparty.com.google.protobuf.Internal.EnumLiteMap<SpaceViolationPolicy>() {
public SpaceViolationPolicy findValueByNumber(int number) {
return SpaceViolationPolicy.forNumber(number);
}
};
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.getDescriptor().getEnumTypes().get(3);
}
private static final SpaceViolationPolicy[] VALUES = values();
public static SpaceViolationPolicy valueOf(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private SpaceViolationPolicy(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.SpaceViolationPolicy)
}
// NOTE(review): protoc-generated accessor interface for hbase.pb.TimedQuota —
// change Quota.proto and regenerate rather than editing by hand.
public interface TimedQuotaOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.TimedQuota)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
* <code>required .hbase.pb.TimeUnit time_unit = 1;</code>
*/
boolean hasTimeUnit();
/**
* <code>required .hbase.pb.TimeUnit time_unit = 1;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit();
/**
* <code>optional uint64 soft_limit = 2;</code>
*/
boolean hasSoftLimit();
/**
* <code>optional uint64 soft_limit = 2;</code>
*/
long getSoftLimit();
/**
* <code>optional float share = 3;</code>
*/
boolean hasShare();
/**
* <code>optional float share = 3;</code>
*/
float getShare();
/**
* <code>optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE];</code>
*/
boolean hasScope();
/**
* <code>optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE];</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope getScope();
}
/**
* Protobuf type {@code hbase.pb.TimedQuota}
*/
@javax.annotation.Generated("proto") public static final class TimedQuota extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.TimedQuota)
TimedQuotaOrBuilder {
private static final long serialVersionUID = 0L;
// Use TimedQuota.newBuilder() to construct.
private TimedQuota(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TimedQuota() {
timeUnit_ = 1;
softLimit_ = 0L;
share_ = 0F;
scope_ = 2;
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TimedQuota(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit value = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
timeUnit_ = rawValue;
}
break;
}
case 16: {
bitField0_ |= 0x00000002;
softLimit_ = input.readUInt64();
break;
}
case 29: {
bitField0_ |= 0x00000004;
share_ = input.readFloat();
break;
}
case 32: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope value = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(4, rawValue);
} else {
bitField0_ |= 0x00000008;
scope_ = rawValue;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_TimedQuota_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_TimedQuota_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder.class);
}
private int bitField0_;
public static final int TIME_UNIT_FIELD_NUMBER = 1;
private int timeUnit_;
/**
* <code>required .hbase.pb.TimeUnit time_unit = 1;</code>
*/
public boolean hasTimeUnit() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.TimeUnit time_unit = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit() {
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.valueOf(timeUnit_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS : result;
}
public static final int SOFT_LIMIT_FIELD_NUMBER = 2;
private long softLimit_;
/**
* <code>optional uint64 soft_limit = 2;</code>
*/
public boolean hasSoftLimit() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional uint64 soft_limit = 2;</code>
*/
public long getSoftLimit() {
return softLimit_;
}
public static final int SHARE_FIELD_NUMBER = 3;
private float share_;
/**
* <code>optional float share = 3;</code>
*/
public boolean hasShare() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional float share = 3;</code>
*/
public float getShare() {
return share_;
}
public static final int SCOPE_FIELD_NUMBER = 4;
private int scope_;
/**
* <code>optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE];</code>
*/
public boolean hasScope() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE];</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope getScope() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.valueOf(scope_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.MACHINE : result;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasTimeUnit()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, timeUnit_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeUInt64(2, softLimit_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeFloat(3, share_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeEnum(4, scope_);
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, timeUnit_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeUInt64Size(2, softLimit_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeFloatSize(3, share_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeEnumSize(4, scope_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota) obj;
boolean result = true;
result = result && (hasTimeUnit() == other.hasTimeUnit());
if (hasTimeUnit()) {
result = result && timeUnit_ == other.timeUnit_;
}
result = result && (hasSoftLimit() == other.hasSoftLimit());
if (hasSoftLimit()) {
result = result && (getSoftLimit()
== other.getSoftLimit());
}
result = result && (hasShare() == other.hasShare());
if (hasShare()) {
result = result && (
java.lang.Float.floatToIntBits(getShare())
== java.lang.Float.floatToIntBits(
other.getShare()));
}
result = result && (hasScope() == other.hasScope());
if (hasScope()) {
result = result && scope_ == other.scope_;
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTimeUnit()) {
hash = (37 * hash) + TIME_UNIT_FIELD_NUMBER;
hash = (53 * hash) + timeUnit_;
}
if (hasSoftLimit()) {
hash = (37 * hash) + SOFT_LIMIT_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
getSoftLimit());
}
if (hasShare()) {
hash = (37 * hash) + SHARE_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(
getShare());
}
if (hasScope()) {
hash = (37 * hash) + SCOPE_FIELD_NUMBER;
hash = (53 * hash) + scope_;
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
/** Returns a fresh Builder for this message type (instance-side alias of {@link #newBuilder()}). */
public Builder newBuilderForType() { return newBuilder(); }
/** Returns a new, empty TimedQuota builder. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
/** Returns a new builder pre-populated with all set fields of {@code prototype}. */
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
/** Returns a builder with this message's contents; the default instance yields an empty builder. */
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  // Delegate straight to the parented Builder constructor; no intermediate local needed.
  return new Builder(parent);
}
/**
 * Protobuf type {@code hbase.pb.TimedQuota}
 *
 * <p>Builder for TimedQuota. Field presence is tracked in {@code bitField0_}
 * (bit 0 = time_unit, bit 1 = soft_limit, bit 2 = share, bit 3 = scope).
 */
@javax.annotation.Generated("proto") public static final class Builder extends
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:hbase.pb.TimedQuota)
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder {
  public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_TimedQuota_descriptor;
  }
  protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_TimedQuota_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder.class);
  }
  // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(
      org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  // No message-typed sub-fields here, so there are no field builders to force-create;
  // the body is intentionally empty even when alwaysUseFieldBuilders is set.
  private void maybeForceBuilderInitialization() {
    if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
    }
  }
  /** Resets every field to its default numeric value and clears all presence bits. */
  public Builder clear() {
    super.clear();
    timeUnit_ = 1;
    bitField0_ = (bitField0_ & ~0x00000001);
    softLimit_ = 0L;
    bitField0_ = (bitField0_ & ~0x00000002);
    share_ = 0F;
    bitField0_ = (bitField0_ & ~0x00000004);
    scope_ = 2;
    bitField0_ = (bitField0_ & ~0x00000008);
    return this;
  }
  public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_TimedQuota_descriptor;
  }
  public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getDefaultInstanceForType() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
  }
  /**
   * Builds the message, throwing if the required {@code time_unit} field
   * (checked via {@link #isInitialized()}) has not been set.
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota build() {
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  /**
   * Copies the builder's field values and presence bits into a new message
   * without enforcing required-field initialization.
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota buildPartial() {
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
      to_bitField0_ |= 0x00000001;
    }
    result.timeUnit_ = timeUnit_;
    if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
      to_bitField0_ |= 0x00000002;
    }
    result.softLimit_ = softLimit_;
    if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
      to_bitField0_ |= 0x00000004;
    }
    result.share_ = share_;
    if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
      to_bitField0_ |= 0x00000008;
    }
    result.scope_ = scope_;
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }
  public Builder clone() {
    return (Builder) super.clone();
  }
  public Builder setField(
      org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return (Builder) super.setField(field, value);
  }
  public Builder clearField(
      org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
    return (Builder) super.clearField(field);
  }
  public Builder clearOneof(
      org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return (Builder) super.clearOneof(oneof);
  }
  public Builder setRepeatedField(
      org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return (Builder) super.setRepeatedField(field, index, value);
  }
  public Builder addRepeatedField(
      org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return (Builder) super.addRepeatedField(field, value);
  }
  /** Dispatches to the typed overload when possible, else to the reflective generic merge. */
  public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota) {
      return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  /** Copies each field from {@code other} that is present there, overwriting this builder's value. */
  public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota other) {
    if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) return this;
    if (other.hasTimeUnit()) {
      setTimeUnit(other.getTimeUnit());
    }
    if (other.hasSoftLimit()) {
      setSoftLimit(other.getSoftLimit());
    }
    if (other.hasShare()) {
      setShare(other.getShare());
    }
    if (other.hasScope()) {
      setScope(other.getScope());
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }
  /** The only required field in TimedQuota is {@code time_unit}. */
  public final boolean isInitialized() {
    if (!hasTimeUnit()) {
      return false;
    }
    return true;
  }
  /**
   * Parses from the wire into this builder. On parse failure the partially
   * parsed message (if any) is still merged in via the finally block before
   * the exception propagates, matching standard protobuf merge semantics.
   */
  public Builder mergeFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  // Presence bitmap; bit assignments documented on the class javadoc above.
  private int bitField0_;
  // Enum stored by wire number; 1 is the declared default for time_unit.
  private int timeUnit_ = 1;
  /**
   * <code>required .hbase.pb.TimeUnit time_unit = 1;</code>
   */
  public boolean hasTimeUnit() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required .hbase.pb.TimeUnit time_unit = 1;</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit() {
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.valueOf(timeUnit_);
    // valueOf returns null for unrecognized numbers; fall back to NANOSECONDS.
    return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS : result;
  }
  /**
   * <code>required .hbase.pb.TimeUnit time_unit = 1;</code>
   */
  public Builder setTimeUnit(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000001;
    timeUnit_ = value.getNumber();
    onChanged();
    return this;
  }
  /**
   * <code>required .hbase.pb.TimeUnit time_unit = 1;</code>
   */
  public Builder clearTimeUnit() {
    bitField0_ = (bitField0_ & ~0x00000001);
    timeUnit_ = 1;
    onChanged();
    return this;
  }
  private long softLimit_ ;
  /**
   * <code>optional uint64 soft_limit = 2;</code>
   */
  public boolean hasSoftLimit() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  /**
   * <code>optional uint64 soft_limit = 2;</code>
   */
  public long getSoftLimit() {
    return softLimit_;
  }
  /**
   * <code>optional uint64 soft_limit = 2;</code>
   */
  public Builder setSoftLimit(long value) {
    bitField0_ |= 0x00000002;
    softLimit_ = value;
    onChanged();
    return this;
  }
  /**
   * <code>optional uint64 soft_limit = 2;</code>
   */
  public Builder clearSoftLimit() {
    bitField0_ = (bitField0_ & ~0x00000002);
    softLimit_ = 0L;
    onChanged();
    return this;
  }
  private float share_ ;
  /**
   * <code>optional float share = 3;</code>
   */
  public boolean hasShare() {
    return ((bitField0_ & 0x00000004) == 0x00000004);
  }
  /**
   * <code>optional float share = 3;</code>
   */
  public float getShare() {
    return share_;
  }
  /**
   * <code>optional float share = 3;</code>
   */
  public Builder setShare(float value) {
    bitField0_ |= 0x00000004;
    share_ = value;
    onChanged();
    return this;
  }
  /**
   * <code>optional float share = 3;</code>
   */
  public Builder clearShare() {
    bitField0_ = (bitField0_ & ~0x00000004);
    share_ = 0F;
    onChanged();
    return this;
  }
  // Enum stored by wire number; 2 corresponds to the declared default MACHINE.
  private int scope_ = 2;
  /**
   * <code>optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE];</code>
   */
  public boolean hasScope() {
    return ((bitField0_ & 0x00000008) == 0x00000008);
  }
  /**
   * <code>optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE];</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope getScope() {
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.valueOf(scope_);
    return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.MACHINE : result;
  }
  /**
   * <code>optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE];</code>
   */
  public Builder setScope(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000008;
    scope_ = value.getNumber();
    onChanged();
    return this;
  }
  /**
   * <code>optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE];</code>
   */
  public Builder clearScope() {
    bitField0_ = (bitField0_ & ~0x00000008);
    scope_ = 2;
    onChanged();
    return this;
  }
  public final Builder setUnknownFields(
      final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  public final Builder mergeUnknownFields(
      final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:hbase.pb.TimedQuota)
}
// @@protoc_insertion_point(class_scope:hbase.pb.TimedQuota)
// Singleton default instance shared by all callers; created eagerly at class load.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Deprecated public field kept for generated-code compatibility; prefer parser().
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<TimedQuota>
    PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<TimedQuota>() {
  public TimedQuota parsePartialFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    // Parsing happens inside the private TimedQuota(CodedInputStream, ...) constructor.
    return new TimedQuota(input, extensionRegistry);
  }
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<TimedQuota> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<TimedQuota> getParserForType() {
  return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/**
 * Read-side interface for {@code hbase.pb.Throttle}: presence checks plus message and
 * message-or-builder getters for each of the nine optional {@code TimedQuota} fields.
 */
public interface ThrottleOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.Throttle)
    org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
  /**
   * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
   */
  boolean hasReqNum();
  /**
   * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqNum();
  /**
   * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqNumOrBuilder();
  /**
   * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
   */
  boolean hasReqSize();
  /**
   * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqSize();
  /**
   * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqSizeOrBuilder();
  /**
   * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
   */
  boolean hasWriteNum();
  /**
   * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteNum();
  /**
   * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteNumOrBuilder();
  /**
   * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
   */
  boolean hasWriteSize();
  /**
   * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteSize();
  /**
   * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteSizeOrBuilder();
  /**
   * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
   */
  boolean hasReadNum();
  /**
   * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadNum();
  /**
   * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadNumOrBuilder();
  /**
   * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
   */
  boolean hasReadSize();
  /**
   * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadSize();
  /**
   * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadSizeOrBuilder();
  /**
   * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
   */
  boolean hasReqCapacityUnit();
  /**
   * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqCapacityUnit();
  /**
   * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqCapacityUnitOrBuilder();
  /**
   * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
   */
  boolean hasWriteCapacityUnit();
  /**
   * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteCapacityUnit();
  /**
   * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteCapacityUnitOrBuilder();
  /**
   * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
   */
  boolean hasReadCapacityUnit();
  /**
   * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadCapacityUnit();
  /**
   * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadCapacityUnitOrBuilder();
}
/**
 * Protobuf type {@code hbase.pb.Throttle}
 *
 * <p>Groups up to nine optional {@code TimedQuota} sub-messages; presence is
 * tracked in {@code bitField0_}. Immutable once constructed.
 */
@javax.annotation.Generated("proto") public static final class Throttle extends
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hbase.pb.Throttle)
    ThrottleOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use Throttle.newBuilder() to construct.
  private Throttle(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used by the parsing constructor and DEFAULT_INSTANCE;
  // all message fields start null (absent).
  private Throttle() {
  }
  @java.lang.Override
  public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
/**
 * Wire-format parsing constructor: reads tag/value pairs until end of input
 * (tag 0). Each recognized field tag (10, 18, ... 74 = field numbers 1-9 with
 * wire type 2) reads a nested TimedQuota; unrecognized tags go to the unknown
 * field set. If the same singular field appears twice on the wire, the new
 * value is merged into the previously parsed one via a temporary sub-builder,
 * per standard protobuf last-wins merge semantics.
 */
private Throttle(
    org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
    org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // NOTE: placing `default:` before the `case` labels is legal Java; specific
      // case labels are always matched first regardless of lexical order.
      switch (tag) {
        case 0:
          // End of stream / end of group.
          done = true;
          break;
        default: {
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            subBuilder = reqNum_.toBuilder();
          }
          reqNum_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(reqNum_);
            reqNum_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000001;
          break;
        }
        case 18: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            subBuilder = reqSize_.toBuilder();
          }
          reqSize_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(reqSize_);
            reqSize_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000002;
          break;
        }
        case 26: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            subBuilder = writeNum_.toBuilder();
          }
          writeNum_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(writeNum_);
            writeNum_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000004;
          break;
        }
        case 34: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
          if (((bitField0_ & 0x00000008) == 0x00000008)) {
            subBuilder = writeSize_.toBuilder();
          }
          writeSize_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(writeSize_);
            writeSize_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000008;
          break;
        }
        case 42: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
          if (((bitField0_ & 0x00000010) == 0x00000010)) {
            subBuilder = readNum_.toBuilder();
          }
          readNum_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(readNum_);
            readNum_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000010;
          break;
        }
        case 50: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
          if (((bitField0_ & 0x00000020) == 0x00000020)) {
            subBuilder = readSize_.toBuilder();
          }
          readSize_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(readSize_);
            readSize_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000020;
          break;
        }
        case 58: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
          if (((bitField0_ & 0x00000040) == 0x00000040)) {
            subBuilder = reqCapacityUnit_.toBuilder();
          }
          reqCapacityUnit_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(reqCapacityUnit_);
            reqCapacityUnit_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000040;
          break;
        }
        case 66: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
          if (((bitField0_ & 0x00000080) == 0x00000080)) {
            subBuilder = writeCapacityUnit_.toBuilder();
          }
          writeCapacityUnit_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(writeCapacityUnit_);
            writeCapacityUnit_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000080;
          break;
        }
        case 74: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
          if (((bitField0_ & 0x00000100) == 0x00000100)) {
            subBuilder = readCapacityUnit_.toBuilder();
          }
          readCapacityUnit_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(readCapacityUnit_);
            readCapacityUnit_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000100;
          break;
        }
      }
    }
  } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
    // Attach the partially parsed message so callers can inspect it.
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Always seal unknown fields / extensions, even on failure.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
/** Descriptor for {@code hbase.pb.Throttle}, resolved from the enclosing file descriptor. */
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Throttle_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Throttle_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder.class);
}
// Presence bitmap: bit k-1 set <=> field number k (req_num .. read_capacity_unit) is present.
private int bitField0_;
public static final int REQ_NUM_FIELD_NUMBER = 1;
// Message fields are null when absent; getters substitute the default instance.
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqNum_;
/**
 * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
 */
public boolean hasReqNum() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqNum() {
  return reqNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqNum_;
}
/**
 * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqNumOrBuilder() {
  return reqNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqNum_;
}
public static final int REQ_SIZE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqSize_;
/**
 * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
 */
public boolean hasReqSize() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqSize() {
  return reqSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqSize_;
}
/**
 * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqSizeOrBuilder() {
  return reqSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqSize_;
}
public static final int WRITE_NUM_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeNum_;
/**
 * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
 */
public boolean hasWriteNum() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteNum() {
  return writeNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeNum_;
}
/**
 * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteNumOrBuilder() {
  return writeNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeNum_;
}
public static final int WRITE_SIZE_FIELD_NUMBER = 4;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeSize_;
/**
 * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
 */
public boolean hasWriteSize() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteSize() {
  return writeSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeSize_;
}
/**
 * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteSizeOrBuilder() {
  return writeSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeSize_;
}
public static final int READ_NUM_FIELD_NUMBER = 5;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readNum_;
/**
 * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
 */
public boolean hasReadNum() {
  return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadNum() {
  return readNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readNum_;
}
/**
 * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadNumOrBuilder() {
  return readNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readNum_;
}
public static final int READ_SIZE_FIELD_NUMBER = 6;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readSize_;
/**
 * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
 */
public boolean hasReadSize() {
  return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadSize() {
  return readSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readSize_;
}
/**
 * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadSizeOrBuilder() {
  return readSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readSize_;
}
public static final int REQ_CAPACITY_UNIT_FIELD_NUMBER = 7;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqCapacityUnit_;
/**
 * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
 */
public boolean hasReqCapacityUnit() {
  return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
 * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqCapacityUnit() {
  return reqCapacityUnit_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqCapacityUnit_;
}
/**
 * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqCapacityUnitOrBuilder() {
  return reqCapacityUnit_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqCapacityUnit_;
}
public static final int WRITE_CAPACITY_UNIT_FIELD_NUMBER = 8;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeCapacityUnit_;
/**
 * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
 */
public boolean hasWriteCapacityUnit() {
  return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
 * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteCapacityUnit() {
  return writeCapacityUnit_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeCapacityUnit_;
}
/**
 * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteCapacityUnitOrBuilder() {
  return writeCapacityUnit_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeCapacityUnit_;
}
public static final int READ_CAPACITY_UNIT_FIELD_NUMBER = 9;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readCapacityUnit_;
/**
 * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
 */
public boolean hasReadCapacityUnit() {
  return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
 * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadCapacityUnit() {
  return readCapacityUnit_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readCapacityUnit_;
}
/**
 * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadCapacityUnitOrBuilder() {
  return readCapacityUnit_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readCapacityUnit_;
}
// Memoized tri-state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
/**
 * A Throttle has no required fields of its own; it is initialized iff every
 * *present* TimedQuota sub-message is itself initialized (each requires
 * {@code time_unit}). The result is cached in {@code memoizedIsInitialized}.
 */
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  if (hasReqNum()) {
    if (!getReqNum().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  if (hasReqSize()) {
    if (!getReqSize().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  if (hasWriteNum()) {
    if (!getWriteNum().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  if (hasWriteSize()) {
    if (!getWriteSize().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  if (hasReadNum()) {
    if (!getReadNum().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  if (hasReadSize()) {
    if (!getReadSize().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  if (hasReqCapacityUnit()) {
    if (!getReqCapacityUnit().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  if (hasWriteCapacityUnit()) {
    if (!getWriteCapacityUnit().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  if (hasReadCapacityUnit()) {
    if (!getReadCapacityUnit().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  memoizedIsInitialized = 1;
  return true;
}
    // Serializes only the fields whose presence bit is set, in field-number order
    // (1..9), then appends any unknown fields preserved from parsing.
    public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, getReqNum());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, getReqSize());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(3, getWriteNum());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(4, getWriteSize());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeMessage(5, getReadNum());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeMessage(6, getReadSize());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeMessage(7, getReqCapacityUnit());
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeMessage(8, getWriteCapacityUnit());
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeMessage(9, getReadCapacityUnit());
      }
      unknownFields.writeTo(output);
    }
    // Computes the wire size of this message, mirroring writeTo() field-for-field;
    // the result is memoized in memoizedSize (-1 means not yet computed).
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, getReqNum());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, getReqSize());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, getWriteNum());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, getWriteSize());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, getReadNum());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeMessageSize(6, getReadSize());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeMessageSize(7, getReqCapacityUnit());
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeMessageSize(8, getWriteCapacityUnit());
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeMessageSize(9, getReadCapacityUnit());
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }
    @java.lang.Override
    // Field-by-field structural equality: two Throttles are equal when each field
    // has the same presence flag and, if present, equal values; unknown fields
    // must match as well.
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle) obj;
      boolean result = true;
      result = result && (hasReqNum() == other.hasReqNum());
      if (hasReqNum()) {
        result = result && getReqNum()
            .equals(other.getReqNum());
      }
      result = result && (hasReqSize() == other.hasReqSize());
      if (hasReqSize()) {
        result = result && getReqSize()
            .equals(other.getReqSize());
      }
      result = result && (hasWriteNum() == other.hasWriteNum());
      if (hasWriteNum()) {
        result = result && getWriteNum()
            .equals(other.getWriteNum());
      }
      result = result && (hasWriteSize() == other.hasWriteSize());
      if (hasWriteSize()) {
        result = result && getWriteSize()
            .equals(other.getWriteSize());
      }
      result = result && (hasReadNum() == other.hasReadNum());
      if (hasReadNum()) {
        result = result && getReadNum()
            .equals(other.getReadNum());
      }
      result = result && (hasReadSize() == other.hasReadSize());
      if (hasReadSize()) {
        result = result && getReadSize()
            .equals(other.getReadSize());
      }
      result = result && (hasReqCapacityUnit() == other.hasReqCapacityUnit());
      if (hasReqCapacityUnit()) {
        result = result && getReqCapacityUnit()
            .equals(other.getReqCapacityUnit());
      }
      result = result && (hasWriteCapacityUnit() == other.hasWriteCapacityUnit());
      if (hasWriteCapacityUnit()) {
        result = result && getWriteCapacityUnit()
            .equals(other.getWriteCapacityUnit());
      }
      result = result && (hasReadCapacityUnit() == other.hasReadCapacityUnit());
      if (hasReadCapacityUnit()) {
        result = result && getReadCapacityUnit()
            .equals(other.getReadCapacityUnit());
      }
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }
    @java.lang.Override
    // Hash consistent with equals(): folds in each present field tagged by its
    // field number, plus the unknown fields; memoized (0 means not yet computed).
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasReqNum()) {
        hash = (37 * hash) + REQ_NUM_FIELD_NUMBER;
        hash = (53 * hash) + getReqNum().hashCode();
      }
      if (hasReqSize()) {
        hash = (37 * hash) + REQ_SIZE_FIELD_NUMBER;
        hash = (53 * hash) + getReqSize().hashCode();
      }
      if (hasWriteNum()) {
        hash = (37 * hash) + WRITE_NUM_FIELD_NUMBER;
        hash = (53 * hash) + getWriteNum().hashCode();
      }
      if (hasWriteSize()) {
        hash = (37 * hash) + WRITE_SIZE_FIELD_NUMBER;
        hash = (53 * hash) + getWriteSize().hashCode();
      }
      if (hasReadNum()) {
        hash = (37 * hash) + READ_NUM_FIELD_NUMBER;
        hash = (53 * hash) + getReadNum().hashCode();
      }
      if (hasReadSize()) {
        hash = (37 * hash) + READ_SIZE_FIELD_NUMBER;
        hash = (53 * hash) + getReadSize().hashCode();
      }
      if (hasReqCapacityUnit()) {
        hash = (37 * hash) + REQ_CAPACITY_UNIT_FIELD_NUMBER;
        hash = (53 * hash) + getReqCapacityUnit().hashCode();
      }
      if (hasWriteCapacityUnit()) {
        hash = (37 * hash) + WRITE_CAPACITY_UNIT_FIELD_NUMBER;
        hash = (53 * hash) + getWriteCapacityUnit().hashCode();
      }
      if (hasReadCapacityUnit()) {
        hash = (37 * hash) + READ_CAPACITY_UNIT_FIELD_NUMBER;
        hash = (53 * hash) + getReadCapacityUnit().hashCode();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Standard protobuf parse entry points: byte-oriented overloads delegate to
    // PARSER directly; stream-oriented overloads go through GeneratedMessageV3
    // helpers that translate protobuf exceptions into IOExceptions.
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom(byte[] data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom(
        byte[] data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom(
        java.io.InputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message payload.
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // Builder factories: fresh builders come from the default instance; seeded
    // builders copy all fields from a prototype message.
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      // The default instance yields an empty builder; any other instance is
      // copied into the new builder via mergeFrom.
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.Throttle}
     */
    @javax.annotation.Generated("proto") public static final class Builder extends
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.Throttle)
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder {
      // Descriptor plumbing shared with the outer Throttle message type.
      public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Throttle_descriptor;
      }
      protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Throttle_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the per-field sub-builders when the runtime requires
      // field builders (e.g. for nested-builder change propagation).
      private void maybeForceBuilderInitialization() {
        if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getReqNumFieldBuilder();
          getReqSizeFieldBuilder();
          getWriteNumFieldBuilder();
          getWriteSizeFieldBuilder();
          getReadNumFieldBuilder();
          getReadSizeFieldBuilder();
          getReqCapacityUnitFieldBuilder();
          getWriteCapacityUnitFieldBuilder();
          getReadCapacityUnitFieldBuilder();
        }
      }
      // Resets every field to unset: nulls (or clears the sub-builder of) each
      // sub-message and drops its presence bit from bitField0_.
      public Builder clear() {
        super.clear();
        if (reqNumBuilder_ == null) {
          reqNum_ = null;
        } else {
          reqNumBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (reqSizeBuilder_ == null) {
          reqSize_ = null;
        } else {
          reqSizeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        if (writeNumBuilder_ == null) {
          writeNum_ = null;
        } else {
          writeNumBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        if (writeSizeBuilder_ == null) {
          writeSize_ = null;
        } else {
          writeSizeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        if (readNumBuilder_ == null) {
          readNum_ = null;
        } else {
          readNumBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        if (readSizeBuilder_ == null) {
          readSize_ = null;
        } else {
          readSizeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        if (reqCapacityUnitBuilder_ == null) {
          reqCapacityUnit_ = null;
        } else {
          reqCapacityUnitBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        if (writeCapacityUnitBuilder_ == null) {
          writeCapacityUnit_ = null;
        } else {
          writeCapacityUnitBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000080);
        if (readCapacityUnitBuilder_ == null) {
          readCapacityUnit_ = null;
        } else {
          readCapacityUnitBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }
      public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Throttle_descriptor;
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance();
      }
      // Builds and validates; throws UninitializedMessageException (wrapped by
      // newUninitializedMessageException) if any set sub-message is incomplete.
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle build() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies builder state into a new Throttle without initialization checks.
      // For each field: propagate the presence bit, then take the value either
      // from the raw field or from its sub-builder if one was materialized.
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (reqNumBuilder_ == null) {
          result.reqNum_ = reqNum_;
        } else {
          result.reqNum_ = reqNumBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (reqSizeBuilder_ == null) {
          result.reqSize_ = reqSize_;
        } else {
          result.reqSize_ = reqSizeBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        if (writeNumBuilder_ == null) {
          result.writeNum_ = writeNum_;
        } else {
          result.writeNum_ = writeNumBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        if (writeSizeBuilder_ == null) {
          result.writeSize_ = writeSize_;
        } else {
          result.writeSize_ = writeSizeBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        if (readNumBuilder_ == null) {
          result.readNum_ = readNum_;
        } else {
          result.readNum_ = readNumBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        if (readSizeBuilder_ == null) {
          result.readSize_ = readSize_;
        } else {
          result.readSize_ = readSizeBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        if (reqCapacityUnitBuilder_ == null) {
          result.reqCapacityUnit_ = reqCapacityUnit_;
        } else {
          result.reqCapacityUnit_ = reqCapacityUnitBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000080;
        }
        if (writeCapacityUnitBuilder_ == null) {
          result.writeCapacityUnit_ = writeCapacityUnit_;
        } else {
          result.writeCapacityUnit_ = writeCapacityUnitBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000100;
        }
        if (readCapacityUnitBuilder_ == null) {
          result.readCapacityUnit_ = readCapacityUnit_;
        } else {
          result.readCapacityUnit_ = readCapacityUnitBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      // Covariant-return overrides that simply delegate to GeneratedMessageV3.Builder.
      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      // Dispatches to the typed mergeFrom(Throttle) when possible; otherwise
      // falls back to reflective merging in the superclass.
      public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Merges every field that is set on `other` into this builder (sub-message
      // merge semantics per field), then merges unknown fields. Merging the
      // default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance()) return this;
        if (other.hasReqNum()) {
          mergeReqNum(other.getReqNum());
        }
        if (other.hasReqSize()) {
          mergeReqSize(other.getReqSize());
        }
        if (other.hasWriteNum()) {
          mergeWriteNum(other.getWriteNum());
        }
        if (other.hasWriteSize()) {
          mergeWriteSize(other.getWriteSize());
        }
        if (other.hasReadNum()) {
          mergeReadNum(other.getReadNum());
        }
        if (other.hasReadSize()) {
          mergeReadSize(other.getReadSize());
        }
        if (other.hasReqCapacityUnit()) {
          mergeReqCapacityUnit(other.getReqCapacityUnit());
        }
        if (other.hasWriteCapacityUnit()) {
          mergeWriteCapacityUnit(other.getWriteCapacityUnit());
        }
        if (other.hasReadCapacityUnit()) {
          mergeReadCapacityUnit(other.getReadCapacityUnit());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }
      // Builder-side initialization check: same per-field recursion as the
      // message's isInitialized(), but without memoization (builder state mutates).
      public final boolean isInitialized() {
        if (hasReqNum()) {
          if (!getReqNum().isInitialized()) {
            return false;
          }
        }
        if (hasReqSize()) {
          if (!getReqSize().isInitialized()) {
            return false;
          }
        }
        if (hasWriteNum()) {
          if (!getWriteNum().isInitialized()) {
            return false;
          }
        }
        if (hasWriteSize()) {
          if (!getWriteSize().isInitialized()) {
            return false;
          }
        }
        if (hasReadNum()) {
          if (!getReadNum().isInitialized()) {
            return false;
          }
        }
        if (hasReadSize()) {
          if (!getReadSize().isInitialized()) {
            return false;
          }
        }
        if (hasReqCapacityUnit()) {
          if (!getReqCapacityUnit().isInitialized()) {
            return false;
          }
        }
        if (hasWriteCapacityUnit()) {
          if (!getWriteCapacityUnit().isInitialized()) {
            return false;
          }
        }
        if (hasReadCapacityUnit()) {
          if (!getReadCapacityUnit().isInitialized()) {
            return false;
          }
        }
        return true;
      }
      // Parses from a stream and merges into this builder. On parse failure the
      // partially-parsed message (if any) is still merged in the finally block
      // before the exception is rethrown as an IOException.
      public Builder mergeFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for the nine optional TimedQuota fields (0x1 .. 0x100).
      private int bitField0_;
      // Field 1 (req_num): raw value, plus a lazily-created nested builder.
      // When reqNumBuilder_ is non-null it owns the field and reqNum_ is ignored.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqNum_ = null;
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> reqNumBuilder_;
      /**
       * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
       */
      public boolean hasReqNum() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqNum() {
        if (reqNumBuilder_ == null) {
          return reqNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqNum_;
        } else {
          return reqNumBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
       */
      public Builder setReqNum(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (reqNumBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reqNum_ = value;
          onChanged();
        } else {
          reqNumBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
       */
      public Builder setReqNum(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (reqNumBuilder_ == null) {
          reqNum_ = builderForValue.build();
          onChanged();
        } else {
          reqNumBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
       */
      public Builder mergeReqNum(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (reqNumBuilder_ == null) {
          // Merge into the existing value only if one was already set and it is
          // not the shared default instance; otherwise just adopt `value`.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              reqNum_ != null &&
              reqNum_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            reqNum_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(reqNum_).mergeFrom(value).buildPartial();
          } else {
            reqNum_ = value;
          }
          onChanged();
        } else {
          reqNumBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
       */
      public Builder clearReqNum() {
        if (reqNumBuilder_ == null) {
          reqNum_ = null;
          onChanged();
        } else {
          reqNumBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder getReqNumBuilder() {
        // Marks the field present, since handing out a builder implies mutation.
        bitField0_ |= 0x00000001;
        onChanged();
        return getReqNumFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqNumOrBuilder() {
        if (reqNumBuilder_ != null) {
          return reqNumBuilder_.getMessageOrBuilder();
        } else {
          return reqNum_ == null ?
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqNum_;
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_num = 1;</code>
       */
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
          getReqNumFieldBuilder() {
        if (reqNumBuilder_ == null) {
          // Lazily create the sub-builder; it takes ownership of the current
          // value and reqNum_ is nulled to avoid dual sources of truth.
          reqNumBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  getReqNum(),
                  getParentForChildren(),
                  isClean());
          reqNum_ = null;
        }
        return reqNumBuilder_;
      }
      // Field 2 (req_size): same value/sub-builder pattern as req_num above.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqSize_ = null;
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> reqSizeBuilder_;
      /**
       * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
       */
      public boolean hasReqSize() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqSize() {
        if (reqSizeBuilder_ == null) {
          return reqSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqSize_;
        } else {
          return reqSizeBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
       */
      public Builder setReqSize(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (reqSizeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reqSize_ = value;
          onChanged();
        } else {
          reqSizeBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
       */
      public Builder setReqSize(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (reqSizeBuilder_ == null) {
          reqSize_ = builderForValue.build();
          onChanged();
        } else {
          reqSizeBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
       */
      public Builder mergeReqSize(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (reqSizeBuilder_ == null) {
          // Merge only into a real, already-set value; otherwise adopt `value`.
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              reqSize_ != null &&
              reqSize_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            reqSize_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(reqSize_).mergeFrom(value).buildPartial();
          } else {
            reqSize_ = value;
          }
          onChanged();
        } else {
          reqSizeBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
       */
      public Builder clearReqSize() {
        if (reqSizeBuilder_ == null) {
          reqSize_ = null;
          onChanged();
        } else {
          reqSizeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder getReqSizeBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getReqSizeFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqSizeOrBuilder() {
        if (reqSizeBuilder_ != null) {
          return reqSizeBuilder_.getMessageOrBuilder();
        } else {
          return reqSize_ == null ?
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqSize_;
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_size = 2;</code>
       */
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
          getReqSizeFieldBuilder() {
        if (reqSizeBuilder_ == null) {
          // Lazily create the sub-builder; it becomes the single source of truth.
          reqSizeBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  getReqSize(),
                  getParentForChildren(),
                  isClean());
          reqSize_ = null;
        }
        return reqSizeBuilder_;
      }
      // Field 3 (write_num): same value/sub-builder pattern as req_num above.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeNum_ = null;
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> writeNumBuilder_;
      /**
       * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
       */
      public boolean hasWriteNum() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteNum() {
        if (writeNumBuilder_ == null) {
          return writeNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeNum_;
        } else {
          return writeNumBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
       */
      public Builder setWriteNum(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (writeNumBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          writeNum_ = value;
          onChanged();
        } else {
          writeNumBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
       */
      public Builder setWriteNum(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (writeNumBuilder_ == null) {
          writeNum_ = builderForValue.build();
          onChanged();
        } else {
          writeNumBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
       */
      public Builder mergeWriteNum(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (writeNumBuilder_ == null) {
          // Merge only into a real, already-set value; otherwise adopt `value`.
          if (((bitField0_ & 0x00000004) == 0x00000004) &&
              writeNum_ != null &&
              writeNum_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            writeNum_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(writeNum_).mergeFrom(value).buildPartial();
          } else {
            writeNum_ = value;
          }
          onChanged();
        } else {
          writeNumBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
       */
      public Builder clearWriteNum() {
        if (writeNumBuilder_ == null) {
          writeNum_ = null;
          onChanged();
        } else {
          writeNumBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder getWriteNumBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getWriteNumFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteNumOrBuilder() {
        if (writeNumBuilder_ != null) {
          return writeNumBuilder_.getMessageOrBuilder();
        } else {
          return writeNum_ == null ?
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeNum_;
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_num = 3;</code>
       */
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
          getWriteNumFieldBuilder() {
        if (writeNumBuilder_ == null) {
          // Lazily create the sub-builder; it becomes the single source of truth.
          writeNumBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  getWriteNum(),
                  getParentForChildren(),
                  isClean());
          writeNum_ = null;
        }
        return writeNumBuilder_;
      }
      // NOTE(review): protoc-generated code — do not hand-edit; regenerate from the
      // .proto definition instead. Field write_size (tag 4) uses presence bit 0x00000008.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeSize_ = null;
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> writeSizeBuilder_;
      /**
       * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
       */
      public boolean hasWriteSize() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteSize() {
        if (writeSizeBuilder_ == null) {
          return writeSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeSize_;
        } else {
          return writeSizeBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
       */
      public Builder setWriteSize(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (writeSizeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          writeSize_ = value;
          onChanged();
        } else {
          writeSizeBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
       */
      public Builder setWriteSize(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (writeSizeBuilder_ == null) {
          writeSize_ = builderForValue.build();
          onChanged();
        } else {
          writeSizeBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
       */
      public Builder mergeWriteSize(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (writeSizeBuilder_ == null) {
          // Merge only when a non-default value is already present; otherwise replace.
          if (((bitField0_ & 0x00000008) == 0x00000008) &&
              writeSize_ != null &&
              writeSize_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            writeSize_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(writeSize_).mergeFrom(value).buildPartial();
          } else {
            writeSize_ = value;
          }
          onChanged();
        } else {
          writeSizeBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
       */
      public Builder clearWriteSize() {
        if (writeSizeBuilder_ == null) {
          writeSize_ = null;
          onChanged();
        } else {
          writeSizeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder getWriteSizeBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getWriteSizeFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteSizeOrBuilder() {
        if (writeSizeBuilder_ != null) {
          return writeSizeBuilder_.getMessageOrBuilder();
        } else {
          return writeSize_ == null ?
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeSize_;
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_size = 4;</code>
       */
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> 
          getWriteSizeFieldBuilder() {
        if (writeSizeBuilder_ == null) {
          // Lazily create the sub-builder; ownership of the value transfers to it.
          writeSizeBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  getWriteSize(),
                  getParentForChildren(),
                  isClean());
          writeSize_ = null;
        }
        return writeSizeBuilder_;
      }
      // NOTE(review): protoc-generated code — do not hand-edit; regenerate from the
      // .proto definition instead. Field read_num (tag 5) uses presence bit 0x00000010.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readNum_ = null;
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> readNumBuilder_;
      /**
       * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
       */
      public boolean hasReadNum() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadNum() {
        if (readNumBuilder_ == null) {
          return readNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readNum_;
        } else {
          return readNumBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
       */
      public Builder setReadNum(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (readNumBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          readNum_ = value;
          onChanged();
        } else {
          readNumBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
       */
      public Builder setReadNum(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (readNumBuilder_ == null) {
          readNum_ = builderForValue.build();
          onChanged();
        } else {
          readNumBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
       */
      public Builder mergeReadNum(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (readNumBuilder_ == null) {
          // Merge only when a non-default value is already present; otherwise replace.
          if (((bitField0_ & 0x00000010) == 0x00000010) &&
              readNum_ != null &&
              readNum_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            readNum_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(readNum_).mergeFrom(value).buildPartial();
          } else {
            readNum_ = value;
          }
          onChanged();
        } else {
          readNumBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
       */
      public Builder clearReadNum() {
        if (readNumBuilder_ == null) {
          readNum_ = null;
          onChanged();
        } else {
          readNumBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder getReadNumBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getReadNumFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadNumOrBuilder() {
        if (readNumBuilder_ != null) {
          return readNumBuilder_.getMessageOrBuilder();
        } else {
          return readNum_ == null ?
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readNum_;
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_num = 5;</code>
       */
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> 
          getReadNumFieldBuilder() {
        if (readNumBuilder_ == null) {
          // Lazily create the sub-builder; ownership of the value transfers to it.
          readNumBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  getReadNum(),
                  getParentForChildren(),
                  isClean());
          readNum_ = null;
        }
        return readNumBuilder_;
      }
      // NOTE(review): protoc-generated code — do not hand-edit; regenerate from the
      // .proto definition instead. Field read_size (tag 6) uses presence bit 0x00000020.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readSize_ = null;
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> readSizeBuilder_;
      /**
       * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
       */
      public boolean hasReadSize() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadSize() {
        if (readSizeBuilder_ == null) {
          return readSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readSize_;
        } else {
          return readSizeBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
       */
      public Builder setReadSize(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (readSizeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          readSize_ = value;
          onChanged();
        } else {
          readSizeBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
       */
      public Builder setReadSize(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (readSizeBuilder_ == null) {
          readSize_ = builderForValue.build();
          onChanged();
        } else {
          readSizeBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
       */
      public Builder mergeReadSize(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (readSizeBuilder_ == null) {
          // Merge only when a non-default value is already present; otherwise replace.
          if (((bitField0_ & 0x00000020) == 0x00000020) &&
              readSize_ != null &&
              readSize_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            readSize_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(readSize_).mergeFrom(value).buildPartial();
          } else {
            readSize_ = value;
          }
          onChanged();
        } else {
          readSizeBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
       */
      public Builder clearReadSize() {
        if (readSizeBuilder_ == null) {
          readSize_ = null;
          onChanged();
        } else {
          readSizeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder getReadSizeBuilder() {
        bitField0_ |= 0x00000020;
        onChanged();
        return getReadSizeFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadSizeOrBuilder() {
        if (readSizeBuilder_ != null) {
          return readSizeBuilder_.getMessageOrBuilder();
        } else {
          return readSize_ == null ?
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readSize_;
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_size = 6;</code>
       */
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> 
          getReadSizeFieldBuilder() {
        if (readSizeBuilder_ == null) {
          // Lazily create the sub-builder; ownership of the value transfers to it.
          readSizeBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  getReadSize(),
                  getParentForChildren(),
                  isClean());
          readSize_ = null;
        }
        return readSizeBuilder_;
      }
      // NOTE(review): protoc-generated code — do not hand-edit; regenerate from the
      // .proto definition instead. Field req_capacity_unit (tag 7) uses presence bit 0x00000040.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqCapacityUnit_ = null;
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> reqCapacityUnitBuilder_;
      /**
       * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
       */
      public boolean hasReqCapacityUnit() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqCapacityUnit() {
        if (reqCapacityUnitBuilder_ == null) {
          return reqCapacityUnit_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqCapacityUnit_;
        } else {
          return reqCapacityUnitBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
       */
      public Builder setReqCapacityUnit(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (reqCapacityUnitBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reqCapacityUnit_ = value;
          onChanged();
        } else {
          reqCapacityUnitBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
       */
      public Builder setReqCapacityUnit(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (reqCapacityUnitBuilder_ == null) {
          reqCapacityUnit_ = builderForValue.build();
          onChanged();
        } else {
          reqCapacityUnitBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
       */
      public Builder mergeReqCapacityUnit(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (reqCapacityUnitBuilder_ == null) {
          // Merge only when a non-default value is already present; otherwise replace.
          if (((bitField0_ & 0x00000040) == 0x00000040) &&
              reqCapacityUnit_ != null &&
              reqCapacityUnit_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            reqCapacityUnit_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(reqCapacityUnit_).mergeFrom(value).buildPartial();
          } else {
            reqCapacityUnit_ = value;
          }
          onChanged();
        } else {
          reqCapacityUnitBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
       */
      public Builder clearReqCapacityUnit() {
        if (reqCapacityUnitBuilder_ == null) {
          reqCapacityUnit_ = null;
          onChanged();
        } else {
          reqCapacityUnitBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder getReqCapacityUnitBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getReqCapacityUnitFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqCapacityUnitOrBuilder() {
        if (reqCapacityUnitBuilder_ != null) {
          return reqCapacityUnitBuilder_.getMessageOrBuilder();
        } else {
          return reqCapacityUnit_ == null ?
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqCapacityUnit_;
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota req_capacity_unit = 7;</code>
       */
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> 
          getReqCapacityUnitFieldBuilder() {
        if (reqCapacityUnitBuilder_ == null) {
          // Lazily create the sub-builder; ownership of the value transfers to it.
          reqCapacityUnitBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  getReqCapacityUnit(),
                  getParentForChildren(),
                  isClean());
          reqCapacityUnit_ = null;
        }
        return reqCapacityUnitBuilder_;
      }
      // NOTE(review): protoc-generated code — do not hand-edit; regenerate from the
      // .proto definition instead. Field write_capacity_unit (tag 8) uses presence bit 0x00000080.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeCapacityUnit_ = null;
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> writeCapacityUnitBuilder_;
      /**
       * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
       */
      public boolean hasWriteCapacityUnit() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteCapacityUnit() {
        if (writeCapacityUnitBuilder_ == null) {
          return writeCapacityUnit_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeCapacityUnit_;
        } else {
          return writeCapacityUnitBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
       */
      public Builder setWriteCapacityUnit(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (writeCapacityUnitBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          writeCapacityUnit_ = value;
          onChanged();
        } else {
          writeCapacityUnitBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000080;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
       */
      public Builder setWriteCapacityUnit(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (writeCapacityUnitBuilder_ == null) {
          writeCapacityUnit_ = builderForValue.build();
          onChanged();
        } else {
          writeCapacityUnitBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000080;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
       */
      public Builder mergeWriteCapacityUnit(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (writeCapacityUnitBuilder_ == null) {
          // Merge only when a non-default value is already present; otherwise replace.
          if (((bitField0_ & 0x00000080) == 0x00000080) &&
              writeCapacityUnit_ != null &&
              writeCapacityUnit_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            writeCapacityUnit_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(writeCapacityUnit_).mergeFrom(value).buildPartial();
          } else {
            writeCapacityUnit_ = value;
          }
          onChanged();
        } else {
          writeCapacityUnitBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000080;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
       */
      public Builder clearWriteCapacityUnit() {
        if (writeCapacityUnitBuilder_ == null) {
          writeCapacityUnit_ = null;
          onChanged();
        } else {
          writeCapacityUnitBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000080);
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder getWriteCapacityUnitBuilder() {
        bitField0_ |= 0x00000080;
        onChanged();
        return getWriteCapacityUnitFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteCapacityUnitOrBuilder() {
        if (writeCapacityUnitBuilder_ != null) {
          return writeCapacityUnitBuilder_.getMessageOrBuilder();
        } else {
          return writeCapacityUnit_ == null ?
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeCapacityUnit_;
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota write_capacity_unit = 8;</code>
       */
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> 
          getWriteCapacityUnitFieldBuilder() {
        if (writeCapacityUnitBuilder_ == null) {
          // Lazily create the sub-builder; ownership of the value transfers to it.
          writeCapacityUnitBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  getWriteCapacityUnit(),
                  getParentForChildren(),
                  isClean());
          writeCapacityUnit_ = null;
        }
        return writeCapacityUnitBuilder_;
      }
      // NOTE(review): protoc-generated code — do not hand-edit; regenerate from the
      // .proto definition instead. Field read_capacity_unit (tag 9) uses presence bit 0x00000100.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readCapacityUnit_ = null;
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> readCapacityUnitBuilder_;
      /**
       * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
       */
      public boolean hasReadCapacityUnit() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadCapacityUnit() {
        if (readCapacityUnitBuilder_ == null) {
          return readCapacityUnit_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readCapacityUnit_;
        } else {
          return readCapacityUnitBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
       */
      public Builder setReadCapacityUnit(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (readCapacityUnitBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          readCapacityUnit_ = value;
          onChanged();
        } else {
          readCapacityUnitBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000100;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
       */
      public Builder setReadCapacityUnit(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (readCapacityUnitBuilder_ == null) {
          readCapacityUnit_ = builderForValue.build();
          onChanged();
        } else {
          readCapacityUnitBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000100;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
       */
      public Builder mergeReadCapacityUnit(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (readCapacityUnitBuilder_ == null) {
          // Merge only when a non-default value is already present; otherwise replace.
          if (((bitField0_ & 0x00000100) == 0x00000100) &&
              readCapacityUnit_ != null &&
              readCapacityUnit_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            readCapacityUnit_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(readCapacityUnit_).mergeFrom(value).buildPartial();
          } else {
            readCapacityUnit_ = value;
          }
          onChanged();
        } else {
          readCapacityUnitBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000100;
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
       */
      public Builder clearReadCapacityUnit() {
        if (readCapacityUnitBuilder_ == null) {
          readCapacityUnit_ = null;
          onChanged();
        } else {
          readCapacityUnitBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder getReadCapacityUnitBuilder() {
        bitField0_ |= 0x00000100;
        onChanged();
        return getReadCapacityUnitFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadCapacityUnitOrBuilder() {
        if (readCapacityUnitBuilder_ != null) {
          return readCapacityUnitBuilder_.getMessageOrBuilder();
        } else {
          return readCapacityUnit_ == null ?
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readCapacityUnit_;
        }
      }
      /**
       * <code>optional .hbase.pb.TimedQuota read_capacity_unit = 9;</code>
       */
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> 
          getReadCapacityUnitFieldBuilder() {
        if (readCapacityUnitBuilder_ == null) {
          // Lazily create the sub-builder; ownership of the value transfers to it.
          readCapacityUnitBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  getReadCapacityUnit(),
                  getParentForChildren(),
                  isClean());
          readCapacityUnit_ = null;
        }
        return readCapacityUnitBuilder_;
      }
      // Generated overrides: delegate unknown-field handling to GeneratedMessageV3.Builder
      // so unrecognized wire data is preserved across a parse/serialize round trip.
      public final Builder setUnknownFields(
          final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      public final Builder mergeUnknownFields(
          final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
// @@protoc_insertion_point(builder_scope:hbase.pb.Throttle)
}
// @@protoc_insertion_point(class_scope:hbase.pb.Throttle)
    // Singleton default instance for Throttle (all fields absent); shared by
    // getDefaultInstance() and getDefaultInstanceForType().
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle();
    }

    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Deprecated by the protobuf runtime in favor of parser(); kept for generated-code
    // compatibility. Delegates to the parsing constructor.
    @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<Throttle>
        PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<Throttle>() {
      public Throttle parsePartialFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
          return new Throttle(input, extensionRegistry);
      }
    };

    public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<Throttle> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hbase.thirdparty.com.google.protobuf.Parser<Throttle> getParserForType() {
      return PARSER;
    }

    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
}
  // Generated read-only view over a ThrottleRequest message or its builder.
  // Both optional fields expose a has*/get* pair; get* returns the default
  // instance when the field is absent.
  public interface ThrottleRequestOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.ThrottleRequest)
      org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hbase.pb.ThrottleType type = 1;</code>
     */
    boolean hasType();
    /**
     * <code>optional .hbase.pb.ThrottleType type = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType getType();

    /**
     * <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
     */
    boolean hasTimedQuota();
    /**
     * <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getTimedQuota();
    /**
     * <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getTimedQuotaOrBuilder();
  }
/**
* Protobuf type {@code hbase.pb.ThrottleRequest}
*/
@javax.annotation.Generated("proto") public static final class ThrottleRequest extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.ThrottleRequest)
ThrottleRequestOrBuilder {
private static final long serialVersionUID = 0L;
    // Use ThrottleRequest.newBuilder() to construct.
    // Private: instances are only created through the Builder.
    private ThrottleRequest(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    // Default-instance constructor; 1 is the wire number of the first ThrottleType
    // enum value, used as the default for the type_ field.
    private ThrottleRequest() {
      type_ = 1;
    }
    // Exposes wire data that did not match any known field number.
    @java.lang.Override
    public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0).
    // Fields may arrive in any order; unrecognized tags are preserved in
    // unknownFields rather than dropped.
    private ThrottleRequest(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // type = 1 (enum, varint): out-of-range values are kept as unknown
              // fields instead of being rejected.
              int rawValue = input.readEnum();
                org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType value = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                type_ = rawValue;
              }
              break;
            }
            case 18: {
              // timed_quota = 2 (message): a repeated occurrence merges into the
              // previously-parsed value, per protobuf semantics.
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = timedQuota_.toBuilder();
              }
              timedQuota_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(timedQuota_);
                timedQuota_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Always freeze unknown fields and extensions, even on a failed parse,
        // so the partially-built message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_ThrottleRequest_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_ThrottleRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.Builder.class);
}
private int bitField0_;
public static final int TYPE_FIELD_NUMBER = 1;
private int type_;
/**
* <code>optional .hbase.pb.ThrottleType type = 1;</code>
*/
public boolean hasType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .hbase.pb.ThrottleType type = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType getType() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.valueOf(type_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER : result;
}
public static final int TIMED_QUOTA_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota timedQuota_;
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
public boolean hasTimedQuota() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getTimedQuota() {
return timedQuota_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : timedQuota_;
}
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getTimedQuotaOrBuilder() {
return timedQuota_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : timedQuota_;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasTimedQuota()) {
if (!getTimedQuota().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, type_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, getTimedQuota());
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, type_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getTimedQuota());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest) obj;
boolean result = true;
result = result && (hasType() == other.hasType());
if (hasType()) {
result = result && type_ == other.type_;
}
result = result && (hasTimedQuota() == other.hasTimedQuota());
if (hasTimedQuota()) {
result = result && getTimedQuota()
.equals(other.getTimedQuota());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasType()) {
hash = (37 * hash) + TYPE_FIELD_NUMBER;
hash = (53 * hash) + type_;
}
if (hasTimedQuota()) {
hash = (37 * hash) + TIMED_QUOTA_FIELD_NUMBER;
hash = (53 * hash) + getTimedQuota().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.ThrottleRequest}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.ThrottleRequest)
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_ThrottleRequest_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_ThrottleRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTimedQuotaFieldBuilder();
}
}
public Builder clear() {
super.clear();
type_ = 1;
bitField0_ = (bitField0_ & ~0x00000001);
if (timedQuotaBuilder_ == null) {
timedQuota_ = null;
} else {
timedQuotaBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_ThrottleRequest_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance();
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.type_ = type_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (timedQuotaBuilder_ == null) {
result.timedQuota_ = timedQuota_;
} else {
result.timedQuota_ = timedQuotaBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance()) return this;
if (other.hasType()) {
setType(other.getType());
}
if (other.hasTimedQuota()) {
mergeTimedQuota(other.getTimedQuota());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
if (hasTimedQuota()) {
if (!getTimedQuota().isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int type_ = 1;
/**
* <code>optional .hbase.pb.ThrottleType type = 1;</code>
*/
public boolean hasType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .hbase.pb.ThrottleType type = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType getType() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.valueOf(type_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER : result;
}
/**
* <code>optional .hbase.pb.ThrottleType type = 1;</code>
*/
public Builder setType(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
type_ = value.getNumber();
onChanged();
return this;
}
/**
* <code>optional .hbase.pb.ThrottleType type = 1;</code>
*/
public Builder clearType() {
bitField0_ = (bitField0_ & ~0x00000001);
type_ = 1;
onChanged();
return this;
}
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota timedQuota_ = null;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> timedQuotaBuilder_;
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
public boolean hasTimedQuota() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getTimedQuota() {
if (timedQuotaBuilder_ == null) {
return timedQuota_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : timedQuota_;
} else {
return timedQuotaBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
public Builder setTimedQuota(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
if (timedQuotaBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
timedQuota_ = value;
onChanged();
} else {
timedQuotaBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
public Builder setTimedQuota(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
if (timedQuotaBuilder_ == null) {
timedQuota_ = builderForValue.build();
onChanged();
} else {
timedQuotaBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
public Builder mergeTimedQuota(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) {
if (timedQuotaBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
timedQuota_ != null &&
timedQuota_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
timedQuota_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(timedQuota_).mergeFrom(value).buildPartial();
} else {
timedQuota_ = value;
}
onChanged();
} else {
timedQuotaBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
public Builder clearTimedQuota() {
if (timedQuotaBuilder_ == null) {
timedQuota_ = null;
onChanged();
} else {
timedQuotaBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder getTimedQuotaBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTimedQuotaFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getTimedQuotaOrBuilder() {
if (timedQuotaBuilder_ != null) {
return timedQuotaBuilder_.getMessageOrBuilder();
} else {
return timedQuota_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : timedQuota_;
}
}
/**
* <code>optional .hbase.pb.TimedQuota timed_quota = 2;</code>
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
getTimedQuotaFieldBuilder() {
if (timedQuotaBuilder_ == null) {
timedQuotaBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
getTimedQuota(),
getParentForChildren(),
isClean());
timedQuota_ = null;
}
return timedQuotaBuilder_;
}
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.ThrottleRequest)
}
// @@protoc_insertion_point(class_scope:hbase.pb.ThrottleRequest)
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<ThrottleRequest>
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<ThrottleRequest>() {
public ThrottleRequest parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new ThrottleRequest(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<ThrottleRequest> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<ThrottleRequest> getParserForType() {
return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
  /**
   * Read-only accessor interface for {@code hbase.pb.Quotas}, implemented by both the
   * generated message and its builder. Declares presence checks and getters for the
   * optional {@code bypass_globals}, {@code throttle} and {@code space} fields.
   * Protoc-generated; do not edit by hand.
   */
  public interface QuotasOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.Quotas)
      org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {

    /**
     * <code>optional bool bypass_globals = 1 [default = false];</code>
     */
    boolean hasBypassGlobals();
    /**
     * <code>optional bool bypass_globals = 1 [default = false];</code>
     */
    boolean getBypassGlobals();

    /**
     * <code>optional .hbase.pb.Throttle throttle = 2;</code>
     */
    boolean hasThrottle();
    /**
     * <code>optional .hbase.pb.Throttle throttle = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle getThrottle();
    /**
     * <code>optional .hbase.pb.Throttle throttle = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder getThrottleOrBuilder();

    /**
     * <code>optional .hbase.pb.SpaceQuota space = 3;</code>
     */
    boolean hasSpace();
    /**
     * <code>optional .hbase.pb.SpaceQuota space = 3;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota getSpace();
    /**
     * <code>optional .hbase.pb.SpaceQuota space = 3;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder getSpaceOrBuilder();
  }
/**
* Protobuf type {@code hbase.pb.Quotas}
*/
@javax.annotation.Generated("proto") public static final class Quotas extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.Quotas)
QuotasOrBuilder {
private static final long serialVersionUID = 0L;
// Use Quotas.newBuilder() to construct.
private Quotas(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Quotas() {
bypassGlobals_ = false;
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Quotas(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
bypassGlobals_ = input.readBool();
break;
}
case 18: {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = throttle_.toBuilder();
}
throttle_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(throttle_);
throttle_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
case 26: {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = space_.toBuilder();
}
space_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(space_);
space_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Quotas_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Quotas_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas.Builder.class);
}
private int bitField0_;
public static final int BYPASS_GLOBALS_FIELD_NUMBER = 1;
private boolean bypassGlobals_;
/**
* <code>optional bool bypass_globals = 1 [default = false];</code>
*/
public boolean hasBypassGlobals() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bool bypass_globals = 1 [default = false];</code>
*/
public boolean getBypassGlobals() {
return bypassGlobals_;
}
public static final int THROTTLE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle throttle_;
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
public boolean hasThrottle() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle getThrottle() {
return throttle_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance() : throttle_;
}
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder getThrottleOrBuilder() {
return throttle_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance() : throttle_;
}
public static final int SPACE_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota space_;
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
public boolean hasSpace() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota getSpace() {
return space_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance() : space_;
}
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder getSpaceOrBuilder() {
return space_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance() : space_;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasThrottle()) {
if (!getThrottle().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBool(1, bypassGlobals_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, getThrottle());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(3, getSpace());
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeBoolSize(1, bypassGlobals_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getThrottle());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(3, getSpace());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality for Quotas: field presence must agree, present values
// must match, and the retained unknown fields must be equal. Rewritten in
// early-return style; short-circuit semantics are identical to the generated
// accumulate-into-result form.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas that = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas) obj;
if (hasBypassGlobals() != that.hasBypassGlobals()) {
return false;
}
if (hasBypassGlobals() && getBypassGlobals() != that.getBypassGlobals()) {
return false;
}
if (hasThrottle() != that.hasThrottle()) {
return false;
}
if (hasThrottle() && !getThrottle().equals(that.getThrottle())) {
return false;
}
if (hasSpace() != that.hasSpace()) {
return false;
}
if (hasSpace() && !getSpace().equals(that.getSpace())) {
return false;
}
return unknownFields.equals(that.unknownFields);
}
// Hash code consistent with equals(): mixes the descriptor, each set field
// (tagged by its field number) and the unknown fields, using the generated
// 19/37/53/29 multiplier scheme. Cached in memoizedHashCode (0 = not cached).
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int h = 41;
h = (19 * h) + getDescriptor().hashCode();
if (hasBypassGlobals()) {
h = (37 * h) + BYPASS_GLOBALS_FIELD_NUMBER;
h = (53 * h) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashBoolean(
getBypassGlobals());
}
if (hasThrottle()) {
h = (37 * h) + THROTTLE_FIELD_NUMBER;
h = (53 * h) + getThrottle().hashCode();
}
if (hasSpace()) {
h = (37 * h) + SPACE_FIELD_NUMBER;
h = (53 * h) + getSpace().hashCode();
}
h = (29 * h) + unknownFields.hashCode();
memoizedHashCode = h;
return h;
}
// ---------------------------------------------------------------------------
// Static parsing entry points for Quotas, one per input source (ByteBuffer,
// ByteString, byte[], InputStream, length-delimited stream, CodedInputStream),
// each with and without an ExtensionRegistryLite. All delegate to PARSER;
// the stream variants route through GeneratedMessageV3 helpers that convert
// protobuf exceptions to IOException.
// ---------------------------------------------------------------------------
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories: a fresh Builder is a copy of the (empty) default
// instance; newBuilder(prototype) pre-populates from an existing message.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// The default instance yields an empty Builder directly; any other instance
// is merged into a new Builder so the copy carries this message's fields.
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Builder for {@code hbase.pb.Quotas}: mutable companion used to assemble a
* Quotas message (fields: bypass_globals = 1, throttle = 2, space = 3).
* Presence is tracked in bitField0_ (0x1/0x2/0x4 respectively); the message
* sub-fields may be backed either by a plain value or by a nested
* SingleFieldBuilderV3 once a sub-builder has been requested.
*
* Protobuf type {@code hbase.pb.Quotas}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.Quotas)
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotasOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Quotas_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Quotas_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested field builders when the runtime requests it
// (alwaysUseFieldBuilders); otherwise they are created lazily on first use.
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getThrottleFieldBuilder();
getSpaceFieldBuilder();
}
}
// Resets every field to its default and clears all presence bits.
public Builder clear() {
super.clear();
bypassGlobals_ = false;
bitField0_ = (bitField0_ & ~0x00000001);
if (throttleBuilder_ == null) {
throttle_ = null;
} else {
throttleBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
if (spaceBuilder_ == null) {
space_ = null;
} else {
spaceBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Quotas_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas.getDefaultInstance();
}
// Like buildPartial(), but rejects messages whose required invariants fail
// (see Quotas.isInitialized(): a set throttle must itself be initialized).
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder state into a new immutable Quotas, translating the
// builder's presence bits into the message's bitField0_ and materializing
// sub-messages from their nested builders when those exist.
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.bypassGlobals_ = bypassGlobals_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (throttleBuilder_ == null) {
result.throttle_ = throttle_;
} else {
result.throttle_ = throttleBuilder_.build();
}
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
if (spaceBuilder_ == null) {
result.space_ = space_;
} else {
result.space_ = spaceBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Dynamic merge entry point: dispatches to the typed overload for Quotas,
// falling back to reflective merging for any other message type.
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: only fields set on `other` overwrite/merge into this
// builder; message fields merge recursively, unknown fields are appended.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas.getDefaultInstance()) return this;
if (other.hasBypassGlobals()) {
setBypassGlobals(other.getBypassGlobals());
}
if (other.hasThrottle()) {
mergeThrottle(other.getThrottle());
}
if (other.hasSpace()) {
mergeSpace(other.getSpace());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
if (hasThrottle()) {
if (!getThrottle().isInitialized()) {
return false;
}
}
return true;
}
// Parses from the wire into this builder. On parse failure the partially
// decoded message (if any) is still merged in before the exception
// propagates, so already-read fields are not lost.
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits: 0x1 = bypass_globals, 0x2 = throttle, 0x4 = space.
private int bitField0_;
private boolean bypassGlobals_ ;
/**
* <code>optional bool bypass_globals = 1 [default = false];</code>
*/
public boolean hasBypassGlobals() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bool bypass_globals = 1 [default = false];</code>
*/
public boolean getBypassGlobals() {
return bypassGlobals_;
}
/**
* <code>optional bool bypass_globals = 1 [default = false];</code>
*/
public Builder setBypassGlobals(boolean value) {
bitField0_ |= 0x00000001;
bypassGlobals_ = value;
onChanged();
return this;
}
/**
* <code>optional bool bypass_globals = 1 [default = false];</code>
*/
public Builder clearBypassGlobals() {
bitField0_ = (bitField0_ & ~0x00000001);
bypassGlobals_ = false;
onChanged();
return this;
}
// throttle (field 2): either throttle_ holds the value directly, or
// throttleBuilder_ (once created) becomes the single source of truth.
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle throttle_ = null;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder> throttleBuilder_;
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
public boolean hasThrottle() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle getThrottle() {
if (throttleBuilder_ == null) {
return throttle_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance() : throttle_;
} else {
return throttleBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
public Builder setThrottle(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle value) {
if (throttleBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
throttle_ = value;
onChanged();
} else {
throttleBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
public Builder setThrottle(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder builderForValue) {
if (throttleBuilder_ == null) {
throttle_ = builderForValue.build();
onChanged();
} else {
throttleBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
// Merges `value` into the current throttle when one is already set and is
// not the default instance; otherwise simply adopts `value`.
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
public Builder mergeThrottle(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle value) {
if (throttleBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
throttle_ != null &&
throttle_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance()) {
throttle_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.newBuilder(throttle_).mergeFrom(value).buildPartial();
} else {
throttle_ = value;
}
onChanged();
} else {
throttleBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
public Builder clearThrottle() {
if (throttleBuilder_ == null) {
throttle_ = null;
onChanged();
} else {
throttleBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder getThrottleBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getThrottleFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder getThrottleOrBuilder() {
if (throttleBuilder_ != null) {
return throttleBuilder_.getMessageOrBuilder();
} else {
return throttle_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance() : throttle_;
}
}
// Lazily creates the nested field builder; from then on throttle_ is nulled
// and the builder owns the field's state.
/**
* <code>optional .hbase.pb.Throttle throttle = 2;</code>
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder>
getThrottleFieldBuilder() {
if (throttleBuilder_ == null) {
throttleBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder>(
getThrottle(),
getParentForChildren(),
isClean());
throttle_ = null;
}
return throttleBuilder_;
}
// space (field 3): same value-or-nested-builder pattern as throttle above.
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota space_ = null;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder> spaceBuilder_;
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
public boolean hasSpace() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota getSpace() {
if (spaceBuilder_ == null) {
return space_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance() : space_;
} else {
return spaceBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
public Builder setSpace(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota value) {
if (spaceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
space_ = value;
onChanged();
} else {
spaceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
public Builder setSpace(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder builderForValue) {
if (spaceBuilder_ == null) {
space_ = builderForValue.build();
onChanged();
} else {
spaceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
public Builder mergeSpace(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota value) {
if (spaceBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004) &&
space_ != null &&
space_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance()) {
space_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.newBuilder(space_).mergeFrom(value).buildPartial();
} else {
space_ = value;
}
onChanged();
} else {
spaceBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
public Builder clearSpace() {
if (spaceBuilder_ == null) {
space_ = null;
onChanged();
} else {
spaceBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder getSpaceBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getSpaceFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder getSpaceOrBuilder() {
if (spaceBuilder_ != null) {
return spaceBuilder_.getMessageOrBuilder();
} else {
return space_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance() : space_;
}
}
/**
* <code>optional .hbase.pb.SpaceQuota space = 3;</code>
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder>
getSpaceFieldBuilder() {
if (spaceBuilder_ == null) {
spaceBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder>(
getSpace(),
getParentForChildren(),
isClean());
space_ = null;
}
return spaceBuilder_;
}
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.Quotas)
}
// @@protoc_insertion_point(class_scope:hbase.pb.Quotas)
// Singleton empty Quotas instance, created eagerly in the static initializer.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated as a public field; callers should use parser(). Delegates to the
// wire-parsing constructor of Quotas.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<Quotas>
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<Quotas>() {
public Quotas parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new Quotas(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<Quotas> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<Quotas> getParserForType() {
return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only view interface for QuotaUsage. The message declares no fields,
// so the interface adds nothing beyond MessageOrBuilder.
public interface QuotaUsageOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.QuotaUsage)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hbase.pb.QuotaUsage}
*/
@javax.annotation.Generated("proto") public static final class QuotaUsage extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.QuotaUsage)
QuotaUsageOrBuilder {
private static final long serialVersionUID = 0L;
// Use QuotaUsage.newBuilder() to construct.
private QuotaUsage(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No fields to initialize: QuotaUsage is an empty message.
private QuotaUsage() {
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-parsing constructor: QuotaUsage declares no fields, so every non-zero
// tag is routed to the unknown-field set; tag 0 marks end of input. The
// finally block always freezes whatever was accumulated so a partially
// parsed message (attached to the thrown exception) is still well-formed.
private QuotaUsage(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection support: descriptor and field-accessor table for QuotaUsage,
// backed by statics on the enclosing QuotaProtos outer class.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_QuotaUsage_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_QuotaUsage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage.Builder.class);
}
// Cached result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// No required fields exist, so QuotaUsage is always initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// With no declared fields, serialization and sizing reduce to the retained
// unknown fields (memoizedSize caches the computed size; -1 = not computed).
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// QuotaUsage declares no fields, so equality and hashing reduce to the
// descriptor and the retained unknown-field set. Behavior matches the
// generated accumulate-into-result form exactly.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage that = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage) obj;
return unknownFields.equals(that.unknownFields);
}
@java.lang.Override
public int hashCode() {
// 0 doubles as the "not yet cached" sentinel.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int h = 41;
h = (19 * h) + getDescriptor().hashCode();
h = (29 * h) + unknownFields.hashCode();
memoizedHashCode = h;
return h;
}
// ---------------------------------------------------------------------------
// Static parsing entry points for QuotaUsage, one per input source, each with
// and without an ExtensionRegistryLite. All delegate to PARSER; the stream
// variants go through GeneratedMessageV3 helpers that surface IOException.
// ---------------------------------------------------------------------------
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories: a fresh Builder is a copy of the (empty) default
// instance; newBuilder(prototype) pre-populates from an existing message.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// The default instance yields an empty Builder directly; any other instance
// is merged into a new Builder so the copy carries this message's state.
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.QuotaUsage}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.QuotaUsage)
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsageOrBuilder {
// Reflection support for the QuotaUsage builder, sharing the message's
// descriptor and field-accessor table from the QuotaProtos outer class.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_QuotaUsage_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_QuotaUsage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
return this;
}
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_QuotaUsage_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage.getDefaultInstance();
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage(this);
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.QuotaUsage)
}
// @@protoc_insertion_point(class_scope:hbase.pb.QuotaUsage)
    // Shared singleton default (empty) instance; also the seed for newBuilder().
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage();
    }
    /** Returns the shared default (empty) instance of this message type. */
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
    /**
     * Parser singleton; the field is deprecated (callers use {@link #parser()}),
     * and delegates to the wire-format parsing constructor.
     */
    @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<QuotaUsage>
        PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<QuotaUsage>() {
      public QuotaUsage parsePartialFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        return new QuotaUsage(input, extensionRegistry);
      }
    };
    /** Preferred accessor for the message parser. */
    public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<QuotaUsage> parser() {
      return PARSER;
    }
    /** Instance-level parser accessor required by the Message contract. */
    @java.lang.Override
    public org.apache.hbase.thirdparty.com.google.protobuf.Parser<QuotaUsage> getParserForType() {
      return PARSER;
    }
    /** Returns the shared default instance (instance-level accessor). */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
}
  /**
   * Read-only accessor interface for {@code hbase.pb.SpaceQuota}: presence
   * checks and getters for the soft_limit, violation_policy and remove fields.
   */
  public interface SpaceQuotaOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.SpaceQuota)
      org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
    /**
     * <pre>
     * The limit of bytes for this quota
     * </pre>
     *
     * <code>optional uint64 soft_limit = 1;</code>
     */
    boolean hasSoftLimit();
    /**
     * <pre>
     * The limit of bytes for this quota
     * </pre>
     *
     * <code>optional uint64 soft_limit = 1;</code>
     */
    long getSoftLimit();
    /**
     * <pre>
     * The action to take when the quota is violated
     * </pre>
     *
     * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 2;</code>
     */
    boolean hasViolationPolicy();
    /**
     * <pre>
     * The action to take when the quota is violated
     * </pre>
     *
     * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy getViolationPolicy();
    /**
     * <pre>
     * When true, remove the quota.
     * </pre>
     *
     * <code>optional bool remove = 3 [default = false];</code>
     */
    boolean hasRemove();
    /**
     * <pre>
     * When true, remove the quota.
     * </pre>
     *
     * <code>optional bool remove = 3 [default = false];</code>
     */
    boolean getRemove();
  }
  /**
   * <pre>
   * Defines a limit on the amount of filesystem space used by a table/namespace
   * </pre>
   *
   * Protobuf type {@code hbase.pb.SpaceQuota}
   */
  @javax.annotation.Generated("proto") public static final class SpaceQuota extends
      org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.SpaceQuota)
      SpaceQuotaOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use SpaceQuota.newBuilder() to construct.
    private SpaceQuota(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    // Field defaults: violationPolicy_ = 1 is the wire number of the default
    // enum value for SpaceViolationPolicy (stored as a raw int).
    private SpaceQuota() {
      softLimit_ = 0L;
      violationPolicy_ = 1;
      remove_ = false;
    }
    @java.lang.Override
    public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: consumes tag/value pairs until tag 0
    // (end of stream). Unrecognized tags and unknown enum numbers are preserved
    // in unknownFields; the finally block seals them even on parse failure.
    private SpaceQuota(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // field 1, varint: soft_limit
              bitField0_ |= 0x00000001;
              softLimit_ = input.readUInt64();
              break;
            }
            case 16: {
              // field 2, varint: violation_policy; unknown enum numbers are
              // routed to unknownFields instead of being dropped
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy value = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                violationPolicy_ = rawValue;
              }
              break;
            }
            case 24: {
              // field 3, varint: remove
              bitField0_ |= 0x00000004;
              remove_ = input.readBool();
              break;
            }
          }
        }
      } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuota_descriptor;
    }
    protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuota_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder.class);
    }
    // Presence bits: 0x1 = soft_limit, 0x2 = violation_policy, 0x4 = remove.
    private int bitField0_;
    public static final int SOFT_LIMIT_FIELD_NUMBER = 1;
    private long softLimit_;
    /**
     * <pre>
     * The limit of bytes for this quota
     * </pre>
     *
     * <code>optional uint64 soft_limit = 1;</code>
     */
    public boolean hasSoftLimit() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <pre>
     * The limit of bytes for this quota
     * </pre>
     *
     * <code>optional uint64 soft_limit = 1;</code>
     */
    public long getSoftLimit() {
      return softLimit_;
    }
    public static final int VIOLATION_POLICY_FIELD_NUMBER = 2;
    private int violationPolicy_;
    /**
     * <pre>
     * The action to take when the quota is violated
     * </pre>
     *
     * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 2;</code>
     */
    public boolean hasViolationPolicy() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <pre>
     * The action to take when the quota is violated
     * </pre>
     *
     * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 2;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy getViolationPolicy() {
      // Falls back to DISABLE if the stored raw number has no enum mapping.
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy.valueOf(violationPolicy_);
      return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy.DISABLE : result;
    }
    public static final int REMOVE_FIELD_NUMBER = 3;
    private boolean remove_;
    /**
     * <pre>
     * When true, remove the quota.
     * </pre>
     *
     * <code>optional bool remove = 3 [default = false];</code>
     */
    public boolean hasRemove() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <pre>
     * When true, remove the quota.
     * </pre>
     *
     * <code>optional bool remove = 3 [default = false];</code>
     */
    public boolean getRemove() {
      return remove_;
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true. All fields are optional
    // scalars, so initialization is unconditionally true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes only the fields whose presence bit is set, then unknown fields.
    public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, softLimit_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, violationPolicy_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(3, remove_);
      }
      unknownFields.writeTo(output);
    }
    // Computes and memoizes the serialized byte size (mirrors writeTo).
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, softLimit_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, violationPolicy_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, remove_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }
    // Field-by-field equality: presence bits, values, then unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota) obj;
      boolean result = true;
      result = result && (hasSoftLimit() == other.hasSoftLimit());
      if (hasSoftLimit()) {
        result = result && (getSoftLimit()
            == other.getSoftLimit());
      }
      result = result && (hasViolationPolicy() == other.hasViolationPolicy());
      if (hasViolationPolicy()) {
        result = result && violationPolicy_ == other.violationPolicy_;
      }
      result = result && (hasRemove() == other.hasRemove());
      if (hasRemove()) {
        result = result && (getRemove()
            == other.getRemove());
      }
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }
    // Memoized hash consistent with equals(): mixes only the fields present.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasSoftLimit()) {
        hash = (37 * hash) + SOFT_LIMIT_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
            getSoftLimit());
      }
      if (hasViolationPolicy()) {
        hash = (37 * hash) + VIOLATION_POLICY_FIELD_NUMBER;
        hash = (53 * hash) + violationPolicy_;
      }
      if (hasRemove()) {
        hash = (37 * hash) + REMOVE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashBoolean(
            getRemove());
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // ---- Static parse entry points for the common input representations. ----
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom(byte[] data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom(
        byte[] data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom(
        java.io.InputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    /** Returns a new builder seeded from the shared default instance. */
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    // The default instance converts to an empty builder rather than merging.
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * Defines a limit on the amount of filesystem space used by a table/namespace
     * </pre>
     *
     * Protobuf type {@code hbase.pb.SpaceQuota}
     */
    @javax.annotation.Generated("proto") public static final class Builder extends
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.SpaceQuota)
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder {
      public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuota_descriptor;
      }
      protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuota_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Intentionally empty: no message-typed fields, so no field builders exist.
      private void maybeForceBuilderInitialization() {
        if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
        }
      }
      // Resets every field to its default and clears all presence bits.
      public Builder clear() {
        super.clear();
        softLimit_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        violationPolicy_ = 1;
        bitField0_ = (bitField0_ & ~0x00000002);
        remove_ = false;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuota_descriptor;
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance();
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota build() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies builder state into a new message: values are copied
      // unconditionally, presence bits only where set in the builder.
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.softLimit_ = softLimit_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.violationPolicy_ = violationPolicy_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.remove_ = remove_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Copies only the fields that are present on the other message.
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance()) return this;
        if (other.hasSoftLimit()) {
          setSoftLimit(other.getSoftLimit());
        }
        if (other.hasViolationPolicy()) {
          setViolationPolicy(other.getViolationPolicy());
        }
        if (other.hasRemove()) {
          setRemove(other.getRemove());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      // Parses from the stream; any partially parsed message is merged in
      // before a parse failure is rethrown as an IOException.
      public Builder mergeFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      private long softLimit_ ;
      /**
       * <pre>
       * The limit of bytes for this quota
       * </pre>
       *
       * <code>optional uint64 soft_limit = 1;</code>
       */
      public boolean hasSoftLimit() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <pre>
       * The limit of bytes for this quota
       * </pre>
       *
       * <code>optional uint64 soft_limit = 1;</code>
       */
      public long getSoftLimit() {
        return softLimit_;
      }
      /**
       * <pre>
       * The limit of bytes for this quota
       * </pre>
       *
       * <code>optional uint64 soft_limit = 1;</code>
       */
      public Builder setSoftLimit(long value) {
        bitField0_ |= 0x00000001;
        softLimit_ = value;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * The limit of bytes for this quota
       * </pre>
       *
       * <code>optional uint64 soft_limit = 1;</code>
       */
      public Builder clearSoftLimit() {
        bitField0_ = (bitField0_ & ~0x00000001);
        softLimit_ = 0L;
        onChanged();
        return this;
      }
      // Stored as the raw enum wire number; 1 is the default policy number.
      private int violationPolicy_ = 1;
      /**
       * <pre>
       * The action to take when the quota is violated
       * </pre>
       *
       * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 2;</code>
       */
      public boolean hasViolationPolicy() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <pre>
       * The action to take when the quota is violated
       * </pre>
       *
       * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 2;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy getViolationPolicy() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy.valueOf(violationPolicy_);
        return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy.DISABLE : result;
      }
      /**
       * <pre>
       * The action to take when the quota is violated
       * </pre>
       *
       * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 2;</code>
       */
      public Builder setViolationPolicy(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        violationPolicy_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <pre>
       * The action to take when the quota is violated
       * </pre>
       *
       * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 2;</code>
       */
      public Builder clearViolationPolicy() {
        bitField0_ = (bitField0_ & ~0x00000002);
        violationPolicy_ = 1;
        onChanged();
        return this;
      }
      private boolean remove_ ;
      /**
       * <pre>
       * When true, remove the quota.
       * </pre>
       *
       * <code>optional bool remove = 3 [default = false];</code>
       */
      public boolean hasRemove() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <pre>
       * When true, remove the quota.
       * </pre>
       *
       * <code>optional bool remove = 3 [default = false];</code>
       */
      public boolean getRemove() {
        return remove_;
      }
      /**
       * <pre>
       * When true, remove the quota.
       * </pre>
       *
       * <code>optional bool remove = 3 [default = false];</code>
       */
      public Builder setRemove(boolean value) {
        bitField0_ |= 0x00000004;
        remove_ = value;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * When true, remove the quota.
       * </pre>
       *
       * <code>optional bool remove = 3 [default = false];</code>
       */
      public Builder clearRemove() {
        bitField0_ = (bitField0_ & ~0x00000004);
        remove_ = false;
        onChanged();
        return this;
      }
      public final Builder setUnknownFields(
          final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      public final Builder mergeUnknownFields(
          final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.SpaceQuota)
    }
    // @@protoc_insertion_point(class_scope:hbase.pb.SpaceQuota)
    // Shared singleton default (empty) instance; also the seed for newBuilder().
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota();
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
    // Deprecated field; callers use parser(). Delegates to the parsing ctor.
    @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceQuota>
        PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<SpaceQuota>() {
      public SpaceQuota parsePartialFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        return new SpaceQuota(input, extensionRegistry);
      }
    };
    public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceQuota> parser() {
      return PARSER;
    }
    @java.lang.Override
    public org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceQuota> getParserForType() {
      return PARSER;
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
  }
  /**
   * Read-only accessor interface for {@code hbase.pb.SpaceLimitRequest}:
   * presence check and getters for the optional nested {@code SpaceQuota}.
   */
  public interface SpaceLimitRequestOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.SpaceLimitRequest)
      org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
    /**
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    boolean hasQuota();
    /**
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota getQuota();
    /**
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder getQuotaOrBuilder();
  }
/**
 * <pre>
 * The Request to limit space usage (to allow for schema evolution not tied to SpaceQuota).
 * </pre>
 *
 * Protobuf type {@code hbase.pb.SpaceLimitRequest}
 */
// NOTE(review): machine-generated protobuf code (see the @Generated annotation).
// Do not edit by hand -- change the .proto definition and regenerate instead.
@javax.annotation.Generated("proto") public static final class SpaceLimitRequest extends
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hbase.pb.SpaceLimitRequest)
    SpaceLimitRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use SpaceLimitRequest.newBuilder() to construct.
  private SpaceLimitRequest(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private SpaceLimitRequest() {
  }
  @java.lang.Override
  public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: reads tag/value pairs until end of stream,
  // preserving unrecognized fields in the UnknownFieldSet.
  private SpaceLimitRequest(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks the end of the input stream.
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          // Tag 10 = field number 1, length-delimited: the nested SpaceQuota message.
          case 10: {
            org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder subBuilder = null;
            if (((bitField0_ & 0x00000001) == 0x00000001)) {
              // Field already seen on the wire: merge the new occurrence into the old.
              subBuilder = quota_.toBuilder();
            }
            quota_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.PARSER, extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(quota_);
              quota_ = subBuilder.buildPartial();
            }
            bitField0_ |= 0x00000001;
            break;
          }
        }
      }
    } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceLimitRequest_descriptor;
  }
  protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceLimitRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest.Builder.class);
  }
  // Bit 0 of bitField0_ tracks presence of the optional 'quota' field.
  private int bitField0_;
  public static final int QUOTA_FIELD_NUMBER = 1;
  private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota quota_;
  /**
   * Returns true if the optional {@code quota} field was set.
   * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
   */
  public boolean hasQuota() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * Returns the {@code quota} field, falling back to the default instance when unset.
   * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota getQuota() {
    return quota_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance() : quota_;
  }
  /**
   * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder getQuotaOrBuilder() {
    return quota_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance() : quota_;
  }
  // Cached isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeMessage(1, getQuota());
    }
    unknownFields.writeTo(output);
  }
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, getQuota());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest) obj;
    boolean result = true;
    result = result && (hasQuota() == other.hasQuota());
    if (hasQuota()) {
      result = result && getQuota()
          .equals(other.getQuota());
    }
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode caches the hash; 0 means not yet computed.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasQuota()) {
      hash = (37 * hash) + QUOTA_FIELD_NUMBER;
      hash = (53 * hash) + getQuota().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parseFrom overloads: all delegate to the shared PARSER / the
  // GeneratedMessageV3 IOException-translating helpers.
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom(
      java.nio.ByteBuffer data)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom(byte[] data)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom(
      byte[] data,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom(
      java.io.InputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    // Avoid an unnecessary mergeFrom() when converting the default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * The Request to limit space usage (to allow for schema evolution not tied to SpaceQuota).
   * </pre>
   *
   * Protobuf type {@code hbase.pb.SpaceLimitRequest}
   */
  @javax.annotation.Generated("proto") public static final class Builder extends
      org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:hbase.pb.SpaceLimitRequest)
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequestOrBuilder {
    public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceLimitRequest_descriptor;
    }
    protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceLimitRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest.Builder.class);
    }
    // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create nested field builders when the runtime requests it.
      if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getQuotaFieldBuilder();
      }
    }
    public Builder clear() {
      super.clear();
      if (quotaBuilder_ == null) {
        quota_ = null;
      } else {
        quotaBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceLimitRequest_descriptor;
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest.getDefaultInstance();
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest build() {
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest buildPartial() {
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      if (quotaBuilder_ == null) {
        result.quota_ = quota_;
      } else {
        result.quota_ = quotaBuilder_.build();
      }
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest) {
        return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest other) {
      // Merging the default instance is a no-op.
      if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest.getDefaultInstance()) return this;
      if (other.hasQuota()) {
        mergeQuota(other.getQuota());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    public final boolean isInitialized() {
      return true;
    }
    public Builder mergeFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure so callers can inspect it.
        parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;
    // Exactly one of quota_ / quotaBuilder_ is live at a time; once
    // getQuotaFieldBuilder() is called, quotaBuilder_ owns the field.
    private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota quota_ = null;
    private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder> quotaBuilder_;
    /**
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    public boolean hasQuota() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota getQuota() {
      if (quotaBuilder_ == null) {
        return quota_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance() : quota_;
      } else {
        return quotaBuilder_.getMessage();
      }
    }
    /**
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    public Builder setQuota(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota value) {
      if (quotaBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        quota_ = value;
        onChanged();
      } else {
        quotaBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    public Builder setQuota(
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder builderForValue) {
      if (quotaBuilder_ == null) {
        quota_ = builderForValue.build();
        onChanged();
      } else {
        quotaBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * Merges {@code value} into any existing quota; replaces it when unset or default.
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    public Builder mergeQuota(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota value) {
      if (quotaBuilder_ == null) {
        if (((bitField0_ & 0x00000001) == 0x00000001) &&
            quota_ != null &&
            quota_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance()) {
          quota_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.newBuilder(quota_).mergeFrom(value).buildPartial();
        } else {
          quota_ = value;
        }
        onChanged();
      } else {
        quotaBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    public Builder clearQuota() {
      if (quotaBuilder_ == null) {
        quota_ = null;
        onChanged();
      } else {
        quotaBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    /**
     * Marks the field present and returns a mutable builder for it.
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder getQuotaBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getQuotaFieldBuilder().getBuilder();
    }
    /**
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder getQuotaOrBuilder() {
      if (quotaBuilder_ != null) {
        return quotaBuilder_.getMessageOrBuilder();
      } else {
        return quota_ == null ?
            org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.getDefaultInstance() : quota_;
      }
    }
    /**
     * Lazily creates the single-field builder; ownership of the field value
     * transfers to it (quota_ is nulled afterwards).
     * <code>optional .hbase.pb.SpaceQuota quota = 1;</code>
     */
    private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder>
        getQuotaFieldBuilder() {
      if (quotaBuilder_ == null) {
        quotaBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaOrBuilder>(
                getQuota(),
                getParentForChildren(),
                isClean());
        quota_ = null;
      }
      return quotaBuilder_;
    }
    public final Builder setUnknownFields(
        final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    public final Builder mergeUnknownFields(
        final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:hbase.pb.SpaceLimitRequest)
  }
  // @@protoc_insertion_point(class_scope:hbase.pb.SpaceLimitRequest)
  // Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest();
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Deprecated as a public member; external callers should use parser() instead.
  @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceLimitRequest>
      PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<SpaceLimitRequest>() {
    public SpaceLimitRequest parsePartialFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return new SpaceLimitRequest(input, extensionRegistry);
    }
  };
  public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceLimitRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceLimitRequest> getParserForType() {
    return PARSER;
  }
  public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/**
 * Read-only accessor view implemented by both the immutable {@code SpaceQuotaStatus}
 * message and its {@code Builder}.
 */
public interface SpaceQuotaStatusOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.SpaceQuotaStatus)
    org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
  /**
   * Returns true if the optional {@code violation_policy} field has been set.
   * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 1;</code>
   */
  boolean hasViolationPolicy();
  /**
   * Returns the policy enum value, or its default when unset.
   * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy getViolationPolicy();
  /**
   * Returns true if the optional {@code in_violation} field has been set.
   * <code>optional bool in_violation = 2;</code>
   */
  boolean hasInViolation();
  /**
   * <code>optional bool in_violation = 2;</code>
   */
  boolean getInViolation();
}
/**
* <pre>
* Represents the state of a quota on a table. Either the quota is not in violation
* or it is in violation there is a violation policy which should be in effect.
* </pre>
*
* Protobuf type {@code hbase.pb.SpaceQuotaStatus}
*/
@javax.annotation.Generated("proto") public static final class SpaceQuotaStatus extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.SpaceQuotaStatus)
SpaceQuotaStatusOrBuilder {
private static final long serialVersionUID = 0L;
// Use SpaceQuotaStatus.newBuilder() to construct.
private SpaceQuotaStatus(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SpaceQuotaStatus() {
violationPolicy_ = 1;
inViolation_ = false;
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SpaceQuotaStatus(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy value = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
violationPolicy_ = rawValue;
}
break;
}
case 16: {
bitField0_ |= 0x00000002;
inViolation_ = input.readBool();
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuotaStatus_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuotaStatus_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.Builder.class);
}
private int bitField0_;
public static final int VIOLATION_POLICY_FIELD_NUMBER = 1;
private int violationPolicy_;
/**
* <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 1;</code>
*/
public boolean hasViolationPolicy() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy getViolationPolicy() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy.valueOf(violationPolicy_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy.DISABLE : result;
}
public static final int IN_VIOLATION_FIELD_NUMBER = 2;
private boolean inViolation_;
/**
* <code>optional bool in_violation = 2;</code>
*/
public boolean hasInViolation() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bool in_violation = 2;</code>
*/
public boolean getInViolation() {
return inViolation_;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, violationPolicy_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBool(2, inViolation_);
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, violationPolicy_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeBoolSize(2, inViolation_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus) obj;
boolean result = true;
result = result && (hasViolationPolicy() == other.hasViolationPolicy());
if (hasViolationPolicy()) {
result = result && violationPolicy_ == other.violationPolicy_;
}
result = result && (hasInViolation() == other.hasInViolation());
if (hasInViolation()) {
result = result && (getInViolation()
== other.getInViolation());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasViolationPolicy()) {
hash = (37 * hash) + VIOLATION_POLICY_FIELD_NUMBER;
hash = (53 * hash) + violationPolicy_;
}
if (hasInViolation()) {
hash = (37 * hash) + IN_VIOLATION_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashBoolean(
getInViolation());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
    // --- Generated static parse entry points (protoc output; do not hand-edit) ---
    // In-memory overloads delegate to PARSER directly; stream overloads go
    // through the GeneratedMessageV3 helpers, which surface parse failures as
    // IOException rather than InvalidProtocolBufferException.
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom(byte[] data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom(
        byte[] data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom(
        java.io.InputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // The "delimited" variants read/expect a varint length prefix before the
    // message body, allowing multiple messages on one stream.
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // --- Builder factory methods ---
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    // For the default instance an empty Builder suffices; otherwise seed the
    // Builder with this message's field values via mergeFrom.
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * Represents the state of a quota on a table. Either the quota is not in violation
     * or it is in violation there is a violation policy which should be in effect.
     * </pre>
     *
     * Protobuf type {@code hbase.pb.SpaceQuotaStatus}
     */
    // NOTE(review): generated builder (protoc) — do not hand-edit; regenerate
    // from the .proto instead. Field presence is tracked via bitField0_:
    // bit 0x1 = violation_policy, bit 0x2 = in_violation.
    @javax.annotation.Generated("proto") public static final class Builder extends
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.SpaceQuotaStatus)
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatusOrBuilder {
      public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuotaStatus_descriptor;
      }
      protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuotaStatus_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields in this message, so there are no nested field
      // builders to eagerly initialize; the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
        }
      }
      // Resets both fields to their proto defaults and clears their has-bits.
      public Builder clear() {
        super.clear();
        violationPolicy_ = 1;
        bitField0_ = (bitField0_ & ~0x00000001);
        inViolation_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuotaStatus_descriptor;
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.getDefaultInstance();
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus build() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies builder state (field values and has-bits) into a new message
      // without checking required-field initialization.
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.violationPolicy_ = violationPolicy_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.inViolation_ = inViolation_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder clone() {
        return (Builder) super.clone();
      }
      // The following overrides only narrow the return type to Builder while
      // delegating to GeneratedMessageV3.Builder.
      public Builder setField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      // Dispatches to the typed mergeFrom when possible; otherwise falls back
      // to reflective merging in the superclass.
      public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Copies only the fields that are set on 'other'; merging the default
      // instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.getDefaultInstance()) return this;
        if (other.hasViolationPolicy()) {
          setViolationPolicy(other.getViolationPolicy());
        }
        if (other.hasInViolation()) {
          setInViolation(other.getInViolation());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }
      // Both fields are optional, so any builder state is initialized.
      public final boolean isInitialized() {
        return true;
      }
      // Parses from the wire and merges into this builder. On parse error the
      // partially-read message is still merged (finally block) before the
      // exception is rethrown as an IOException.
      public Builder mergeFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      // Stored as the raw enum wire number; 1 is the proto default value.
      private int violationPolicy_ = 1;
      /**
       * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 1;</code>
       */
      public boolean hasViolationPolicy() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 1;</code>
       */
      // Falls back to DISABLE if the stored number maps to no known enum value.
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy getViolationPolicy() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy.valueOf(violationPolicy_);
        return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy.DISABLE : result;
      }
      /**
       * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 1;</code>
       */
      public Builder setViolationPolicy(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        violationPolicy_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hbase.pb.SpaceViolationPolicy violation_policy = 1;</code>
       */
      public Builder clearViolationPolicy() {
        bitField0_ = (bitField0_ & ~0x00000001);
        violationPolicy_ = 1;
        onChanged();
        return this;
      }
      private boolean inViolation_ ;
      /**
       * <code>optional bool in_violation = 2;</code>
       */
      public boolean hasInViolation() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool in_violation = 2;</code>
       */
      public boolean getInViolation() {
        return inViolation_;
      }
      /**
       * <code>optional bool in_violation = 2;</code>
       */
      public Builder setInViolation(boolean value) {
        bitField0_ |= 0x00000002;
        inViolation_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool in_violation = 2;</code>
       */
      public Builder clearInViolation() {
        bitField0_ = (bitField0_ & ~0x00000002);
        inViolation_ = false;
        onChanged();
        return this;
      }
      public final Builder setUnknownFields(
          final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      public final Builder mergeUnknownFields(
          final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.SpaceQuotaStatus)
    }
// @@protoc_insertion_point(class_scope:hbase.pb.SpaceQuotaStatus)
    // Singleton default instance; also the prototype from which newBuilder()
    // creates builders.
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus();
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
    // Deprecated as a public field by the protobuf generator; callers should
    // use parser() instead of touching PARSER directly.
    @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceQuotaStatus>
        PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<SpaceQuotaStatus>() {
      public SpaceQuotaStatus parsePartialFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        return new SpaceQuotaStatus(input, extensionRegistry);
      }
    };
    public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceQuotaStatus> parser() {
      return PARSER;
    }
    @java.lang.Override
    public org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceQuotaStatus> getParserForType() {
      return PARSER;
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
}
  // Read-only accessor interface shared by SpaceQuotaSnapshot and its Builder
  // (generated by protoc). Fields: quota_status (message), quota_usage and
  // quota_limit (uint64); has* methods report explicit field presence.
  public interface SpaceQuotaSnapshotOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.SpaceQuotaSnapshot)
      org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
    /**
     * <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
     */
    boolean hasQuotaStatus();
    /**
     * <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus getQuotaStatus();
    /**
     * <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatusOrBuilder getQuotaStatusOrBuilder();
    /**
     * <code>optional uint64 quota_usage = 2;</code>
     */
    boolean hasQuotaUsage();
    /**
     * <code>optional uint64 quota_usage = 2;</code>
     */
    long getQuotaUsage();
    /**
     * <code>optional uint64 quota_limit = 3;</code>
     */
    boolean hasQuotaLimit();
    /**
     * <code>optional uint64 quota_limit = 3;</code>
     */
    long getQuotaLimit();
  }
/**
* <pre>
* Message stored in the value of hbase:quota table to denote the status of a table WRT
* the quota applicable to it.
* </pre>
*
* Protobuf type {@code hbase.pb.SpaceQuotaSnapshot}
*/
@javax.annotation.Generated("proto") public static final class SpaceQuotaSnapshot extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.SpaceQuotaSnapshot)
SpaceQuotaSnapshotOrBuilder {
private static final long serialVersionUID = 0L;
    // Use SpaceQuotaSnapshot.newBuilder() to construct.
    private SpaceQuotaSnapshot(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    // No-arg constructor initializes scalar fields to proto defaults; the
    // message field quotaStatus_ stays null (treated as default instance by
    // its accessor).
    private SpaceQuotaSnapshot() {
      quotaUsage_ = 0L;
      quotaLimit_ = 0L;
    }
    @java.lang.Override
    public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor (generated). Reads tag/value pairs until
    // tag 0 (end of input); unrecognized tags are preserved in unknownFields.
    // Note the switch matches on tag VALUE, so the position of 'default'
    // before the concrete cases has no effect on dispatch.
    private SpaceQuotaSnapshot(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (quota_status): if already seen, merge the repeated
              // occurrence into the existing value per proto2 semantics.
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = quotaStatus_.toBuilder();
              }
              quotaStatus_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(quotaStatus_);
                quotaStatus_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              quotaUsage_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              quotaLimit_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuotaSnapshot_descriptor;
    }
    protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuotaSnapshot_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder.class);
    }
    // Presence bits: 0x1 = quota_status, 0x2 = quota_usage, 0x4 = quota_limit.
    private int bitField0_;
    public static final int QUOTA_STATUS_FIELD_NUMBER = 1;
    // Null means "unset"; accessors substitute the default instance.
    private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus quotaStatus_;
    /**
     * <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
     */
    public boolean hasQuotaStatus() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus getQuotaStatus() {
      return quotaStatus_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.getDefaultInstance() : quotaStatus_;
    }
    /**
     * <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatusOrBuilder getQuotaStatusOrBuilder() {
      return quotaStatus_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.getDefaultInstance() : quotaStatus_;
    }
    public static final int QUOTA_USAGE_FIELD_NUMBER = 2;
    // uint64 on the wire; stored in a signed long (values above 2^63-1 appear
    // negative, per standard protobuf Java mapping).
    private long quotaUsage_;
    /**
     * <code>optional uint64 quota_usage = 2;</code>
     */
    public boolean hasQuotaUsage() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 quota_usage = 2;</code>
     */
    public long getQuotaUsage() {
      return quotaUsage_;
    }
    public static final int QUOTA_LIMIT_FIELD_NUMBER = 3;
    private long quotaLimit_;
    /**
     * <code>optional uint64 quota_limit = 3;</code>
     */
    public boolean hasQuotaLimit() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 quota_limit = 3;</code>
     */
    public long getQuotaLimit() {
      return quotaLimit_;
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    // All fields are optional, so the message is always initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes only the fields whose presence bits are set, then any
    // unknown fields carried over from parsing.
    public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, getQuotaStatus());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, quotaUsage_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, quotaLimit_);
      }
      unknownFields.writeTo(output);
    }
    // Computes and memoizes the serialized byte size (mirrors writeTo).
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
            .computeMessageSize(1, getQuotaStatus());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
            .computeUInt64Size(2, quotaUsage_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
            .computeUInt64Size(3, quotaLimit_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }
    // Field-wise equality: presence must match, and values compare only when
    // present; unknown fields participate too.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot) obj;
      boolean result = true;
      result = result && (hasQuotaStatus() == other.hasQuotaStatus());
      if (hasQuotaStatus()) {
        result = result && getQuotaStatus()
            .equals(other.getQuotaStatus());
      }
      result = result && (hasQuotaUsage() == other.hasQuotaUsage());
      if (hasQuotaUsage()) {
        result = result && (getQuotaUsage()
            == other.getQuotaUsage());
      }
      result = result && (hasQuotaLimit() == other.hasQuotaLimit());
      if (hasQuotaLimit()) {
        result = result && (getQuotaLimit()
            == other.getQuotaLimit());
      }
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }
    // Hash mixes only the fields that are present, keyed by field number,
    // and is memoized (consistent with equals above).
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasQuotaStatus()) {
        hash = (37 * hash) + QUOTA_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + getQuotaStatus().hashCode();
      }
      if (hasQuotaUsage()) {
        hash = (37 * hash) + QUOTA_USAGE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
            getQuotaUsage());
      }
      if (hasQuotaLimit()) {
        hash = (37 * hash) + QUOTA_LIMIT_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
            getQuotaLimit());
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // --- Generated static parse entry points (protoc output; do not hand-edit) ---
    // In-memory overloads delegate to PARSER; stream overloads use the
    // GeneratedMessageV3 helpers, which report failures as IOException.
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom(byte[] data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom(
        byte[] data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom(
        java.io.InputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // The "delimited" variants read/expect a varint length prefix before the
    // message body, allowing multiple messages on one stream.
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // --- Builder factory methods ---
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    // For the default instance an empty Builder suffices; otherwise seed the
    // Builder with this message's field values via mergeFrom.
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
/**
* <pre>
* Message stored in the value of hbase:quota table to denote the status of a table WRT
* the quota applicable to it.
* </pre>
*
* Protobuf type {@code hbase.pb.SpaceQuotaSnapshot}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.SpaceQuotaSnapshot)
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder {
      public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuotaSnapshot_descriptor;
      }
      protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuotaSnapshot_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the quota_status field builder when the runtime is
      // configured to always use field builders.
      private void maybeForceBuilderInitialization() {
        if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
          getQuotaStatusFieldBuilder();
        }
      }
      // Resets all three fields to proto defaults and clears their has-bits
      // (0x1 quota_status, 0x2 quota_usage, 0x4 quota_limit).
      public Builder clear() {
        super.clear();
        if (quotaStatusBuilder_ == null) {
          quotaStatus_ = null;
        } else {
          quotaStatusBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        quotaUsage_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        quotaLimit_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_SpaceQuotaSnapshot_descriptor;
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance();
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot build() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies builder state into a new message without initialization checks;
      // the quota_status value comes from the field builder when one exists.
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (quotaStatusBuilder_ == null) {
          result.quotaStatus_ = quotaStatus_;
        } else {
          result.quotaStatus_ = quotaStatusBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.quotaUsage_ = quotaUsage_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.quotaLimit_ = quotaLimit_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder clone() {
        return (Builder) super.clone();
      }
      // The following overrides only narrow the return type to Builder while
      // delegating to GeneratedMessageV3.Builder.
      public Builder setField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      // Dispatches to the typed mergeFrom when possible; otherwise falls back
      // to reflective merging in the superclass.
      public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Copies only the fields that are set on 'other'; the message-typed
      // quota_status is merged (not replaced) per proto2 semantics.
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance()) return this;
        if (other.hasQuotaStatus()) {
          mergeQuotaStatus(other.getQuotaStatus());
        }
        if (other.hasQuotaUsage()) {
          setQuotaUsage(other.getQuotaUsage());
        }
        if (other.hasQuotaLimit()) {
          setQuotaLimit(other.getQuotaLimit());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }
      // All fields are optional, so any builder state is initialized.
      public final boolean isInitialized() {
        return true;
      }
      // Parses from the wire and merges into this builder. On parse error the
      // partially-read message is still merged (finally block) before the
      // exception is rethrown as an IOException.
      public Builder mergeFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: 0x1 = quota_status, 0x2 = quota_usage, 0x4 = quota_limit.
      private int bitField0_;
      // quotaStatus_ holds the plain value; once a field builder is created,
      // quotaStatusBuilder_ becomes the source of truth instead.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus quotaStatus_ = null;
      private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatusOrBuilder> quotaStatusBuilder_;
      /**
       * <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
       */
      public boolean hasQuotaStatus() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus getQuotaStatus() {
        if (quotaStatusBuilder_ == null) {
          return quotaStatus_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.getDefaultInstance() : quotaStatus_;
        } else {
          return quotaStatusBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
       */
      public Builder setQuotaStatus(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus value) {
        if (quotaStatusBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          quotaStatus_ = value;
          onChanged();
        } else {
          quotaStatusBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
       */
      public Builder setQuotaStatus(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.Builder builderForValue) {
        if (quotaStatusBuilder_ == null) {
          quotaStatus_ = builderForValue.build();
          onChanged();
        } else {
          quotaStatusBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
/**
* <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
*/
public Builder mergeQuotaStatus(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus value) {
if (quotaStatusBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
quotaStatus_ != null &&
quotaStatus_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.getDefaultInstance()) {
quotaStatus_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.newBuilder(quotaStatus_).mergeFrom(value).buildPartial();
} else {
quotaStatus_ = value;
}
onChanged();
} else {
quotaStatusBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
*/
public Builder clearQuotaStatus() {
if (quotaStatusBuilder_ == null) {
quotaStatus_ = null;
onChanged();
} else {
quotaStatusBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
* <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.Builder getQuotaStatusBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getQuotaStatusFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatusOrBuilder getQuotaStatusOrBuilder() {
if (quotaStatusBuilder_ != null) {
return quotaStatusBuilder_.getMessageOrBuilder();
} else {
return quotaStatus_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.getDefaultInstance() : quotaStatus_;
}
}
/**
* <code>optional .hbase.pb.SpaceQuotaStatus quota_status = 1;</code>
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatusOrBuilder>
getQuotaStatusFieldBuilder() {
if (quotaStatusBuilder_ == null) {
quotaStatusBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatusOrBuilder>(
getQuotaStatus(),
getParentForChildren(),
isClean());
quotaStatus_ = null;
}
return quotaStatusBuilder_;
}
      // Backing storage for the optional uint64 quota_usage field (presence bit 0x2).
      private long quotaUsage_ ;
      /**
       * Returns true when quota_usage has been explicitly set.
       * <code>optional uint64 quota_usage = 2;</code>
       */
      public boolean hasQuotaUsage() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * Returns quota_usage (0 when unset).
       * <code>optional uint64 quota_usage = 2;</code>
       */
      public long getQuotaUsage() {
        return quotaUsage_;
      }
      /**
       * Sets quota_usage and its presence bit.
       * <code>optional uint64 quota_usage = 2;</code>
       */
      public Builder setQuotaUsage(long value) {
        bitField0_ |= 0x00000002;
        quotaUsage_ = value;
        onChanged();
        return this;
      }
      /**
       * Clears quota_usage back to its default (0) and clears the presence bit.
       * <code>optional uint64 quota_usage = 2;</code>
       */
      public Builder clearQuotaUsage() {
        bitField0_ = (bitField0_ & ~0x00000002);
        quotaUsage_ = 0L;
        onChanged();
        return this;
      }
      // Backing storage for the optional uint64 quota_limit field (presence bit 0x4).
      private long quotaLimit_ ;
      /**
       * Returns true when quota_limit has been explicitly set.
       * <code>optional uint64 quota_limit = 3;</code>
       */
      public boolean hasQuotaLimit() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * Returns quota_limit (0 when unset).
       * <code>optional uint64 quota_limit = 3;</code>
       */
      public long getQuotaLimit() {
        return quotaLimit_;
      }
      /**
       * Sets quota_limit and its presence bit.
       * <code>optional uint64 quota_limit = 3;</code>
       */
      public Builder setQuotaLimit(long value) {
        bitField0_ |= 0x00000004;
        quotaLimit_ = value;
        onChanged();
        return this;
      }
      /**
       * Clears quota_limit back to its default (0) and clears the presence bit.
       * <code>optional uint64 quota_limit = 3;</code>
       */
      public Builder clearQuotaLimit() {
        bitField0_ = (bitField0_ & ~0x00000004);
        quotaLimit_ = 0L;
        onChanged();
        return this;
      }
      // Generated overrides that delegate unknown-field handling to the superclass
      // while narrowing the return type to this Builder for fluent chaining.
      public final Builder setUnknownFields(
          final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      public final Builder mergeUnknownFields(
          final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
// @@protoc_insertion_point(builder_scope:hbase.pb.SpaceQuotaSnapshot)
}
// @@protoc_insertion_point(class_scope:hbase.pb.SpaceQuotaSnapshot)
    // Singleton default (all-fields-unset) instance of SpaceQuotaSnapshot,
    // created eagerly in the class's static initializer.
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot();
    }

    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
    // Wire-format parser for SpaceQuotaSnapshot. Direct use of the public PARSER
    // field is deprecated by the generator; callers should go through parser().
    @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceQuotaSnapshot>
        PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<SpaceQuotaSnapshot>() {
      public SpaceQuotaSnapshot parsePartialFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        // Delegates to the stream-parsing private constructor.
        return new SpaceQuotaSnapshot(input, extensionRegistry);
      }
    };

    public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceQuotaSnapshot> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hbase.thirdparty.com.google.protobuf.Parser<SpaceQuotaSnapshot> getParserForType() {
      return PARSER;
    }

    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
}
  /**
   * Accessor interface for {@code hbase.pb.GetSpaceQuotaRegionSizesRequest}.
   * The request message declares no fields, so this interface adds nothing
   * beyond the base MessageOrBuilder contract.
   */
  public interface GetSpaceQuotaRegionSizesRequestOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.GetSpaceQuotaRegionSizesRequest)
      org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hbase.pb.GetSpaceQuotaRegionSizesRequest}
   *
   * Empty request message (no declared fields); only unknown fields from the
   * wire are carried. Generated code — do not hand-edit; regenerate from the
   * .proto definition instead.
   */
  @javax.annotation.Generated("proto") public static final class GetSpaceQuotaRegionSizesRequest extends
      org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.GetSpaceQuotaRegionSizesRequest)
      GetSpaceQuotaRegionSizesRequestOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use GetSpaceQuotaRegionSizesRequest.newBuilder() to construct.
    private GetSpaceQuotaRegionSizesRequest(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetSpaceQuotaRegionSizesRequest() {
    }
    @java.lang.Override
    public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: since the message has no fields, every
    // tag except end-of-stream (0) is preserved as an unknown field.
    private GetSpaceQuotaRegionSizesRequest(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_descriptor;
    }
    protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest.Builder.class);
    }
    // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Only unknown fields are serialized (no declared fields exist).
      unknownFields.writeTo(output);
    }
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest) obj;
      // Equality reduces to unknown-field equality for this empty message.
      boolean result = true;
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // ---- Static parseFrom overloads for every supported input source. ----
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom(byte[] data)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom(
        byte[] data,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom(
        java.io.InputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // ---- Builder factory methods. ----
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.GetSpaceQuotaRegionSizesRequest}
     *
     * Builder for the empty request message; all state lives in the
     * superclass (unknown fields only).
     */
    @javax.annotation.Generated("proto") public static final class Builder extends
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.GetSpaceQuotaRegionSizesRequest)
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequestOrBuilder {
      public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_descriptor;
      }
      protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message-typed fields, so nothing to eagerly initialize.
        if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      public Builder clear() {
        super.clear();
        return this;
      }
      public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_descriptor;
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest.getDefaultInstance();
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest build() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest(this);
        onBuilt();
        return result;
      }
      public Builder clone() {
        return (Builder) super.clone();
      }
      // Reflective field mutators: delegate to the superclass, narrowing the
      // return type to this Builder for fluent chaining.
      public Builder setField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest.getDefaultInstance()) return this;
        // Only unknown fields can carry state for this empty message.
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }
      public final boolean isInitialized() {
        // No required fields.
        return true;
      }
      public Builder mergeFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was successfully decoded before rethrowing.
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      public final Builder setUnknownFields(
          final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      public final Builder mergeUnknownFields(
          final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.GetSpaceQuotaRegionSizesRequest)
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.GetSpaceQuotaRegionSizesRequest)
    // Singleton default instance, created eagerly in the static initializer.
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest();
    }

    public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Direct use of PARSER is deprecated by the generator; prefer parser().
    @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaRegionSizesRequest>
        PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<GetSpaceQuotaRegionSizesRequest>() {
      public GetSpaceQuotaRegionSizesRequest parsePartialFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        return new GetSpaceQuotaRegionSizesRequest(input, extensionRegistry);
      }
    };

    public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaRegionSizesRequest> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaRegionSizesRequest> getParserForType() {
      return PARSER;
    }

    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
  /**
   * Accessor interface for {@code hbase.pb.GetSpaceQuotaRegionSizesResponse},
   * which carries a single repeated message field `sizes` (per-table region
   * size entries).
   */
  public interface GetSpaceQuotaRegionSizesResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.GetSpaceQuotaRegionSizesResponse)
      org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {

    /**
     * Returns all size entries as an immutable list.
     * <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes> 
        getSizesList();
    /**
     * Returns the size entry at the given position.
     * <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes getSizes(int index);
    /**
     * Returns the number of size entries.
     * <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
     */
    int getSizesCount();
    /**
     * Returns the entries as message-or-builder views (builder side only).
     * <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizesOrBuilder> 
        getSizesOrBuilderList();
    /**
     * Returns a message-or-builder view of the entry at the given position.
     * <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizesOrBuilder getSizesOrBuilder(
        int index);
  }
/**
* Protobuf type {@code hbase.pb.GetSpaceQuotaRegionSizesResponse}
*/
@javax.annotation.Generated("proto") public static final class GetSpaceQuotaRegionSizesResponse extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.GetSpaceQuotaRegionSizesResponse)
GetSpaceQuotaRegionSizesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetSpaceQuotaRegionSizesResponse.newBuilder() to construct.
private GetSpaceQuotaRegionSizesResponse(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetSpaceQuotaRegionSizesResponse() {
sizes_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private GetSpaceQuotaRegionSizesResponse(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
sizes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes>();
mutable_bitField0_ |= 0x00000001;
}
sizes_.add(
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.PARSER, extensionRegistry));
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
sizes_ = java.util.Collections.unmodifiableList(sizes_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.Builder.class);
}
public interface RegionSizesOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
boolean hasTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
/**
* <code>optional uint64 size = 2;</code>
*/
boolean hasSize();
/**
* <code>optional uint64 size = 2;</code>
*/
long getSize();
}
/**
* Protobuf type {@code hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes}
*/
@javax.annotation.Generated("proto") public static final class RegionSizes extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes)
RegionSizesOrBuilder {
private static final long serialVersionUID = 0L;
// Use RegionSizes.newBuilder() to construct.
private RegionSizes(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RegionSizes() {
size_ = 0L;
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private RegionSizes(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 16: {
bitField0_ |= 0x00000002;
size_ = input.readUInt64();
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Static descriptor for the hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes message type.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_RegionSizes_descriptor;
}
// Binds the descriptor's fields to this class and its Builder for reflective access.
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_RegionSizes_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder.class);
}
// Presence bitmask for optional fields: bit 0x1 = table_name set, bit 0x2 = size set.
private int bitField0_;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
/**
 * Returns whether the table_name field was explicitly set.
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public boolean hasTableName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * Returns table_name, or the TableName default instance when unset (never null).
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
/**
 * Same value as {@link #getTableName()}, typed as the read-only OrBuilder view.
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
public static final int SIZE_FIELD_NUMBER = 2;
private long size_;
/**
 * Returns whether the size field was explicitly set.
 *
 * <code>optional uint64 size = 2;</code>
 */
public boolean hasSize() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * Returns size (0 when unset). Encoded as uint64 on the wire; interpret the
 * Java long as unsigned if values above Long.MAX_VALUE are possible.
 *
 * <code>optional uint64 size = 2;</code>
 */
public long getSize() {
return size_;
}
// Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// A RegionSizes is initialized unless a present table_name sub-message is itself
// missing required fields; the result is cached after the first computation.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasTableName()) {
if (!getTableName().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bits are set, in field-number order,
// then appends any preserved unknown fields.
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, getTableName());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeUInt64(2, size_);
}
unknownFields.writeTo(output);
}
// Computes the serialized byte size of this message, counting only fields that
// are set plus the unknown-field set; the result is memoized (messages are
// immutable, so the size never changes after first computation).
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getTableName());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeUInt64Size(2, size_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: two RegionSizes are equal when each field has matching
// presence and (if present) matching value, and their unknown fields match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes) obj;
boolean result = true;
result = result && (hasTableName() == other.hasTableName());
if (hasTableName()) {
result = result && getTableName()
.equals(other.getTableName());
}
result = result && (hasSize() == other.hasSize());
if (hasSize()) {
result = result && (getSize()
== other.getSize());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
// Hash code consistent with equals(): mixes the descriptor, each field that is
// present (tagged by its field number), and the unknown fields. Memoized since
// the message is immutable; a cached value of 0 is treated as "not yet computed".
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTableName()) {
hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getTableName().hashCode();
}
if (hasSize()) {
hash = (37 * hash) + SIZE_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
getSize());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. All overloads delegate to PARSER (or the
// GeneratedMessageV3 IO helpers for stream inputs) and throw
// InvalidProtocolBufferException on malformed input. The *parseDelimitedFrom*
// variants expect the message to be prefixed by its varint-encoded length.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods: newBuilder() starts from the default instance,
// newBuilder(prototype) pre-populates from an existing message, and toBuilder()
// avoids a redundant merge when called on the default instance itself.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
// Internal hook used by parent builders to create a child builder wired for
// change notifications.
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Mutable builder for {@code hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes}.
 * Tracks field presence in its own bitField0_ (bit 0x1 = table_name, bit 0x2 = size)
 * and delegates table_name handling to a SingleFieldBuilderV3 once one is created.
 *
 * Protobuf type {@code hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes}
 */
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes)
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizesOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_RegionSizes_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_RegionSizes_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested field builder when the runtime is configured to
// always use field builders (alwaysUseFieldBuilders).
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTableNameFieldBuilder();
}
}
// Resets every field to its default and clears all presence bits.
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
tableName_ = null;
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
size_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_RegionSizes_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.getDefaultInstance();
}
// build() enforces required-field initialization; buildPartial() does not.
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new immutable message, translating the builder's
// presence bits (from_bitField0_) into the message's bits (to_bitField0_).
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (tableNameBuilder_ == null) {
result.tableName_ = tableName_;
} else {
result.tableName_ = tableNameBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.size_ = size_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// The following overrides narrow the superclass return type to Builder so that
// reflective mutation calls remain chainable.
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Dispatches to the typed mergeFrom when possible; otherwise falls back to the
// reflective merge in the superclass.
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges only the fields that are present on `other`; merging the default
// instance is a no-op.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.getDefaultInstance()) return this;
if (other.hasTableName()) {
mergeTableName(other.getTableName());
}
if (other.hasSize()) {
setSize(other.getSize());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
if (hasTableName()) {
if (!getTableName().isInitialized()) {
return false;
}
}
return true;
}
// Parses from the wire into this builder. On parse failure, any successfully
// parsed prefix (the "unfinished message") is still merged in via finally.
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// table_name is held either directly in tableName_ or, once a nested builder
// has been requested, inside tableNameBuilder_ (exactly one is authoritative).
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
/**
 * Returns whether table_name has been set on this builder.
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public boolean hasTableName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * Returns the current table_name (default instance when unset), reading from
 * whichever of the plain field or the nested builder is authoritative.
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
} else {
return tableNameBuilder_.getMessage();
}
}
/**
 * Sets table_name; rejects null.
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tableName_ = value;
onChanged();
} else {
tableNameBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * Sets table_name from a sub-builder (built immediately).
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public Builder setTableName(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
} else {
tableNameBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * Merges `value` into the existing table_name when one is already set
 * (and non-default); otherwise simply adopts `value`.
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
tableName_ != null &&
tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
tableName_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
onChanged();
} else {
tableNameBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * Clears table_name and its presence bit.
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
tableName_ = null;
onChanged();
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
 * Returns a mutable sub-builder for table_name; marks the field as set.
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
}
/**
 * Read-only view of table_name without forcing builder creation.
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
return tableName_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
}
/**
 * Lazily creates the SingleFieldBuilderV3 for table_name, transferring the
 * current tableName_ value into it and nulling the plain field so the builder
 * becomes the single source of truth.
 *
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
getTableName(),
getParentForChildren(),
isClean());
tableName_ = null;
}
return tableNameBuilder_;
}
private long size_ ;
/**
 * Returns whether size has been set on this builder.
 *
 * <code>optional uint64 size = 2;</code>
 */
public boolean hasSize() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * Returns the current size value (0 when unset).
 *
 * <code>optional uint64 size = 2;</code>
 */
public long getSize() {
return size_;
}
/**
 * Sets size and its presence bit.
 *
 * <code>optional uint64 size = 2;</code>
 */
public Builder setSize(long value) {
bitField0_ |= 0x00000002;
size_ = value;
onChanged();
return this;
}
/**
 * Clears size back to 0 and clears its presence bit.
 *
 * <code>optional uint64 size = 2;</code>
 */
public Builder clearSize() {
bitField0_ = (bitField0_ & ~0x00000002);
size_ = 0L;
onChanged();
return this;
}
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes)
}
// @@protoc_insertion_point(class_scope:hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes)
// Shared immutable singleton representing a RegionSizes with all fields unset.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire parser; deprecated in favor of parser() but kept public for
// compatibility with older generated-code callers.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<RegionSizes>
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<RegionSizes>() {
public RegionSizes parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new RegionSizes(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<RegionSizes> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<RegionSizes> getParserForType() {
return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public static final int SIZES_FIELD_NUMBER = 1;
// Repeated field backing list; immutable once the message is built.
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes> sizes_;
/**
 * Returns the full list of per-table region-size entries (never null).
 *
 * <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes> getSizesList() {
return sizes_;
}
/**
 * Same list exposed through the read-only OrBuilder element type.
 *
 * <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizesOrBuilder>
getSizesOrBuilderList() {
return sizes_;
}
/**
 * Returns the number of sizes entries.
 *
 * <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
 */
public int getSizesCount() {
return sizes_.size();
}
/**
 * Returns the sizes entry at {@code index}.
 *
 * <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes getSizes(int index) {
return sizes_.get(index);
}
/**
 * Read-only OrBuilder view of the sizes entry at {@code index}.
 *
 * <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizesOrBuilder getSizesOrBuilder(
int index) {
return sizes_.get(index);
}
// Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// The response is initialized only if every repeated sizes element is; the
// result is cached after the first computation.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
for (int i = 0; i < getSizesCount(); i++) {
if (!getSizes(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes each repeated sizes element under field number 1, then appends
// any preserved unknown fields.
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < sizes_.size(); i++) {
output.writeMessage(1, sizes_.get(i));
}
unknownFields.writeTo(output);
}
// Computes (and memoizes) the serialized byte size: the sum of each sizes
// element's tagged size plus the unknown-field set.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < sizes_.size(); i++) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(1, sizes_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: the sizes lists must be element-wise equal (order matters)
// and the unknown fields must match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse) obj;
boolean result = true;
result = result && getSizesList()
.equals(other.getSizesList());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
// Hash code consistent with equals(): mixes the descriptor, the sizes list
// (only when non-empty, tagged by field number), and the unknown fields.
// Memoized; a cached 0 means "not yet computed".
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getSizesCount() > 0) {
hash = (37 * hash) + SIZES_FIELD_NUMBER;
hash = (53 * hash) + getSizesList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points for the enclosing response message.
// All overloads delegate to PARSER (or the GeneratedMessageV3 IO helpers for
// stream inputs); the *parseDelimitedFrom* variants expect a varint length prefix.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods: newBuilder() starts from the default instance,
// newBuilder(prototype) pre-populates from an existing message, and toBuilder()
// avoids a redundant merge when called on the default instance itself.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
// Internal hook used by parent builders to create a child builder wired for
// change notifications.
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.GetSpaceQuotaRegionSizesResponse}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.GetSpaceQuotaRegionSizesResponse)
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponseOrBuilder {
// Static descriptor for the hbase.pb.GetSpaceQuotaRegionSizesResponse message type.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_descriptor;
}
// Binds the descriptor's fields to the message class and this Builder for
// reflective access.
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the repeated-field builder when the runtime is configured to
// always use field builders (alwaysUseFieldBuilders).
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getSizesFieldBuilder();
}
}
// Resets the sizes list (directly or via the repeated-field builder) and
// clears its presence bit.
public Builder clear() {
super.clear();
if (sizesBuilder_ == null) {
sizes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
sizesBuilder_.clear();
}
return this;
}
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.getDefaultInstance();
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse(this);
int from_bitField0_ = bitField0_;
if (sizesBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
sizes_ = java.util.Collections.unmodifiableList(sizes_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.sizes_ = sizes_;
} else {
result.sizes_ = sizesBuilder_.build();
}
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.getDefaultInstance()) return this;
if (sizesBuilder_ == null) {
if (!other.sizes_.isEmpty()) {
if (sizes_.isEmpty()) {
sizes_ = other.sizes_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSizesIsMutable();
sizes_.addAll(other.sizes_);
}
onChanged();
}
} else {
if (!other.sizes_.isEmpty()) {
if (sizesBuilder_.isEmpty()) {
sizesBuilder_.dispose();
sizesBuilder_ = null;
sizes_ = other.sizes_;
bitField0_ = (bitField0_ & ~0x00000001);
sizesBuilder_ =
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getSizesFieldBuilder() : null;
} else {
sizesBuilder_.addAllMessages(other.sizes_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
for (int i = 0; i < getSizesCount(); i++) {
if (!getSizes(i).isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes> sizes_ =
java.util.Collections.emptyList();
private void ensureSizesIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
sizes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes>(sizes_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizesOrBuilder> sizesBuilder_;
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes> getSizesList() {
if (sizesBuilder_ == null) {
return java.util.Collections.unmodifiableList(sizes_);
} else {
return sizesBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public int getSizesCount() {
if (sizesBuilder_ == null) {
return sizes_.size();
} else {
return sizesBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes getSizes(int index) {
if (sizesBuilder_ == null) {
return sizes_.get(index);
} else {
return sizesBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public Builder setSizes(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes value) {
if (sizesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSizesIsMutable();
sizes_.set(index, value);
onChanged();
} else {
sizesBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public Builder setSizes(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder builderForValue) {
if (sizesBuilder_ == null) {
ensureSizesIsMutable();
sizes_.set(index, builderForValue.build());
onChanged();
} else {
sizesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public Builder addSizes(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes value) {
if (sizesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSizesIsMutable();
sizes_.add(value);
onChanged();
} else {
sizesBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public Builder addSizes(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes value) {
if (sizesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSizesIsMutable();
sizes_.add(index, value);
onChanged();
} else {
sizesBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public Builder addSizes(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder builderForValue) {
if (sizesBuilder_ == null) {
ensureSizesIsMutable();
sizes_.add(builderForValue.build());
onChanged();
} else {
sizesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public Builder addSizes(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder builderForValue) {
if (sizesBuilder_ == null) {
ensureSizesIsMutable();
sizes_.add(index, builderForValue.build());
onChanged();
} else {
sizesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public Builder addAllSizes(
java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes> values) {
if (sizesBuilder_ == null) {
ensureSizesIsMutable();
org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, sizes_);
onChanged();
} else {
sizesBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public Builder clearSizes() {
if (sizesBuilder_ == null) {
sizes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
sizesBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public Builder removeSizes(int index) {
if (sizesBuilder_ == null) {
ensureSizesIsMutable();
sizes_.remove(index);
onChanged();
} else {
sizesBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder getSizesBuilder(
int index) {
return getSizesFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizesOrBuilder getSizesOrBuilder(
int index) {
if (sizesBuilder_ == null) {
return sizes_.get(index); } else {
return sizesBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizesOrBuilder>
getSizesOrBuilderList() {
if (sizesBuilder_ != null) {
return sizesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(sizes_);
}
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder addSizesBuilder() {
return getSizesFieldBuilder().addBuilder(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder addSizesBuilder(
int index) {
return getSizesFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaRegionSizesResponse.RegionSizes sizes = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder>
getSizesBuilderList() {
return getSizesFieldBuilder().getBuilderList();
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizesOrBuilder>
getSizesFieldBuilder() {
if (sizesBuilder_ == null) {
sizesBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizesOrBuilder>(
sizes_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
sizes_ = null;
}
return sizesBuilder_;
}
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.GetSpaceQuotaRegionSizesResponse)
}
// @@protoc_insertion_point(class_scope:hbase.pb.GetSpaceQuotaRegionSizesResponse)
// Singleton default instance (all fields unset); created eagerly at class load.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse();
}
/** Returns the shared default (empty) instance of this message type. */
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser; deprecated for direct use — call parser() instead.
// Delegates to the parsing constructor of the message.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaRegionSizesResponse>
    PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<GetSpaceQuotaRegionSizesResponse>() {
  public GetSpaceQuotaRegionSizesResponse parsePartialFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return new GetSpaceQuotaRegionSizesResponse(input, extensionRegistry);
  }
};
/** Preferred accessor for this message type's parser. */
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaRegionSizesResponse> parser() {
  return PARSER;
}
// Instance-level parser accessor required by the Message interface.
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaRegionSizesResponse> getParserForType() {
  return PARSER;
}
// Instance-level default-instance accessor required by the Message interface.
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/**
 * Read-only accessor interface shared by {@code GetSpaceQuotaSnapshotsRequest}
 * and its Builder. The message declares no fields, so no accessors are needed.
 */
public interface GetSpaceQuotaSnapshotsRequestOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.GetSpaceQuotaSnapshotsRequest)
    org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
}
/**
 * Protobuf type {@code hbase.pb.GetSpaceQuotaSnapshotsRequest}
 *
 * <p>An empty request message: it declares no fields, so serialization
 * consists solely of any unknown fields carried through from parsing.
 */
@javax.annotation.Generated("proto") public static final class GetSpaceQuotaSnapshotsRequest extends
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hbase.pb.GetSpaceQuotaSnapshotsRequest)
    GetSpaceQuotaSnapshotsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use GetSpaceQuotaSnapshotsRequest.newBuilder() to construct.
  private GetSpaceQuotaSnapshotsRequest(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private GetSpaceQuotaSnapshotsRequest() {
  }
  @java.lang.Override
  public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Parsing constructor: with no declared fields, every non-zero tag is
  // preserved as an unknown field until end-of-stream (tag 0).
  private GetSpaceQuotaSnapshotsRequest(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsRequest_descriptor;
  }
  protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest.Builder.class);
  }
  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Only unknown fields can contribute to the wire form of this message.
  public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    unknownFields.writeTo(output);
  }
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Equality reduces to unknown-field equality since there are no fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest) obj;
    boolean result = true;
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---- Standard protoc-generated parse entry points for every input kind ----
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom(
      java.nio.ByteBuffer data)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom(byte[] data)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom(
      byte[] data,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom(
      java.io.InputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.GetSpaceQuotaSnapshotsRequest}
   *
   * <p>Builder for the (field-less) request; only unknown fields are merged.
   */
  @javax.annotation.Generated("proto") public static final class Builder extends
      org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:hbase.pb.GetSpaceQuotaSnapshotsRequest)
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequestOrBuilder {
    public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsRequest_descriptor;
    }
    protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest.Builder.class);
    }
    // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // No message-typed fields exist, so nothing to eagerly initialize.
    private void maybeForceBuilderInitialization() {
      if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      return this;
    }
    public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsRequest_descriptor;
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest.getDefaultInstance();
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest build() {
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest buildPartial() {
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest(this);
      onBuilt();
      return result;
    }
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest) {
        return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // With no declared fields, merging copies only the unknown-field set.
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest other) {
      if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    public final boolean isInitialized() {
      return true;
    }
    public Builder mergeFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    public final Builder setUnknownFields(
        final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    public final Builder mergeUnknownFields(
        final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.GetSpaceQuotaSnapshotsRequest)
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.GetSpaceQuotaSnapshotsRequest)
  // Singleton default instance; created eagerly at class load.
  private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest();
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; deprecated for direct use — call parser() instead.
  @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaSnapshotsRequest>
      PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<GetSpaceQuotaSnapshotsRequest>() {
    public GetSpaceQuotaSnapshotsRequest parsePartialFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return new GetSpaceQuotaSnapshotsRequest(input, extensionRegistry);
    }
  };
  public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaSnapshotsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaSnapshotsRequest> getParserForType() {
    return PARSER;
  }
  public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Read-only view shared by GetSpaceQuotaSnapshotsResponse and its Builder,
// exposing the repeated 'snapshots' field (table name + space-quota snapshot pairs).
public interface GetSpaceQuotaSnapshotsResponseOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.GetSpaceQuotaSnapshotsResponse)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot>
getSnapshotsList();
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot getSnapshots(int index);
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
int getSnapshotsCount();
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshotOrBuilder>
getSnapshotsOrBuilderList();
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshotOrBuilder getSnapshotsOrBuilder(
int index);
}
/**
* Protobuf type {@code hbase.pb.GetSpaceQuotaSnapshotsResponse}
*/
@javax.annotation.Generated("proto") public static final class GetSpaceQuotaSnapshotsResponse extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.GetSpaceQuotaSnapshotsResponse)
GetSpaceQuotaSnapshotsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetSpaceQuotaSnapshotsResponse.newBuilder() to construct.
private GetSpaceQuotaSnapshotsResponse(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default constructor: initializes the repeated field to an empty list.
private GetSpaceQuotaSnapshotsResponse() {
snapshots_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Reads tags until EOF (tag 0), collecting
// 'snapshots' entries (field 1, tag 10) and routing everything else to the
// unknown-field set. The finally block freezes the list and unknown fields
// even when parsing fails partway through.
private GetSpaceQuotaSnapshotsResponse(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Lazily switch from the immutable empty list to a mutable one on
// first element; the bit tracks that the switch happened.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
snapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot>();
mutable_bitField0_ |= 0x00000001;
}
snapshots_.add(
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.PARSER, extensionRegistry));
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
snapshots_ = java.util.Collections.unmodifiableList(snapshots_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_descriptor;
}
// Binds the descriptor to this class/Builder pair for reflective field access.
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.Builder.class);
}
// Read-only view for the TableQuotaSnapshot nested message: an optional
// table name (field 1) paired with an optional space-quota snapshot (field 2).
public interface TableQuotaSnapshotOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot)
org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
boolean hasTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
boolean hasSnapshot();
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getSnapshot();
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder getSnapshotOrBuilder();
}
/**
* <pre>
* Cannot use TableName as a map key, do the repeated nested message by hand.
* </pre>
*
* Protobuf type {@code hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot}
*/
@javax.annotation.Generated("proto") public static final class TableQuotaSnapshot extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot)
TableQuotaSnapshotOrBuilder {
private static final long serialVersionUID = 0L;
// Use TableQuotaSnapshot.newBuilder() to construct.
private TableQuotaSnapshot(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default constructor: both optional message fields remain null (unset).
private TableQuotaSnapshot() {
}
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Handles table_name (field 1, tag 10) and
// snapshot (field 2, tag 18); anything else goes to the unknown-field set.
// If a field appears more than once, the later occurrence is merged into the
// earlier one via a sub-builder, per protobuf merge semantics.
private TableQuotaSnapshot(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 18: {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = snapshot_.toBuilder();
}
snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(snapshot_);
snapshot_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_TableQuotaSnapshot_descriptor;
}
// Binds the descriptor to this class/Builder pair for reflective field access.
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_TableQuotaSnapshot_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder.class);
}
// Presence bits: 0x1 = table_name set, 0x2 = snapshot set.
private int bitField0_;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public boolean hasTableName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
* Returns the default TableName instance when the field is unset (null).
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
public static final int SNAPSHOT_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot snapshot_;
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
public boolean hasSnapshot() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
* Returns the default SpaceQuotaSnapshot instance when the field is unset (null).
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getSnapshot() {
return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
}
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder getSnapshotOrBuilder() {
return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// Initialized iff the nested table_name (when present) is itself initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasTableName()) {
if (!getTableName().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bits are set, then unknown fields.
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, getTableName());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, getSnapshot());
}
unknownFields.writeTo(output);
}
// Computes and memoizes the serialized byte size (memoizedSize == -1 means
// "not computed yet").
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getTableName());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getSnapshot());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Field-wise equality: presence flags must match, and set fields must be equal;
// unknown fields are also compared.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot) obj;
boolean result = true;
result = result && (hasTableName() == other.hasTableName());
if (hasTableName()) {
result = result && getTableName()
.equals(other.getTableName());
}
result = result && (hasSnapshot() == other.hasSnapshot());
if (hasSnapshot()) {
result = result && getSnapshot()
.equals(other.getSnapshot());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
// Memoized hash consistent with equals(); mixes field numbers with field hashes.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTableName()) {
hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getTableName().hashCode();
}
if (hasSnapshot()) {
hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
hash = (53 * hash) + getSnapshot().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protobuf parseFrom overload family: byte sources delegate to PARSER,
// stream sources go through GeneratedMessageV3's IOException-translating helpers.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories: toBuilder() avoids a needless copy when called on the
// default instance.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Cannot use TableName as a map key, do the repeated nested message by hand.
* </pre>
*
* Protobuf type {@code hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot)
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshotOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_TableQuotaSnapshot_descriptor;
}
// Binds the descriptor to this class/Builder pair for reflective field access.
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_TableQuotaSnapshot_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested-field builders when the runtime always uses
// field builders (e.g. when change-tracking parents are involved).
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTableNameFieldBuilder();
getSnapshotFieldBuilder();
}
}
// Resets both fields and clears their presence bits.
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
tableName_ = null;
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
if (snapshotBuilder_ == null) {
snapshot_ = null;
} else {
snapshotBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_TableQuotaSnapshot_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.getDefaultInstance();
}
// Builds the message, failing if required invariants are not met.
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message without the initialization check.
// Each field comes from either the raw value or its field builder, whichever
// is active, and the presence bits are translated into the message's bitField0_.
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (tableNameBuilder_ == null) {
result.tableName_ = tableName_;
} else {
result.tableName_ = tableNameBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (snapshotBuilder_ == null) {
result.snapshot_ = snapshot_;
} else {
result.snapshot_ = snapshotBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// The following overrides narrow the return type to Builder for fluent chaining.
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Generic merge entry point: dispatches to the type-safe overload when the
// argument is a TableQuotaSnapshot, otherwise defers to the reflective merge.
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Type-safe merge: copies each present field from 'other' into this builder.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.getDefaultInstance()) return this;
if (other.hasTableName()) {
mergeTableName(other.getTableName());
}
if (other.hasSnapshot()) {
mergeSnapshot(other.getSnapshot());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// Initialized iff table_name, when present, is itself initialized.
public final boolean isInitialized() {
if (hasTableName()) {
if (!getTableName().isInitialized()) {
return false;
}
}
return true;
}
// Parses a message from the wire and merges it into this builder; the
// partially-parsed message is still merged when parsing throws.
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits mirroring the message: 0x1 = table_name, 0x2 = snapshot.
private int bitField0_;
// Raw value used until getTableNameFieldBuilder() is first called; after that
// the field builder owns the state and tableName_ is nulled out.
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public boolean hasTableName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
} else {
return tableNameBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tableName_ = value;
onChanged();
} else {
tableNameBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
} else {
tableNameBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
* Merges rather than replaces when a non-default value is already set.
*/
public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
tableName_ != null &&
tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
tableName_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
onChanged();
} else {
tableNameBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
tableName_ = null;
onChanged();
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
* Marks the field present and hands out a mutable nested builder.
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
return tableName_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
* Lazily creates the SingleFieldBuilderV3, transferring ownership of the
* current value from tableName_ into it.
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
getTableName(),
getParentForChildren(),
isClean());
tableName_ = null;
}
return tableNameBuilder_;
}
// Raw value used until getSnapshotFieldBuilder() is first called; after that
// the field builder owns the state.
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot snapshot_ = null;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder> snapshotBuilder_;
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
public boolean hasSnapshot() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getSnapshot() {
if (snapshotBuilder_ == null) {
return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
} else {
return snapshotBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot value) {
if (snapshotBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
snapshot_ = value;
onChanged();
} else {
snapshotBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
public Builder setSnapshot(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder builderForValue) {
if (snapshotBuilder_ == null) {
snapshot_ = builderForValue.build();
onChanged();
} else {
snapshotBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot value) {
if (snapshotBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
snapshot_ != null &&
snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance()) {
snapshot_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.newBuilder(snapshot_).mergeFrom(value).buildPartial();
} else {
snapshot_ = value;
}
onChanged();
} else {
snapshotBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
public Builder clearSnapshot() {
if (snapshotBuilder_ == null) {
snapshot_ = null;
onChanged();
} else {
snapshotBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder getSnapshotBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getSnapshotFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder getSnapshotOrBuilder() {
if (snapshotBuilder_ != null) {
return snapshotBuilder_.getMessageOrBuilder();
} else {
return snapshot_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
}
}
/**
* <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
*/
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder>
getSnapshotFieldBuilder() {
if (snapshotBuilder_ == null) {
snapshotBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder>(
getSnapshot(),
getParentForChildren(),
isClean());
snapshot_ = null;
}
return snapshotBuilder_;
}
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot)
}
// @@protoc_insertion_point(class_scope:hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot)
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<TableQuotaSnapshot>
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<TableQuotaSnapshot>() {
public TableQuotaSnapshot parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new TableQuotaSnapshot(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<TableQuotaSnapshot> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<TableQuotaSnapshot> getParserForType() {
return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public static final int SNAPSHOTS_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot> snapshots_;
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot> getSnapshotsList() {
return snapshots_;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshotOrBuilder>
getSnapshotsOrBuilderList() {
return snapshots_;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public int getSnapshotsCount() {
return snapshots_.size();
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot getSnapshots(int index) {
return snapshots_.get(index);
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshotOrBuilder getSnapshotsOrBuilder(
int index) {
return snapshots_.get(index);
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
for (int i = 0; i < getSnapshotsCount(); i++) {
if (!getSnapshots(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < snapshots_.size(); i++) {
output.writeMessage(1, snapshots_.get(i));
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < snapshots_.size(); i++) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(1, snapshots_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse) obj;
boolean result = true;
result = result && getSnapshotsList()
.equals(other.getSnapshotsList());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getSnapshotsCount() > 0) {
hash = (37 * hash) + SNAPSHOTS_FIELD_NUMBER;
hash = (53 * hash) + getSnapshotsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.GetSpaceQuotaSnapshotsResponse}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.GetSpaceQuotaSnapshotsResponse)
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponseOrBuilder {
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getSnapshotsFieldBuilder();
}
}
public Builder clear() {
super.clear();
if (snapshotsBuilder_ == null) {
snapshots_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
snapshotsBuilder_.clear();
}
return this;
}
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.getDefaultInstance();
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse(this);
int from_bitField0_ = bitField0_;
if (snapshotsBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
snapshots_ = java.util.Collections.unmodifiableList(snapshots_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.snapshots_ = snapshots_;
} else {
result.snapshots_ = snapshotsBuilder_.build();
}
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.getDefaultInstance()) return this;
if (snapshotsBuilder_ == null) {
if (!other.snapshots_.isEmpty()) {
if (snapshots_.isEmpty()) {
snapshots_ = other.snapshots_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSnapshotsIsMutable();
snapshots_.addAll(other.snapshots_);
}
onChanged();
}
} else {
if (!other.snapshots_.isEmpty()) {
if (snapshotsBuilder_.isEmpty()) {
snapshotsBuilder_.dispose();
snapshotsBuilder_ = null;
snapshots_ = other.snapshots_;
bitField0_ = (bitField0_ & ~0x00000001);
snapshotsBuilder_ =
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getSnapshotsFieldBuilder() : null;
} else {
snapshotsBuilder_.addAllMessages(other.snapshots_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
for (int i = 0; i < getSnapshotsCount(); i++) {
if (!getSnapshots(i).isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot> snapshots_ =
java.util.Collections.emptyList();
private void ensureSnapshotsIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
snapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot>(snapshots_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshotOrBuilder> snapshotsBuilder_;
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot> getSnapshotsList() {
if (snapshotsBuilder_ == null) {
return java.util.Collections.unmodifiableList(snapshots_);
} else {
return snapshotsBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public int getSnapshotsCount() {
if (snapshotsBuilder_ == null) {
return snapshots_.size();
} else {
return snapshotsBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot getSnapshots(int index) {
if (snapshotsBuilder_ == null) {
return snapshots_.get(index);
} else {
return snapshotsBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public Builder setSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot value) {
if (snapshotsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSnapshotsIsMutable();
snapshots_.set(index, value);
onChanged();
} else {
snapshotsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public Builder setSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder builderForValue) {
if (snapshotsBuilder_ == null) {
ensureSnapshotsIsMutable();
snapshots_.set(index, builderForValue.build());
onChanged();
} else {
snapshotsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public Builder addSnapshots(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot value) {
if (snapshotsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSnapshotsIsMutable();
snapshots_.add(value);
onChanged();
} else {
snapshotsBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public Builder addSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot value) {
if (snapshotsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSnapshotsIsMutable();
snapshots_.add(index, value);
onChanged();
} else {
snapshotsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public Builder addSnapshots(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder builderForValue) {
if (snapshotsBuilder_ == null) {
ensureSnapshotsIsMutable();
snapshots_.add(builderForValue.build());
onChanged();
} else {
snapshotsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public Builder addSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder builderForValue) {
if (snapshotsBuilder_ == null) {
ensureSnapshotsIsMutable();
snapshots_.add(index, builderForValue.build());
onChanged();
} else {
snapshotsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public Builder addAllSnapshots(
java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot> values) {
if (snapshotsBuilder_ == null) {
ensureSnapshotsIsMutable();
org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, snapshots_);
onChanged();
} else {
snapshotsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public Builder clearSnapshots() {
if (snapshotsBuilder_ == null) {
snapshots_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
snapshotsBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public Builder removeSnapshots(int index) {
if (snapshotsBuilder_ == null) {
ensureSnapshotsIsMutable();
snapshots_.remove(index);
onChanged();
} else {
snapshotsBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder getSnapshotsBuilder(
int index) {
return getSnapshotsFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshotOrBuilder getSnapshotsOrBuilder(
int index) {
if (snapshotsBuilder_ == null) {
return snapshots_.get(index); } else {
return snapshotsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshotOrBuilder>
getSnapshotsOrBuilderList() {
if (snapshotsBuilder_ != null) {
return snapshotsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(snapshots_);
}
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder addSnapshotsBuilder() {
return getSnapshotsFieldBuilder().addBuilder(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder addSnapshotsBuilder(
int index) {
return getSnapshotsFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot snapshots = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder>
getSnapshotsBuilderList() {
return getSnapshotsFieldBuilder().getBuilderList();
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshotOrBuilder>
getSnapshotsFieldBuilder() {
if (snapshotsBuilder_ == null) {
snapshotsBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshotOrBuilder>(
snapshots_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
snapshots_ = null;
}
return snapshotsBuilder_;
}
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.GetSpaceQuotaSnapshotsResponse)
}
// @@protoc_insertion_point(class_scope:hbase.pb.GetSpaceQuotaSnapshotsResponse)
// Singleton immutable default instance for GetSpaceQuotaSnapshotsResponse,
// created eagerly in the static initializer below.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Deprecated public parser field (kept for generated-code compatibility);
// callers should use parser() instead.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaSnapshotsResponse>
    PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<GetSpaceQuotaSnapshotsResponse>() {
  public GetSpaceQuotaSnapshotsResponse parsePartialFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return new GetSpaceQuotaSnapshotsResponse(input, extensionRegistry);
  }
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaSnapshotsResponse> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetSpaceQuotaSnapshotsResponse> getParserForType() {
  return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/**
 * Read-only accessor interface for {@code hbase.pb.GetQuotaStatesRequest}.
 * The message declares no fields, so only the base MessageOrBuilder
 * contract applies.
 */
public interface GetQuotaStatesRequestOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.GetQuotaStatesRequest)
    org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
}
/**
 * Protobuf type {@code hbase.pb.GetQuotaStatesRequest}
 *
 * <p>Generated message with no declared fields; it acts purely as the
 * request envelope for the GetQuotaStates RPC. NOTE: this file is
 * protoc-generated — hand edits will be lost on regeneration.
 */
@javax.annotation.Generated("proto") public static final class GetQuotaStatesRequest extends
    org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hbase.pb.GetQuotaStatesRequest)
    GetQuotaStatesRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use GetQuotaStatesRequest.newBuilder() to construct.
  private GetQuotaStatesRequest(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private GetQuotaStatesRequest() {
  }
  @java.lang.Override
  public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-parsing constructor: since the message has no fields, every tag is
  // either end-of-message (tag 0) or preserved verbatim as an unknown field.
  private GetQuotaStatesRequest(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Always freeze whatever was parsed, even on failure, so the partial
      // message attached to the exception is immutable.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesRequest_descriptor;
  }
  protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest.Builder.class);
  }
  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // No declared fields: only unknown fields are serialized.
    unknownFields.writeTo(output);
  }
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest) obj;
    boolean result = true;
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom(
      java.nio.ByteBuffer data)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom(byte[] data)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom(
      byte[] data,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom(
      java.io.InputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom(
      org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
      org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; anything else is
    // copied field-by-field via mergeFrom.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.GetQuotaStatesRequest}
   *
   * <p>Builder for the (field-less) request message; all mutators simply
   * delegate to the GeneratedMessageV3.Builder base class.
   */
  @javax.annotation.Generated("proto") public static final class Builder extends
      org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:hbase.pb.GetQuotaStatesRequest)
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequestOrBuilder {
    public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesRequest_descriptor;
    }
    protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest.Builder.class);
    }
    // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No nested field builders exist for this message, so there is nothing
      // to force-initialize even when alwaysUseFieldBuilders is set.
      if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      return this;
    }
    public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesRequest_descriptor;
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest.getDefaultInstance();
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest build() {
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest buildPartial() {
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest(this);
      onBuilt();
      return result;
    }
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest) {
        return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest other) {
      // Merging the default instance is a no-op; otherwise only unknown
      // fields can carry data for this field-less message.
      if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    public final boolean isInitialized() {
      return true;
    }
    public Builder mergeFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure so callers can
        // inspect/merge the partial message, then rethrow as IOException.
        parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    public final Builder setUnknownFields(
        final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    public final Builder mergeUnknownFields(
        final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:hbase.pb.GetQuotaStatesRequest)
  }
  // @@protoc_insertion_point(class_scope:hbase.pb.GetQuotaStatesRequest)
  // Singleton default instance, created eagerly at class load.
  private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest();
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Deprecated public parser field (generated for compatibility); prefer parser().
  @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetQuotaStatesRequest>
      PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<GetQuotaStatesRequest>() {
    public GetQuotaStatesRequest parsePartialFrom(
        org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
        org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
      return new GetQuotaStatesRequest(input, extensionRegistry);
    }
  };
  public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetQuotaStatesRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetQuotaStatesRequest> getParserForType() {
    return PARSER;
  }
  public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/**
 * Read-only accessor interface for {@code hbase.pb.GetQuotaStatesResponse}:
 * two repeated message fields, per-table and per-namespace quota snapshots.
 */
public interface GetQuotaStatesResponseOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.GetQuotaStatesResponse)
    org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
  /**
   * <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot>
      getTableSnapshotsList();
  /**
   * <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot getTableSnapshots(int index);
  /**
   * <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
   */
  int getTableSnapshotsCount();
  /**
   * <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshotOrBuilder>
      getTableSnapshotsOrBuilderList();
  /**
   * <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshotOrBuilder getTableSnapshotsOrBuilder(
      int index);
  /**
   * <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot>
      getNsSnapshotsList();
  /**
   * <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot getNsSnapshots(int index);
  /**
   * <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
   */
  int getNsSnapshotsCount();
  /**
   * <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshotOrBuilder>
      getNsSnapshotsOrBuilderList();
  /**
   * <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshotOrBuilder getNsSnapshotsOrBuilder(
      int index);
}
/**
* Protobuf type {@code hbase.pb.GetQuotaStatesResponse}
*/
@javax.annotation.Generated("proto") public static final class GetQuotaStatesResponse extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.GetQuotaStatesResponse)
GetQuotaStatesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetQuotaStatesResponse.newBuilder() to construct.
private GetQuotaStatesResponse(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default constructor: both repeated fields start as shared empty lists.
private GetQuotaStatesResponse() {
  tableSnapshots_ = java.util.Collections.emptyList();
  nsSnapshots_ = java.util.Collections.emptyList();
}
// Exposes fields that were on the wire but are not in this message's schema.
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-parsing constructor. Field tags: 10 = table_snapshots (field 1,
// length-delimited), 18 = ns_snapshots (field 2, length-delimited).
// Repeated fields are accumulated into mutable ArrayLists guarded by
// mutable_bitField0_ and frozen to unmodifiable lists in the finally block.
private GetQuotaStatesResponse(
    org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
    org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // NOTE: the `default` label appearing before the numbered cases has no
      // effect on matching — Java switch dispatches on value, not order.
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          // First occurrence lazily swaps the shared empty list for a
          // mutable one; the bit records that a swap happened.
          if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
            tableSnapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot>();
            mutable_bitField0_ |= 0x00000001;
          }
          tableSnapshots_.add(
              input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.PARSER, extensionRegistry));
          break;
        }
        case 18: {
          if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
            nsSnapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot>();
            mutable_bitField0_ |= 0x00000002;
          }
          nsSnapshots_.add(
              input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.PARSER, extensionRegistry));
          break;
        }
      }
    }
  } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Freeze any lists that were populated, even on error, so the partial
    // message attached to the exception is immutable.
    if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
      tableSnapshots_ = java.util.Collections.unmodifiableList(tableSnapshots_);
    }
    if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
      nsSnapshots_ = java.util.Collections.unmodifiableList(nsSnapshots_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor accessors for GetQuotaStatesResponse (reflection support).
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.Builder.class);
}
/**
 * Read-only accessor interface for
 * {@code hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot}: an optional
 * table name paired with an optional space-quota snapshot.
 */
public interface TableQuotaSnapshotOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot)
    org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {
  /**
   * <code>optional .hbase.pb.TableName table_name = 1;</code>
   */
  boolean hasTableName();
  /**
   * <code>optional .hbase.pb.TableName table_name = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName();
  /**
   * <code>optional .hbase.pb.TableName table_name = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
  /**
   * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
   */
  boolean hasSnapshot();
  /**
   * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getSnapshot();
  /**
   * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder getSnapshotOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot}
*/
@javax.annotation.Generated("proto") public static final class TableQuotaSnapshot extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot)
TableQuotaSnapshotOrBuilder {
private static final long serialVersionUID = 0L;
// Use TableQuotaSnapshot.newBuilder() to construct.
private TableQuotaSnapshot(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default constructor: both optional message fields remain null until set.
private TableQuotaSnapshot() {
}
// Exposes fields that were on the wire but are not in this message's schema.
@java.lang.Override
public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-parsing constructor. Field tags: 10 = table_name (field 1),
// 18 = snapshot (field 2), both length-delimited sub-messages. If a field
// repeats on the wire, the later occurrence is merged into the earlier via
// a sub-builder (standard proto2 "last merge wins" semantics for messages).
private TableQuotaSnapshot(
    org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
    org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // `default` listed before the numbered cases is harmless: switch
      // matches on value, not label order.
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            // Field already seen: merge the new value into the old one.
            subBuilder = tableName_.toBuilder();
          }
          tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(tableName_);
            tableName_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000001;
          break;
        }
        case 18: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder subBuilder = null;
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            subBuilder = snapshot_.toBuilder();
          }
          snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(snapshot_);
            snapshot_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000002;
          break;
        }
      }
    }
  } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor accessors for TableQuotaSnapshot (reflection support).
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_TableQuotaSnapshot_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_TableQuotaSnapshot_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder.class);
}
// Presence bits for the two optional fields: 0x1 = table_name, 0x2 = snapshot.
private int bitField0_;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
/**
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public boolean hasTableName() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 * Returns the type's default instance when the field is unset (never null).
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
  return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
/**
 * <code>optional .hbase.pb.TableName table_name = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
  return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
public static final int SNAPSHOT_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot snapshot_;
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 */
public boolean hasSnapshot() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 * Returns the type's default instance when the field is unset (never null).
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getSnapshot() {
  return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
}
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder getSnapshotOrBuilder() {
  return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // table_name is the only sub-message with required fields to recurse into.
  if (hasTableName()) {
    if (!getTableName().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields in field-number order, then any unknown fields.
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(1, getTableName());
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeMessage(2, getSnapshot());
  }
  unknownFields.writeTo(output);
}
// Computes (and memoizes) the encoded byte size; must mirror writeTo exactly.
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, getTableName());
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, getSnapshot());
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality: same presence bits, equal field values, equal unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot) obj;
  boolean result = true;
  result = result && (hasTableName() == other.hasTableName());
  if (hasTableName()) {
    result = result && getTableName()
        .equals(other.getTableName());
  }
  result = result && (hasSnapshot() == other.hasSnapshot());
  if (hasSnapshot()) {
    result = result && getSnapshot()
        .equals(other.getSnapshot());
  }
  result = result && unknownFields.equals(other.unknownFields);
  return result;
}
      /**
       * Hash code consistent with {@link #equals}: mixes the descriptor,
       * each present field's number and value, and the unknown fields.
       * Memoized in {@code memoizedHashCode} (0 = not yet computed).
       * The 41/19/37/53/29 multipliers are the standard protobuf-generated
       * mixing constants.
       */
      @java.lang.Override
      public int hashCode() {
        if (memoizedHashCode != 0) {
          return memoizedHashCode;
        }
        int hash = 41;
        hash = (19 * hash) + getDescriptor().hashCode();
        if (hasTableName()) {
          hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
          hash = (53 * hash) + getTableName().hashCode();
        }
        if (hasSnapshot()) {
          hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
          hash = (53 * hash) + getSnapshot().hashCode();
        }
        hash = (29 * hash) + unknownFields.hashCode();
        memoizedHashCode = hash;
        return hash;
      }
      // ---------------------------------------------------------------------
      // Static parse entry points. All overloads delegate to PARSER (or the
      // GeneratedMessageV3 IOException-wrapping helpers for stream input) and
      // differ only in input type: ByteBuffer, ByteString, byte[],
      // InputStream, delimited InputStream, or CodedInputStream — each with
      // and without an ExtensionRegistryLite.
      // ---------------------------------------------------------------------
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom(
          java.nio.ByteBuffer data)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom(
          java.nio.ByteBuffer data,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom(byte[] data)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom(
          byte[] data,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
            .parseWithIOException(PARSER, input);
      }
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom(
          java.io.InputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
            .parseWithIOException(PARSER, input, extensionRegistry);
      }
      // Delimited variants expect a varint length prefix before the message
      // (as written by writeDelimitedTo), allowing several messages per stream.
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
            .parseDelimitedWithIOException(PARSER, input);
      }
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseDelimitedFrom(
          java.io.InputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
            .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
            .parseWithIOException(PARSER, input);
      }
      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
            .parseWithIOException(PARSER, input, extensionRegistry);
      }
      // Builder factory methods.
      /** Returns a fresh Builder for this message type. */
      public Builder newBuilderForType() { return newBuilder(); }
      /** Returns a new Builder, created from the default instance. */
      public static Builder newBuilder() {
        return DEFAULT_INSTANCE.toBuilder();
      }
      /** Returns a new Builder pre-populated with {@code prototype}'s fields. */
      public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot prototype) {
        return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
      }
      /**
       * Returns a Builder carrying this message's fields; the default instance
       * short-circuits to an empty Builder to avoid a needless merge.
       */
      public Builder toBuilder() {
        return this == DEFAULT_INSTANCE
            ? new Builder() : new Builder().mergeFrom(this);
      }

      /** Internal hook used by the runtime to create parented builders. */
      @java.lang.Override
      protected Builder newBuilderForType(
          org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
      /**
       * Builder for {@code hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot}.
       *
       * Mutable companion of the immutable message. Field presence is tracked
       * in {@code bitField0_} (0x1 = table_name, 0x2 = snapshot). Each message
       * field is stored either directly ({@code tableName_}/{@code snapshot_})
       * or through a lazily created {@code SingleFieldBuilderV3}; once the
       * field builder exists, it is the single source of truth and the direct
       * field is nulled. Not thread-safe.
       */
      @javax.annotation.Generated("proto") public static final class Builder extends
          org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
          // @@protoc_insertion_point(builder_implements:hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot)
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshotOrBuilder {
        public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_TableQuotaSnapshot_descriptor;
        }

        protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_TableQuotaSnapshot_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder.class);
        }

        // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Eagerly creates sub-field builders when the runtime requests it
        // (alwaysUseFieldBuilders is an internal runtime flag).
        private void maybeForceBuilderInitialization() {
          if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
                  .alwaysUseFieldBuilders) {
            getTableNameFieldBuilder();
            getSnapshotFieldBuilder();
          }
        }
        /** Resets both fields and clears their presence bits. */
        public Builder clear() {
          super.clear();
          if (tableNameBuilder_ == null) {
            tableName_ = null;
          } else {
            tableNameBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000001);
          if (snapshotBuilder_ == null) {
            snapshot_ = null;
          } else {
            snapshotBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000002);
          return this;
        }

        public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_TableQuotaSnapshot_descriptor;
        }

        public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot getDefaultInstanceForType() {
          return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.getDefaultInstance();
        }

        /**
         * Builds the message, throwing UninitializedMessageException if a
         * required sub-field is missing (see {@link #isInitialized()}).
         */
        public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot build() {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        /**
         * Builds without the initialization check, copying presence bits from
         * the builder's bitField0_ and each field from either the direct
         * reference or its field builder.
         */
        public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot buildPartial() {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          if (tableNameBuilder_ == null) {
            result.tableName_ = tableName_;
          } else {
            result.tableName_ = tableNameBuilder_.build();
          }
          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
            to_bitField0_ |= 0x00000002;
          }
          if (snapshotBuilder_ == null) {
            result.snapshot_ = snapshot_;
          } else {
            result.snapshot_ = snapshotBuilder_.build();
          }
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }

        public Builder clone() {
          return (Builder) super.clone();
        }
        public Builder setField(
            org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
            java.lang.Object value) {
          return (Builder) super.setField(field, value);
        }
        public Builder clearField(
            org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
          return (Builder) super.clearField(field);
        }
        public Builder clearOneof(
            org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
          return (Builder) super.clearOneof(oneof);
        }
        public Builder setRepeatedField(
            org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
            int index, java.lang.Object value) {
          return (Builder) super.setRepeatedField(field, index, value);
        }
        public Builder addRepeatedField(
            org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
            java.lang.Object value) {
          return (Builder) super.addRepeatedField(field, value);
        }
        /** Dispatches to the typed overload when the type matches, else to the reflective merge. */
        public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot) {
            return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        /** Merges each present field of {@code other} into this builder. */
        public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot other) {
          if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.getDefaultInstance()) return this;
          if (other.hasTableName()) {
            mergeTableName(other.getTableName());
          }
          if (other.hasSnapshot()) {
            mergeSnapshot(other.getSnapshot());
          }
          this.mergeUnknownFields(other.unknownFields);
          onChanged();
          return this;
        }

        /** Mirrors the message's check: table_name, if set, must be initialized. */
        public final boolean isInitialized() {
          if (hasTableName()) {
            if (!getTableName().isInitialized()) {
              return false;
            }
          }
          return true;
        }

        /**
         * Parses from a stream and merges the result; on a parse error the
         * partially parsed message (if any) is still merged before rethrowing.
         */
        public Builder mergeFrom(
            org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
            org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot) e.getUnfinishedMessage();
            throw e.unwrapIOException();
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Presence bits: 0x1 = table_name, 0x2 = snapshot.
        private int bitField0_;

        // --- field: optional .hbase.pb.TableName table_name = 1 ---
        private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null;
        private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
        /**
         * <code>optional .hbase.pb.TableName table_name = 1;</code>
         */
        public boolean hasTableName() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>optional .hbase.pb.TableName table_name = 1;</code>
         */
        public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
          if (tableNameBuilder_ == null) {
            return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
          } else {
            return tableNameBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hbase.pb.TableName table_name = 1;</code>
         */
        public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
          if (tableNameBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            tableName_ = value;
            onChanged();
          } else {
            tableNameBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000001;
          return this;
        }
        /**
         * <code>optional .hbase.pb.TableName table_name = 1;</code>
         */
        public Builder setTableName(
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
          if (tableNameBuilder_ == null) {
            tableName_ = builderForValue.build();
            onChanged();
          } else {
            tableNameBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000001;
          return this;
        }
        /**
         * <code>optional .hbase.pb.TableName table_name = 1;</code>
         *
         * Merges field-by-field when a non-default value is already set;
         * otherwise replaces outright.
         */
        public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
          if (tableNameBuilder_ == null) {
            if (((bitField0_ & 0x00000001) == 0x00000001) &&
                tableName_ != null &&
                tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
              tableName_ =
                org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
            } else {
              tableName_ = value;
            }
            onChanged();
          } else {
            tableNameBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000001;
          return this;
        }
        /**
         * <code>optional .hbase.pb.TableName table_name = 1;</code>
         */
        public Builder clearTableName() {
          if (tableNameBuilder_ == null) {
            tableName_ = null;
            onChanged();
          } else {
            tableNameBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000001);
          return this;
        }
        /**
         * <code>optional .hbase.pb.TableName table_name = 1;</code>
         *
         * Marks the field present and hands back a mutable sub-builder.
         */
        public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
          bitField0_ |= 0x00000001;
          onChanged();
          return getTableNameFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hbase.pb.TableName table_name = 1;</code>
         */
        public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
          if (tableNameBuilder_ != null) {
            return tableNameBuilder_.getMessageOrBuilder();
          } else {
            return tableName_ == null ?
                org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
          }
        }
        /**
         * <code>optional .hbase.pb.TableName table_name = 1;</code>
         *
         * Lazily creates the field builder; afterwards tableName_ is nulled
         * and the builder owns the value.
         */
        private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
            getTableNameFieldBuilder() {
          if (tableNameBuilder_ == null) {
            tableNameBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
                org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
                    getTableName(),
                    getParentForChildren(),
                    isClean());
            tableName_ = null;
          }
          return tableNameBuilder_;
        }

        // --- field: optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2 ---
        private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot snapshot_ = null;
        private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder> snapshotBuilder_;
        /**
         * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
         */
        public boolean hasSnapshot() {
          return ((bitField0_ & 0x00000002) == 0x00000002);
        }
        /**
         * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
         */
        public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getSnapshot() {
          if (snapshotBuilder_ == null) {
            return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
          } else {
            return snapshotBuilder_.getMessage();
          }
        }
        /**
         * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
         */
        public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot value) {
          if (snapshotBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            snapshot_ = value;
            onChanged();
          } else {
            snapshotBuilder_.setMessage(value);
          }
          bitField0_ |= 0x00000002;
          return this;
        }
        /**
         * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
         */
        public Builder setSnapshot(
            org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder builderForValue) {
          if (snapshotBuilder_ == null) {
            snapshot_ = builderForValue.build();
            onChanged();
          } else {
            snapshotBuilder_.setMessage(builderForValue.build());
          }
          bitField0_ |= 0x00000002;
          return this;
        }
        /**
         * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
         *
         * Merges field-by-field when a non-default value is already set;
         * otherwise replaces outright.
         */
        public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot value) {
          if (snapshotBuilder_ == null) {
            if (((bitField0_ & 0x00000002) == 0x00000002) &&
                snapshot_ != null &&
                snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance()) {
              snapshot_ =
                org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.newBuilder(snapshot_).mergeFrom(value).buildPartial();
            } else {
              snapshot_ = value;
            }
            onChanged();
          } else {
            snapshotBuilder_.mergeFrom(value);
          }
          bitField0_ |= 0x00000002;
          return this;
        }
        /**
         * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
         */
        public Builder clearSnapshot() {
          if (snapshotBuilder_ == null) {
            snapshot_ = null;
            onChanged();
          } else {
            snapshotBuilder_.clear();
          }
          bitField0_ = (bitField0_ & ~0x00000002);
          return this;
        }
        /**
         * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
         *
         * Marks the field present and hands back a mutable sub-builder.
         */
        public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder getSnapshotBuilder() {
          bitField0_ |= 0x00000002;
          onChanged();
          return getSnapshotFieldBuilder().getBuilder();
        }
        /**
         * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
         */
        public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder getSnapshotOrBuilder() {
          if (snapshotBuilder_ != null) {
            return snapshotBuilder_.getMessageOrBuilder();
          } else {
            return snapshot_ == null ?
                org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
          }
        }
        /**
         * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
         *
         * Lazily creates the field builder; afterwards snapshot_ is nulled
         * and the builder owns the value.
         */
        private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder>
            getSnapshotFieldBuilder() {
          if (snapshotBuilder_ == null) {
            snapshotBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
                org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder>(
                    getSnapshot(),
                    getParentForChildren(),
                    isClean());
            snapshot_ = null;
          }
          return snapshotBuilder_;
        }
        public final Builder setUnknownFields(
            final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
          return super.setUnknownFields(unknownFields);
        }

        public final Builder mergeUnknownFields(
            final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
          return super.mergeUnknownFields(unknownFields);
        }


        // @@protoc_insertion_point(builder_scope:hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot)
      }
      // @@protoc_insertion_point(class_scope:hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot)
      // Shared immutable singleton representing the empty message; all
      // getDefaultInstance()/default-field lookups resolve to this object.
      private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot DEFAULT_INSTANCE;
      static {
        DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot();
      }

      public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot getDefaultInstance() {
        return DEFAULT_INSTANCE;
      }

      // Deprecated for direct use — prefer parser(); kept public for
      // backward compatibility with older generated-code callers.
      @java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<TableQuotaSnapshot>
          PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<TableQuotaSnapshot>() {
        public TableQuotaSnapshot parsePartialFrom(
            org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
            org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
            return new TableQuotaSnapshot(input, extensionRegistry);
        }
      };

      public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<TableQuotaSnapshot> parser() {
        return PARSER;
      }

      @java.lang.Override
      public org.apache.hbase.thirdparty.com.google.protobuf.Parser<TableQuotaSnapshot> getParserForType() {
        return PARSER;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot getDefaultInstanceForType() {
        return DEFAULT_INSTANCE;
      }
}
    /**
     * Read-only accessor interface implemented by both
     * {@code GetQuotaStatesResponse.NamespaceQuotaSnapshot} and its Builder:
     * an (optional namespace name, optional space-quota snapshot) pair.
     */
    public interface NamespaceQuotaSnapshotOrBuilder extends
        // @@protoc_insertion_point(interface_extends:hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot)
        org.apache.hbase.thirdparty.com.google.protobuf.MessageOrBuilder {

      /**
       * <code>optional string namespace = 1;</code>
       */
      boolean hasNamespace();
      /**
       * <code>optional string namespace = 1;</code>
       */
      java.lang.String getNamespace();
      /**
       * <code>optional string namespace = 1;</code>
       */
      org.apache.hbase.thirdparty.com.google.protobuf.ByteString
          getNamespaceBytes();

      /**
       * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
       */
      boolean hasSnapshot();
      /**
       * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
       */
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getSnapshot();
      /**
       * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
       */
      org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder getSnapshotOrBuilder();
    }
/**
* Protobuf type {@code hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot}
*/
@javax.annotation.Generated("proto") public static final class NamespaceQuotaSnapshot extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot)
NamespaceQuotaSnapshotOrBuilder {
      private static final long serialVersionUID = 0L;
      // Use NamespaceQuotaSnapshot.newBuilder() to construct.
      private NamespaceQuotaSnapshot(org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
        super(builder);
      }
      // No-arg constructor used for the default instance; initializes the
      // string field to its empty default.
      private NamespaceQuotaSnapshot() {
        namespace_ = "";
      }

      @java.lang.Override
      public final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
        return this.unknownFields;
      }
      /**
       * Wire-parsing constructor: reads tag/value pairs until EOF (tag 0),
       * setting presence bits in bitField0_ as fields arrive. Unrecognized
       * tags are preserved in unknownFields. On error the partially parsed
       * message is attached to the thrown exception.
       */
      private NamespaceQuotaSnapshot(
          org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
          org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
        this();
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        int mutable_bitField0_ = 0;
        org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            // Note: case order (default before the field cases) is a quirk of
            // the generator; switch cases are independent so order is harmless.
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!parseUnknownField(
                    input, unknownFields, extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                // Field 1 (namespace), wire type 2: stored as raw bytes and
                // validated/decoded lazily in getNamespace().
                org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
                bitField0_ |= 0x00000001;
                namespace_ = bs;
                break;
              }
              case 18: {
                // Field 2 (snapshot), wire type 2: if already seen, merge the
                // new occurrence into the existing value per proto2 semantics.
                org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder subBuilder = null;
                if (((bitField0_ & 0x00000002) == 0x00000002)) {
                  subBuilder = snapshot_.toBuilder();
                }
                snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.PARSER, extensionRegistry);
                if (subBuilder != null) {
                  subBuilder.mergeFrom(snapshot_);
                  snapshot_ = subBuilder.buildPartial();
                }
                bitField0_ |= 0x00000002;
                break;
              }
            }
          }
        } catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException(
              e).setUnfinishedMessage(this);
        } finally {
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      /** Returns the protobuf descriptor for this message type. */
      public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_NamespaceQuotaSnapshot_descriptor;
      }

      /** Wires the descriptor to this class/Builder for reflective access. */
      protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_NamespaceQuotaSnapshot_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder.class);
      }
      // Presence bits: 0x1 = namespace, 0x2 = snapshot.
      private int bitField0_;
      public static final int NAMESPACE_FIELD_NUMBER = 1;
      // Holds either a decoded String or the raw ByteString from the wire;
      // volatile because getNamespace() may cache the decoded form lazily
      // from any thread.
      private volatile java.lang.Object namespace_;
      /**
       * <code>optional string namespace = 1;</code>
       */
      public boolean hasNamespace() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional string namespace = 1;</code>
       *
       * Decodes the stored ByteString to UTF-8 on first access and caches the
       * String back into namespace_ only when the bytes are valid UTF-8.
       */
      public java.lang.String getNamespace() {
        java.lang.Object ref = namespace_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = 
              (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            namespace_ = s;
          }
          return s;
        }
      }
      /**
       * <code>optional string namespace = 1;</code>
       *
       * Returns (and caches) the UTF-8 bytes of the field.
       */
      public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
          getNamespaceBytes() {
        java.lang.Object ref = namespace_;
        if (ref instanceof java.lang.String) {
          org.apache.hbase.thirdparty.com.google.protobuf.ByteString b = 
              org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          namespace_ = b;
          return b;
        } else {
          return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
        }
      }

      public static final int SNAPSHOT_FIELD_NUMBER = 2;
      private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot snapshot_;
      /**
       * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
       */
      public boolean hasSnapshot() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
       *
       * Never returns null; falls back to the field type's default instance.
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getSnapshot() {
        return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
      }
      /**
       * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder getSnapshotOrBuilder() {
        return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
      }
      // -1 = unknown, 0 = not initialized, 1 = initialized.
      private byte memoizedIsInitialized = -1;
      /**
       * Always initialized once computed: this message has no required fields
       * and no sub-messages with required fields to check.
       */
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized == 1) return true;
        if (isInitialized == 0) return false;
        memoizedIsInitialized = 1;
        return true;
      }
      /**
       * Serializes set fields (0x1 = namespace, 0x2 = snapshot) in
       * field-number order, followed by any unknown fields.
       */
      public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, namespace_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeMessage(2, getSnapshot());
        }
        unknownFields.writeTo(output);
      }

      /**
       * Computes and memoizes the wire size in bytes
       * ({@code memoizedSize} == -1 means not yet computed).
       */
      public int getSerializedSize() {
        int size = memoizedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, namespace_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
            .computeMessageSize(2, getSnapshot());
        }
        size += unknownFields.getSerializedSize();
        memoizedSize = size;
        return size;
      }
      /**
       * Field-wise equality over presence, values, and unknown fields; falls
       * back to {@code super.equals} for non-message arguments.
       */
      @java.lang.Override
      public boolean equals(final java.lang.Object obj) {
        if (obj == this) {
          return true;
        }
        if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot)) {
          return super.equals(obj);
        }
        org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot) obj;

        boolean result = true;
        result = result && (hasNamespace() == other.hasNamespace());
        if (hasNamespace()) {
          result = result && getNamespace()
              .equals(other.getNamespace());
        }
        result = result && (hasSnapshot() == other.hasSnapshot());
        if (hasSnapshot()) {
          result = result && getSnapshot()
              .equals(other.getSnapshot());
        }
        result = result && unknownFields.equals(other.unknownFields);
        return result;
      }

      /**
       * Hash consistent with {@link #equals}, memoized in
       * {@code memoizedHashCode} (0 = not yet computed); uses the standard
       * protobuf-generated mixing constants.
       */
      @java.lang.Override
      public int hashCode() {
        if (memoizedHashCode != 0) {
          return memoizedHashCode;
        }
        int hash = 41;
        hash = (19 * hash) + getDescriptor().hashCode();
        if (hasNamespace()) {
          hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
          hash = (53 * hash) + getNamespace().hashCode();
        }
        if (hasSnapshot()) {
          hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
          hash = (53 * hash) + getSnapshot().hashCode();
        }
        hash = (29 * hash) + unknownFields.hashCode();
        memoizedHashCode = hash;
        return hash;
      }
// --- Standard generated parse entry points for NamespaceQuotaSnapshot.
// Each overload delegates to PARSER; the ExtensionRegistryLite variants
// allow extension fields to be recognized during parsing. Stream-based
// overloads wrap protobuf parse failures via parseWithIOException, and the
// "Delimited" variants read a varint length prefix before the message.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods: newBuilder() starts from the default instance;
// toBuilder() copies this message's current field values (the default
// instance yields a fresh empty Builder to avoid a needless merge).
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot)
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshotOrBuilder {
// Message descriptor shared with the enclosing message type.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_NamespaceQuotaSnapshot_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_NamespaceQuotaSnapshot_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested snapshot field builder when the protobuf
// runtime flags that field builders must always be used.
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getSnapshotFieldBuilder();
}
}
// Resets every field to its default and clears both has-bits.
public Builder clear() {
super.clear();
namespace_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (snapshotBuilder_ == null) {
snapshot_ = null;
} else {
snapshotBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_NamespaceQuotaSnapshot_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.getDefaultInstance();
}
// Builds and verifies initialization; throws if the result is incomplete.
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies field values and transfers the builder's has-bits onto the new
// message instance without an initialization check.
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.namespace_ = namespace_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (snapshotBuilder_ == null) {
result.snapshot_ = snapshot_;
} else {
result.snapshot_ = snapshotBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible; otherwise merges reflectively.
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: only fields present in `other` overwrite/merge here.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.getDefaultInstance()) return this;
if (other.hasNamespace()) {
bitField0_ |= 0x00000001;
namespace_ = other.namespace_;
onChanged();
}
if (other.hasSnapshot()) {
mergeSnapshot(other.getSnapshot());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
// Parses from a stream; on parse failure the partially-read message is
// still merged (in the finally block) before the exception propagates.
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// Holds either a String or a ByteString; converted lazily on access.
private java.lang.Object namespace_ = "";
/**
 * <code>optional string namespace = 1;</code>
 */
public boolean hasNamespace() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional string namespace = 1;</code>
 */
public java.lang.String getNamespace() {
java.lang.Object ref = namespace_;
if (!(ref instanceof java.lang.String)) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs =
(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes are valid UTF-8.
if (bs.isValidUtf8()) {
namespace_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string namespace = 1;</code>
 */
public org.apache.hbase.thirdparty.com.google.protobuf.ByteString
getNamespaceBytes() {
java.lang.Object ref = namespace_;
if (ref instanceof String) {
org.apache.hbase.thirdparty.com.google.protobuf.ByteString b =
org.apache.hbase.thirdparty.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
namespace_ = b;
return b;
} else {
return (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string namespace = 1;</code>
 */
public Builder setNamespace(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
namespace_ = value;
onChanged();
return this;
}
/**
 * <code>optional string namespace = 1;</code>
 */
public Builder clearNamespace() {
bitField0_ = (bitField0_ & ~0x00000001);
namespace_ = getDefaultInstance().getNamespace();
onChanged();
return this;
}
/**
 * <code>optional string namespace = 1;</code>
 */
public Builder setNamespaceBytes(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
namespace_ = value;
onChanged();
return this;
}
// Either snapshot_ (plain value) or snapshotBuilder_ (nested builder) is
// active at a time; snapshotBuilder_ takes precedence once created.
private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot snapshot_ = null;
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder> snapshotBuilder_;
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 */
public boolean hasSnapshot() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot getSnapshot() {
if (snapshotBuilder_ == null) {
return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
} else {
return snapshotBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 */
public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot value) {
if (snapshotBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
snapshot_ = value;
onChanged();
} else {
snapshotBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 */
public Builder setSnapshot(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder builderForValue) {
if (snapshotBuilder_ == null) {
snapshot_ = builderForValue.build();
onChanged();
} else {
snapshotBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 */
public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot value) {
if (snapshotBuilder_ == null) {
// Merge into the existing value only if one was already set and it is
// not the default instance; otherwise take `value` wholesale.
if (((bitField0_ & 0x00000002) == 0x00000002) &&
snapshot_ != null &&
snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance()) {
snapshot_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.newBuilder(snapshot_).mergeFrom(value).buildPartial();
} else {
snapshot_ = value;
}
onChanged();
} else {
snapshotBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 */
public Builder clearSnapshot() {
if (snapshotBuilder_ == null) {
snapshot_ = null;
onChanged();
} else {
snapshotBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder getSnapshotBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getSnapshotFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder getSnapshotOrBuilder() {
if (snapshotBuilder_ != null) {
return snapshotBuilder_.getMessageOrBuilder();
} else {
return snapshot_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.getDefaultInstance() : snapshot_;
}
}
/**
 * <code>optional .hbase.pb.SpaceQuotaSnapshot snapshot = 2;</code>
 */
// Lazily creates the single-field builder, transferring any plain value
// into it (snapshot_ is nulled once the builder owns the field).
private org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder>
getSnapshotFieldBuilder() {
if (snapshotBuilder_ == null) {
snapshotBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshotOrBuilder>(
getSnapshot(),
getParentForChildren(),
isClean());
snapshot_ = null;
}
return snapshotBuilder_;
}
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot)
}
// @@protoc_insertion_point(class_scope:hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot)
// Singleton default (empty) instance; also used as the prototype for builders.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Direct PARSER access is deprecated by the generator; prefer parser().
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<NamespaceQuotaSnapshot>
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<NamespaceQuotaSnapshot>() {
public NamespaceQuotaSnapshot parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return new NamespaceQuotaSnapshot(input, extensionRegistry);
}
};
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<NamespaceQuotaSnapshot> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<NamespaceQuotaSnapshot> getParserForType() {
return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// --- GetQuotaStatesResponse: accessors for the two repeated message fields.
public static final int TABLE_SNAPSHOTS_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot> tableSnapshots_;
/**
 * <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot> getTableSnapshotsList() {
return tableSnapshots_;
}
/**
 * <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshotOrBuilder>
getTableSnapshotsOrBuilderList() {
return tableSnapshots_;
}
/**
 * <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
 */
public int getTableSnapshotsCount() {
return tableSnapshots_.size();
}
/**
 * <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot getTableSnapshots(int index) {
return tableSnapshots_.get(index);
}
/**
 * <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshotOrBuilder getTableSnapshotsOrBuilder(
int index) {
return tableSnapshots_.get(index);
}
public static final int NS_SNAPSHOTS_FIELD_NUMBER = 2;
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot> nsSnapshots_;
/**
 * <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
 */
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot> getNsSnapshotsList() {
return nsSnapshots_;
}
/**
 * <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshotOrBuilder>
getNsSnapshotsOrBuilderList() {
return nsSnapshots_;
}
/**
 * <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
 */
public int getNsSnapshotsCount() {
return nsSnapshots_.size();
}
/**
 * <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot getNsSnapshots(int index) {
return nsSnapshots_.get(index);
}
/**
 * <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshotOrBuilder getNsSnapshotsOrBuilder(
int index) {
return nsSnapshots_.get(index);
}
// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// Recursively checks only the table snapshots; ns_snapshots elements need no
// check since NamespaceQuotaSnapshot.isInitialized() is unconditionally true.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
for (int i = 0; i < getTableSnapshotsCount(); i++) {
if (!getTableSnapshots(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes table snapshots (field 1), then namespace snapshots (field 2),
// then any unknown fields preserved from parsing.
public void writeTo(org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < tableSnapshots_.size(); i++) {
output.writeMessage(1, tableSnapshots_.get(i));
}
for (int i = 0; i < nsSnapshots_.size(); i++) {
output.writeMessage(2, nsSnapshots_.get(i));
}
unknownFields.writeTo(output);
}
// Returns the serialized size in bytes of this GetQuotaStatesResponse,
// caching the result in memoizedSize (sentinel -1 means "not yet computed").
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < tableSnapshots_.size(); i++) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(1, tableSnapshots_.get(i));
}
for (int i = 0; i < nsSnapshots_.size(); i++) {
size += org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream
.computeMessageSize(2, nsSnapshots_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Equality requires both repeated snapshot lists to be element-wise equal
 * (order-sensitive) and the unknown-field sets to match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse) obj;
boolean result = true;
result = result && getTableSnapshotsList()
.equals(other.getTableSnapshotsList());
result = result && getNsSnapshotsList()
.equals(other.getNsSnapshotsList());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
/**
 * Hash code consistent with equals(): mixes in each non-empty repeated
 * field's number and list hash; memoized (0 means "not yet computed").
 */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getTableSnapshotsCount() > 0) {
hash = (37 * hash) + TABLE_SNAPSHOTS_FIELD_NUMBER;
hash = (53 * hash) + getTableSnapshotsList().hashCode();
}
if (getNsSnapshotsCount() > 0) {
hash = (37 * hash) + NS_SNAPSHOTS_FIELD_NUMBER;
hash = (53 * hash) + getNsSnapshotsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Standard generated parse entry points for GetQuotaStatesResponse.
// Each overload delegates to PARSER; ExtensionRegistryLite variants allow
// extension fields, stream overloads wrap parse failures via
// parseWithIOException, and "Delimited" variants expect a length prefix.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom(
java.nio.ByteBuffer data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.ByteString data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom(byte[] data)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom(
byte[] data,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseDelimitedFrom(
java.io.InputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods: newBuilder() starts from the default instance;
// toBuilder() copies this message's current field values (the default
// instance yields a fresh empty Builder to avoid a needless merge).
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.GetQuotaStatesResponse}
*/
@javax.annotation.Generated("proto") public static final class Builder extends
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.GetQuotaStatesResponse)
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponseOrBuilder {
// Message descriptor shared with the enclosing message type.
public static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_descriptor;
}
protected org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the repeated-field builders when the protobuf runtime
// flags that field builders must always be used.
private void maybeForceBuilderInitialization() {
if (org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTableSnapshotsFieldBuilder();
getNsSnapshotsFieldBuilder();
}
}
// Resets both repeated fields (plain lists or their repeated-field builders)
// and clears their "list is mutable" bits.
public Builder clear() {
super.clear();
if (tableSnapshotsBuilder_ == null) {
tableSnapshots_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
tableSnapshotsBuilder_.clear();
}
if (nsSnapshotsBuilder_ == null) {
nsSnapshots_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
nsSnapshotsBuilder_.clear();
}
return this;
}
public org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetQuotaStatesResponse_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.getDefaultInstance();
}
// Builds and verifies initialization; throws if the result is incomplete.
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse(this);
int from_bitField0_ = bitField0_;
if (tableSnapshotsBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
tableSnapshots_ = java.util.Collections.unmodifiableList(tableSnapshots_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.tableSnapshots_ = tableSnapshots_;
} else {
result.tableSnapshots_ = tableSnapshotsBuilder_.build();
}
if (nsSnapshotsBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002)) {
nsSnapshots_ = java.util.Collections.unmodifiableList(nsSnapshots_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.nsSnapshots_ = nsSnapshots_;
} else {
result.nsSnapshots_ = nsSnapshotsBuilder_.build();
}
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(org.apache.hbase.thirdparty.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.getDefaultInstance()) return this;
if (tableSnapshotsBuilder_ == null) {
if (!other.tableSnapshots_.isEmpty()) {
if (tableSnapshots_.isEmpty()) {
tableSnapshots_ = other.tableSnapshots_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTableSnapshotsIsMutable();
tableSnapshots_.addAll(other.tableSnapshots_);
}
onChanged();
}
} else {
if (!other.tableSnapshots_.isEmpty()) {
if (tableSnapshotsBuilder_.isEmpty()) {
tableSnapshotsBuilder_.dispose();
tableSnapshotsBuilder_ = null;
tableSnapshots_ = other.tableSnapshots_;
bitField0_ = (bitField0_ & ~0x00000001);
tableSnapshotsBuilder_ =
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getTableSnapshotsFieldBuilder() : null;
} else {
tableSnapshotsBuilder_.addAllMessages(other.tableSnapshots_);
}
}
}
if (nsSnapshotsBuilder_ == null) {
if (!other.nsSnapshots_.isEmpty()) {
if (nsSnapshots_.isEmpty()) {
nsSnapshots_ = other.nsSnapshots_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureNsSnapshotsIsMutable();
nsSnapshots_.addAll(other.nsSnapshots_);
}
onChanged();
}
} else {
if (!other.nsSnapshots_.isEmpty()) {
if (nsSnapshotsBuilder_.isEmpty()) {
nsSnapshotsBuilder_.dispose();
nsSnapshotsBuilder_ = null;
nsSnapshots_ = other.nsSnapshots_;
bitField0_ = (bitField0_ & ~0x00000002);
nsSnapshotsBuilder_ =
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getNsSnapshotsFieldBuilder() : null;
} else {
nsSnapshotsBuilder_.addAllMessages(other.nsSnapshots_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
for (int i = 0; i < getTableSnapshotsCount(); i++) {
if (!getTableSnapshots(i).isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot> tableSnapshots_ =
java.util.Collections.emptyList();
private void ensureTableSnapshotsIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
tableSnapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot>(tableSnapshots_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshotOrBuilder> tableSnapshotsBuilder_;
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot> getTableSnapshotsList() {
if (tableSnapshotsBuilder_ == null) {
return java.util.Collections.unmodifiableList(tableSnapshots_);
} else {
return tableSnapshotsBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public int getTableSnapshotsCount() {
if (tableSnapshotsBuilder_ == null) {
return tableSnapshots_.size();
} else {
return tableSnapshotsBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot getTableSnapshots(int index) {
if (tableSnapshotsBuilder_ == null) {
return tableSnapshots_.get(index);
} else {
return tableSnapshotsBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public Builder setTableSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot value) {
if (tableSnapshotsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTableSnapshotsIsMutable();
tableSnapshots_.set(index, value);
onChanged();
} else {
tableSnapshotsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public Builder setTableSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder builderForValue) {
if (tableSnapshotsBuilder_ == null) {
ensureTableSnapshotsIsMutable();
tableSnapshots_.set(index, builderForValue.build());
onChanged();
} else {
tableSnapshotsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public Builder addTableSnapshots(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot value) {
if (tableSnapshotsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTableSnapshotsIsMutable();
tableSnapshots_.add(value);
onChanged();
} else {
tableSnapshotsBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public Builder addTableSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot value) {
if (tableSnapshotsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTableSnapshotsIsMutable();
tableSnapshots_.add(index, value);
onChanged();
} else {
tableSnapshotsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public Builder addTableSnapshots(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder builderForValue) {
if (tableSnapshotsBuilder_ == null) {
ensureTableSnapshotsIsMutable();
tableSnapshots_.add(builderForValue.build());
onChanged();
} else {
tableSnapshotsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public Builder addTableSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder builderForValue) {
if (tableSnapshotsBuilder_ == null) {
ensureTableSnapshotsIsMutable();
tableSnapshots_.add(index, builderForValue.build());
onChanged();
} else {
tableSnapshotsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public Builder addAllTableSnapshots(
java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot> values) {
if (tableSnapshotsBuilder_ == null) {
ensureTableSnapshotsIsMutable();
org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, tableSnapshots_);
onChanged();
} else {
tableSnapshotsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public Builder clearTableSnapshots() {
if (tableSnapshotsBuilder_ == null) {
tableSnapshots_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
tableSnapshotsBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public Builder removeTableSnapshots(int index) {
if (tableSnapshotsBuilder_ == null) {
ensureTableSnapshotsIsMutable();
tableSnapshots_.remove(index);
onChanged();
} else {
tableSnapshotsBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder getTableSnapshotsBuilder(
int index) {
return getTableSnapshotsFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshotOrBuilder getTableSnapshotsOrBuilder(
int index) {
if (tableSnapshotsBuilder_ == null) {
return tableSnapshots_.get(index); } else {
return tableSnapshotsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshotOrBuilder>
getTableSnapshotsOrBuilderList() {
if (tableSnapshotsBuilder_ != null) {
return tableSnapshotsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(tableSnapshots_);
}
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder addTableSnapshotsBuilder() {
return getTableSnapshotsFieldBuilder().addBuilder(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder addTableSnapshotsBuilder(
int index) {
return getTableSnapshotsFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.TableQuotaSnapshot table_snapshots = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder>
getTableSnapshotsBuilderList() {
return getTableSnapshotsFieldBuilder().getBuilderList();
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshotOrBuilder>
getTableSnapshotsFieldBuilder() {
if (tableSnapshotsBuilder_ == null) {
tableSnapshotsBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshotOrBuilder>(
tableSnapshots_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
tableSnapshots_ = null;
}
return tableSnapshotsBuilder_;
}
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot> nsSnapshots_ =
java.util.Collections.emptyList();
private void ensureNsSnapshotsIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
nsSnapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot>(nsSnapshots_);
bitField0_ |= 0x00000002;
}
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshotOrBuilder> nsSnapshotsBuilder_;
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot> getNsSnapshotsList() {
if (nsSnapshotsBuilder_ == null) {
return java.util.Collections.unmodifiableList(nsSnapshots_);
} else {
return nsSnapshotsBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public int getNsSnapshotsCount() {
if (nsSnapshotsBuilder_ == null) {
return nsSnapshots_.size();
} else {
return nsSnapshotsBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot getNsSnapshots(int index) {
if (nsSnapshotsBuilder_ == null) {
return nsSnapshots_.get(index);
} else {
return nsSnapshotsBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public Builder setNsSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot value) {
if (nsSnapshotsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNsSnapshotsIsMutable();
nsSnapshots_.set(index, value);
onChanged();
} else {
nsSnapshotsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public Builder setNsSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder builderForValue) {
if (nsSnapshotsBuilder_ == null) {
ensureNsSnapshotsIsMutable();
nsSnapshots_.set(index, builderForValue.build());
onChanged();
} else {
nsSnapshotsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public Builder addNsSnapshots(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot value) {
if (nsSnapshotsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNsSnapshotsIsMutable();
nsSnapshots_.add(value);
onChanged();
} else {
nsSnapshotsBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public Builder addNsSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot value) {
if (nsSnapshotsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNsSnapshotsIsMutable();
nsSnapshots_.add(index, value);
onChanged();
} else {
nsSnapshotsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public Builder addNsSnapshots(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder builderForValue) {
if (nsSnapshotsBuilder_ == null) {
ensureNsSnapshotsIsMutable();
nsSnapshots_.add(builderForValue.build());
onChanged();
} else {
nsSnapshotsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public Builder addNsSnapshots(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder builderForValue) {
if (nsSnapshotsBuilder_ == null) {
ensureNsSnapshotsIsMutable();
nsSnapshots_.add(index, builderForValue.build());
onChanged();
} else {
nsSnapshotsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public Builder addAllNsSnapshots(
java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot> values) {
if (nsSnapshotsBuilder_ == null) {
ensureNsSnapshotsIsMutable();
org.apache.hbase.thirdparty.com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, nsSnapshots_);
onChanged();
} else {
nsSnapshotsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public Builder clearNsSnapshots() {
if (nsSnapshotsBuilder_ == null) {
nsSnapshots_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
nsSnapshotsBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public Builder removeNsSnapshots(int index) {
if (nsSnapshotsBuilder_ == null) {
ensureNsSnapshotsIsMutable();
nsSnapshots_.remove(index);
onChanged();
} else {
nsSnapshotsBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder getNsSnapshotsBuilder(
int index) {
return getNsSnapshotsFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshotOrBuilder getNsSnapshotsOrBuilder(
int index) {
if (nsSnapshotsBuilder_ == null) {
return nsSnapshots_.get(index); } else {
return nsSnapshotsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshotOrBuilder>
getNsSnapshotsOrBuilderList() {
if (nsSnapshotsBuilder_ != null) {
return nsSnapshotsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(nsSnapshots_);
}
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder addNsSnapshotsBuilder() {
return getNsSnapshotsFieldBuilder().addBuilder(
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder addNsSnapshotsBuilder(
int index) {
return getNsSnapshotsFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.GetQuotaStatesResponse.NamespaceQuotaSnapshot ns_snapshots = 2;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder>
getNsSnapshotsBuilderList() {
return getNsSnapshotsFieldBuilder().getBuilderList();
}
private org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshotOrBuilder>
getNsSnapshotsFieldBuilder() {
if (nsSnapshotsBuilder_ == null) {
nsSnapshotsBuilder_ = new org.apache.hbase.thirdparty.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshotOrBuilder>(
nsSnapshots_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
nsSnapshots_ = null;
}
return nsSnapshotsBuilder_;
}
public final Builder setUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hbase.thirdparty.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.GetQuotaStatesResponse)
}
// @@protoc_insertion_point(class_scope:hbase.pb.GetQuotaStatesResponse)
// Shared immutable default (empty) instance, created eagerly at class load;
// identity-compared elsewhere (e.g. in toBuilder/mergeFrom fast paths).
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse();
}
/** Returns the shared immutable default (empty) instance. */
public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated as a public field by the protobuf code generator; external
// callers should use parser() / getParserForType() instead.
@java.lang.Deprecated public static final org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetQuotaStatesResponse>
PARSER = new org.apache.hbase.thirdparty.com.google.protobuf.AbstractParser<GetQuotaStatesResponse>() {
public GetQuotaStatesResponse parsePartialFrom(
org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream input,
org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException {
// Parsing is delegated to the message's stream constructor.
return new GetQuotaStatesResponse(input, extensionRegistry);
}
};
/** Preferred accessor for the message parser (wraps the deprecated PARSER field). */
public static org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetQuotaStatesResponse> parser() {
return PARSER;
}
/** Instance-level parser accessor required by the Message interface. */
@java.lang.Override
public org.apache.hbase.thirdparty.com.google.protobuf.Parser<GetQuotaStatesResponse> getParserForType() {
return PARSER;
}
/** Per-instance view of the shared default instance (MessageOrBuilder contract). */
public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Descriptor and field-accessor-table handles for every message type declared
// in Quota.proto. All are populated once by the static initializer that builds
// the file descriptor, then read by each message's getDescriptor() /
// internalGetFieldAccessorTable().
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_TimedQuota_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_TimedQuota_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_Throttle_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_Throttle_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_ThrottleRequest_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_ThrottleRequest_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_Quotas_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_Quotas_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_QuotaUsage_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_QuotaUsage_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_SpaceQuota_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_SpaceQuota_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_SpaceLimitRequest_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_SpaceLimitRequest_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_SpaceQuotaStatus_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_SpaceQuotaStatus_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_SpaceQuotaSnapshot_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_SpaceQuotaSnapshot_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_RegionSizes_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_RegionSizes_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetSpaceQuotaSnapshotsRequest_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetSpaceQuotaSnapshotsRequest_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_TableQuotaSnapshot_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_TableQuotaSnapshot_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetQuotaStatesRequest_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetQuotaStatesRequest_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetQuotaStatesResponse_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetQuotaStatesResponse_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetQuotaStatesResponse_TableQuotaSnapshot_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetQuotaStatesResponse_TableQuotaSnapshot_fieldAccessorTable;
private static final org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetQuotaStatesResponse_NamespaceQuotaSnapshot_descriptor;
private static final
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_GetQuotaStatesResponse_NamespaceQuotaSnapshot_fieldAccessorTable;
public static org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
// Serialized FileDescriptorProto for Quota.proto. protoc splits it across
// many string literals; the bytes are opaque and must not be edited by hand.
java.lang.String[] descriptorData = {
"\n\013Quota.proto\022\010hbase.pb\032\013HBase.proto\"\204\001\n" +
"\nTimedQuota\022%\n\ttime_unit\030\001 \002(\0162\022.hbase.p" +
"b.TimeUnit\022\022\n\nsoft_limit\030\002 \001(\004\022\r\n\005share\030" +
"\003 \001(\002\022,\n\005scope\030\004 \001(\0162\024.hbase.pb.QuotaSco" +
"pe:\007MACHINE\"\223\003\n\010Throttle\022%\n\007req_num\030\001 \001(" +
"\0132\024.hbase.pb.TimedQuota\022&\n\010req_size\030\002 \001(" +
"\0132\024.hbase.pb.TimedQuota\022\'\n\twrite_num\030\003 \001" +
"(\0132\024.hbase.pb.TimedQuota\022(\n\nwrite_size\030\004" +
" \001(\0132\024.hbase.pb.TimedQuota\022&\n\010read_num\030\005" +
" \001(\0132\024.hbase.pb.TimedQuota\022\'\n\tread_size\030" +
"\006 \001(\0132\024.hbase.pb.TimedQuota\022/\n\021req_capac" +
"ity_unit\030\007 \001(\0132\024.hbase.pb.TimedQuota\0221\n\023" +
"write_capacity_unit\030\010 \001(\0132\024.hbase.pb.Tim" +
"edQuota\0220\n\022read_capacity_unit\030\t \001(\0132\024.hb" +
"ase.pb.TimedQuota\"b\n\017ThrottleRequest\022$\n\004" +
"type\030\001 \001(\0162\026.hbase.pb.ThrottleType\022)\n\013ti" +
"med_quota\030\002 \001(\0132\024.hbase.pb.TimedQuota\"r\n" +
"\006Quotas\022\035\n\016bypass_globals\030\001 \001(\010:\005false\022$" +
"\n\010throttle\030\002 \001(\0132\022.hbase.pb.Throttle\022#\n\005" +
"space\030\003 \001(\0132\024.hbase.pb.SpaceQuota\"\014\n\nQuo" +
"taUsage\"q\n\nSpaceQuota\022\022\n\nsoft_limit\030\001 \001(" +
"\004\0228\n\020violation_policy\030\002 \001(\0162\036.hbase.pb.S" +
"paceViolationPolicy\022\025\n\006remove\030\003 \001(\010:\005fal" +
"se\"8\n\021SpaceLimitRequest\022#\n\005quota\030\001 \001(\0132\024" +
".hbase.pb.SpaceQuota\"b\n\020SpaceQuotaStatus" +
"\0228\n\020violation_policy\030\001 \001(\0162\036.hbase.pb.Sp" +
"aceViolationPolicy\022\024\n\014in_violation\030\002 \001(\010" +
"\"p\n\022SpaceQuotaSnapshot\0220\n\014quota_status\030\001" +
" \001(\0132\032.hbase.pb.SpaceQuotaStatus\022\023\n\013quot" +
"a_usage\030\002 \001(\004\022\023\n\013quota_limit\030\003 \001(\004\"!\n\037Ge" +
"tSpaceQuotaRegionSizesRequest\"\257\001\n GetSpa" +
"ceQuotaRegionSizesResponse\022E\n\005sizes\030\001 \003(" +
"\01326.hbase.pb.GetSpaceQuotaRegionSizesRes" +
"ponse.RegionSizes\032D\n\013RegionSizes\022\'\n\ntabl" +
"e_name\030\001 \001(\0132\023.hbase.pb.TableName\022\014\n\004siz" +
"e\030\002 \001(\004\"\037\n\035GetSpaceQuotaSnapshotsRequest" +
"\"\337\001\n\036GetSpaceQuotaSnapshotsResponse\022N\n\ts" +
"napshots\030\001 \003(\0132;.hbase.pb.GetSpaceQuotaS" +
"napshotsResponse.TableQuotaSnapshot\032m\n\022T" +
"ableQuotaSnapshot\022\'\n\ntable_name\030\001 \001(\0132\023." +
"hbase.pb.TableName\022.\n\010snapshot\030\002 \001(\0132\034.h" +
"base.pb.SpaceQuotaSnapshot\"\027\n\025GetQuotaSt" +
"atesRequest\"\201\003\n\026GetQuotaStatesResponse\022L" +
"\n\017table_snapshots\030\001 \003(\01323.hbase.pb.GetQu" +
"otaStatesResponse.TableQuotaSnapshot\022M\n\014" +
"ns_snapshots\030\002 \003(\01327.hbase.pb.GetQuotaSt" +
"atesResponse.NamespaceQuotaSnapshot\032m\n\022T" +
"ableQuotaSnapshot\022\'\n\ntable_name\030\001 \001(\0132\023." +
"hbase.pb.TableName\022.\n\010snapshot\030\002 \001(\0132\034.h" +
"base.pb.SpaceQuotaSnapshot\032[\n\026NamespaceQ" +
"uotaSnapshot\022\021\n\tnamespace\030\001 \001(\t\022.\n\010snaps" +
"hot\030\002 \001(\0132\034.hbase.pb.SpaceQuotaSnapshot*" +
"&\n\nQuotaScope\022\013\n\007CLUSTER\020\001\022\013\n\007MACHINE\020\002*" +
"\302\001\n\014ThrottleType\022\022\n\016REQUEST_NUMBER\020\001\022\020\n\014" +
"REQUEST_SIZE\020\002\022\020\n\014WRITE_NUMBER\020\003\022\016\n\nWRIT" +
"E_SIZE\020\004\022\017\n\013READ_NUMBER\020\005\022\r\n\tREAD_SIZE\020\006" +
"\022\031\n\025REQUEST_CAPACITY_UNIT\020\007\022\027\n\023WRITE_CAP" +
"ACITY_UNIT\020\010\022\026\n\022READ_CAPACITY_UNIT\020\t*$\n\t" +
"QuotaType\022\014\n\010THROTTLE\020\001\022\t\n\005SPACE\020\002*]\n\024Sp" +
"aceViolationPolicy\022\013\n\007DISABLE\020\001\022\031\n\025NO_WR" +
"ITES_COMPACTIONS\020\002\022\r\n\tNO_WRITES\020\003\022\016\n\nNO_" +
"INSERTS\020\004BH\n1org.apache.hadoop.hbase.sha" +
"ded.protobuf.generatedB\013QuotaProtosH\001\210\001\001" +
"\240\001\001"
};
// Callback through which the protobuf runtime hands back the built
// FileDescriptor; it is stored in the static `descriptor` field.
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
public org.apache.hbase.thirdparty.com.google.protobuf.ExtensionRegistry assignDescriptors(
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
// Build the FileDescriptor from the serialized bytes, resolving the single
// dependency (HBase.proto).
org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(),
}, assigner);
// Wire up the per-message descriptors and field accessor tables. Indexes
// into getMessageTypes()/getNestedTypes() follow declaration order in
// Quota.proto and must stay in sync with it.
internal_static_hbase_pb_TimedQuota_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hbase_pb_TimedQuota_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_TimedQuota_descriptor,
new java.lang.String[] { "TimeUnit", "SoftLimit", "Share", "Scope", });
internal_static_hbase_pb_Throttle_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hbase_pb_Throttle_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_Throttle_descriptor,
new java.lang.String[] { "ReqNum", "ReqSize", "WriteNum", "WriteSize", "ReadNum", "ReadSize", "ReqCapacityUnit", "WriteCapacityUnit", "ReadCapacityUnit", });
internal_static_hbase_pb_ThrottleRequest_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hbase_pb_ThrottleRequest_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_ThrottleRequest_descriptor,
new java.lang.String[] { "Type", "TimedQuota", });
internal_static_hbase_pb_Quotas_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hbase_pb_Quotas_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_Quotas_descriptor,
new java.lang.String[] { "BypassGlobals", "Throttle", "Space", });
internal_static_hbase_pb_QuotaUsage_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hbase_pb_QuotaUsage_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_QuotaUsage_descriptor,
new java.lang.String[] { });
internal_static_hbase_pb_SpaceQuota_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_hbase_pb_SpaceQuota_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_SpaceQuota_descriptor,
new java.lang.String[] { "SoftLimit", "ViolationPolicy", "Remove", });
internal_static_hbase_pb_SpaceLimitRequest_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_hbase_pb_SpaceLimitRequest_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_SpaceLimitRequest_descriptor,
new java.lang.String[] { "Quota", });
internal_static_hbase_pb_SpaceQuotaStatus_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_hbase_pb_SpaceQuotaStatus_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_SpaceQuotaStatus_descriptor,
new java.lang.String[] { "ViolationPolicy", "InViolation", });
internal_static_hbase_pb_SpaceQuotaSnapshot_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_hbase_pb_SpaceQuotaSnapshot_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_SpaceQuotaSnapshot_descriptor,
new java.lang.String[] { "QuotaStatus", "QuotaUsage", "QuotaLimit", });
internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_descriptor,
new java.lang.String[] { });
internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_descriptor,
new java.lang.String[] { "Sizes", });
internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_RegionSizes_descriptor =
internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_descriptor.getNestedTypes().get(0);
internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_RegionSizes_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_GetSpaceQuotaRegionSizesResponse_RegionSizes_descriptor,
new java.lang.String[] { "TableName", "Size", });
internal_static_hbase_pb_GetSpaceQuotaSnapshotsRequest_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_hbase_pb_GetSpaceQuotaSnapshotsRequest_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_GetSpaceQuotaSnapshotsRequest_descriptor,
new java.lang.String[] { });
internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_descriptor,
new java.lang.String[] { "Snapshots", });
internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_TableQuotaSnapshot_descriptor =
internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_descriptor.getNestedTypes().get(0);
internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_TableQuotaSnapshot_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_GetSpaceQuotaSnapshotsResponse_TableQuotaSnapshot_descriptor,
new java.lang.String[] { "TableName", "Snapshot", });
internal_static_hbase_pb_GetQuotaStatesRequest_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_hbase_pb_GetQuotaStatesRequest_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_GetQuotaStatesRequest_descriptor,
new java.lang.String[] { });
internal_static_hbase_pb_GetQuotaStatesResponse_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_hbase_pb_GetQuotaStatesResponse_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_GetQuotaStatesResponse_descriptor,
new java.lang.String[] { "TableSnapshots", "NsSnapshots", });
internal_static_hbase_pb_GetQuotaStatesResponse_TableQuotaSnapshot_descriptor =
internal_static_hbase_pb_GetQuotaStatesResponse_descriptor.getNestedTypes().get(0);
internal_static_hbase_pb_GetQuotaStatesResponse_TableQuotaSnapshot_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_GetQuotaStatesResponse_TableQuotaSnapshot_descriptor,
new java.lang.String[] { "TableName", "Snapshot", });
internal_static_hbase_pb_GetQuotaStatesResponse_NamespaceQuotaSnapshot_descriptor =
internal_static_hbase_pb_GetQuotaStatesResponse_descriptor.getNestedTypes().get(1);
internal_static_hbase_pb_GetQuotaStatesResponse_NamespaceQuotaSnapshot_fieldAccessorTable = new
org.apache.hbase.thirdparty.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_GetQuotaStatesResponse_NamespaceQuotaSnapshot_descriptor,
new java.lang.String[] { "Namespace", "Snapshot", });
// Touch the dependency to force its static initialization as well.
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
|
/*
* Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* The MIT License
*
* Copyright (c) 2004-2015 Paul R. Holser, Jr.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package jdk.internal.joptsimple.internal;
import static java.lang.System.getProperty;
import static java.util.Arrays.asList;
import java.util.Iterator;
/**
* @author <a href="mailto:pholser@alumni.rice.edu">Paul Holser</a>
*/
/**
 * String helpers used by the option parser.
 *
 * <p>Stateless utility class; all members are static and the constructor is
 * disabled.</p>
 *
 * @author <a href="mailto:pholser@alumni.rice.edu">Paul Holser</a>
 */
public final class Strings {
    /** The empty string. */
    public static final String EMPTY = "";

    /** The platform line separator, captured once at class-initialization time. */
    public static final String LINE_SEPARATOR = getProperty( "line.separator" );

    private Strings() {
        // Utility class: instantiation is a programming error.
        throw new UnsupportedOperationException();
    }

    /**
     * Gives a string consisting of the given character repeated the given number of times.
     *
     * @param ch the character to repeat
     * @param count how many times to repeat the character; a non-positive count yields the empty string
     * @return the resultant string
     */
    public static String repeat( char ch, int count ) {
        // Presize the builder to avoid intermediate array growth. Clamp at zero so a
        // negative count still returns "" (as the plain loop always did) instead of
        // throwing from the StringBuilder constructor.
        StringBuilder buffer = new StringBuilder( Math.max( count, 0 ) );
        for ( int i = 0; i < count; ++i )
            buffer.append( ch );
        return buffer.toString();
    }

    /**
     * Tells whether the given string is either {@code null} or empty.
     *
     * @param target string to check
     * @return {@code true} if the target string is null or empty
     */
    public static boolean isNullOrEmpty( String target ) {
        return target == null || target.isEmpty();
    }

    /**
     * Gives a string consisting of a given string prepended and appended with surrounding characters.
     *
     * @param target a string
     * @param begin character to prepend
     * @param end character to append
     * @return the surrounded string
     */
    public static String surround( String target, char begin, char end ) {
        return begin + target + end;
    }

    /**
     * Gives a string consisting of the elements of a given array of strings, each separated by a given separator
     * string.
     *
     * @param pieces the strings to join
     * @param separator the separator
     * @return the joined string
     */
    public static String join( String[] pieces, String separator ) {
        return join( asList( pieces ), separator );
    }

    /**
     * Gives a string consisting of the string representations of the elements of a given iterable of strings,
     * each separated by a given separator string.
     *
     * @param pieces the elements whose string representations are to be joined
     * @param separator the separator
     * @return the joined string
     */
    public static String join( Iterable<String> pieces, String separator ) {
        StringBuilder buffer = new StringBuilder();
        for ( Iterator<String> iter = pieces.iterator(); iter.hasNext(); ) {
            buffer.append( iter.next() );
            // Separator goes between elements only, never after the last one.
            if ( iter.hasNext() )
                buffer.append( separator );
        }
        return buffer.toString();
    }
}
|
package model;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.LinkedList;
/**
 * Domain model for an application user: account credentials, profile data,
 * privacy settings, and the user's relations to other model objects
 * (followers, tweets, chats, notifications), all referenced by {@code ID}.
 */
public class User extends Model {

    /** Who is allowed to see this user's last-seen timestamp. */
    public enum LastSeenType {
        everyone,
        followings,
        no_one
    }

    // Account / profile data.
    private String username;
    private String password;
    private String profileName;
    private String firstName;
    private String lastName;
    private String email;
    private String phoneNumber;
    private LocalDate birthday;        // null when the user never supplied one
    private String bio;
    private ID avatar;                 // null until an avatar is set
    private LocalDateTime lastSeen;
    private boolean isBirthdaySet;     // true only when constructed with a birthday
    private boolean isActive;
    private boolean isPublic;
    private LastSeenType lastSeenType;

    // Relations to other model objects, stored as IDs.
    private LinkedList<ID> following;
    private LinkedList<ID> followers;
    private LinkedList<ID> blockedUsers;
    private LinkedList<ID> mutedUsers;
    private LinkedList<ID> requested;
    private LinkedList<ID> requesters;
    private LinkedList<ID> tweets;
    private LinkedList<ID> likedTweets;
    private LinkedList<ID> chatLinks;
    private LinkedList<ID> notifs;

    /**
     * Creates a user with a known birthday; {@code isBirthdaySet} is marked true.
     *
     * @param ID unique model id
     * @param username login name
     * @param password account password
     * @param profileName display name
     * @param firstName first name
     * @param lastName last name
     * @param email e-mail address
     * @param phoneNumber phone number
     * @param birthday the user's birthday
     */
    public User(ID ID, String username, String password,
                String profileName, String firstName, String lastName,
                String email, String phoneNumber, LocalDate birthday) {
        super(ID);
        this.username = username;
        this.password = password;
        this.profileName = profileName;
        this.firstName = firstName;
        this.lastName = lastName;
        this.email = email;
        this.phoneNumber = phoneNumber;
        this.birthday = birthday;
        this.isBirthdaySet = true;
        initDefaults();
    }

    /**
     * Creates a user without a birthday; {@code birthday} stays null and
     * {@code isBirthdaySet} is marked false.
     *
     * @param ID unique model id
     * @param username login name
     * @param password account password
     * @param profileName display name
     * @param firstName first name
     * @param lastName last name
     * @param email e-mail address
     * @param phoneNumber phone number
     */
    public User(ID ID, String username, String password,
                String profileName, String firstName, String lastName,
                String email, String phoneNumber) {
        // Delegate to the full constructor, then undo its "birthday present" flag.
        this(ID, username, password, profileName, firstName, lastName, email, phoneNumber, null);
        this.isBirthdaySet = false;
    }

    /** Initializes the default state every freshly-constructed user starts with. */
    private void initDefaults() {
        this.avatar = null;
        this.bio = "";
        this.lastSeen = LocalDateTime.now();
        this.isActive = true;
        this.isPublic = true;
        this.lastSeenType = LastSeenType.everyone;
        this.following = new LinkedList<>();
        this.followers = new LinkedList<>();
        this.blockedUsers = new LinkedList<>();
        this.mutedUsers = new LinkedList<>();
        this.requested = new LinkedList<>();
        this.requesters = new LinkedList<>();
        this.tweets = new LinkedList<>();
        this.likedTweets = new LinkedList<>();
        this.chatLinks = new LinkedList<>();
        this.notifs = new LinkedList<>();
    }

    // NOTE(review): these getters expose the internal lists directly, so callers
    // can (and presumably do) mutate them in place; kept as-is for compatibility.
    public LinkedList<ID> getFollowing() {
        return following;
    }

    public LinkedList<ID> getFollowers() {
        return followers;
    }

    public LinkedList<ID> getBlockedUsers() {
        return blockedUsers;
    }

    public LinkedList<ID> getMutedUsers() {
        return mutedUsers;
    }

    public LinkedList<ID> getRequested() {
        return requested;
    }

    public LinkedList<ID> getRequesters() {
        return requesters;
    }

    public LinkedList<ID> getLikedTweets() {
        return likedTweets;
    }

    public ID getAvatar() {
        return avatar;
    }

    public boolean isBirthdaySet() {
        return isBirthdaySet;
    }

    public boolean isPublic() {
        return isPublic;
    }

    public LastSeenType getLastSeenType() {
        return lastSeenType;
    }

    public LinkedList<ID> getTweets() {
        return tweets;
    }

    public LinkedList<ID> getChatLinks() {
        return chatLinks;
    }

    public LinkedList<ID> getNotifs() {
        return notifs;
    }

    public String getUsername() {
        return username;
    }

    public String getPassword() {
        return password;
    }

    public String getProfileName() {
        return profileName;
    }

    public String getFirstName() {
        return firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public String getEmail() {
        return email;
    }

    public String getPhoneNumber() {
        return phoneNumber;
    }

    public LocalDate getBirthday() {
        return birthday;
    }

    public String getBio() {
        return bio;
    }

    public LocalDateTime getLastSeen() {
        return lastSeen;
    }

    public boolean isActive() {
        return isActive;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public void setPublic(boolean aPublic) {
        isPublic = aPublic;
    }

    public void setLastSeenType(LastSeenType lastSeenType) {
        this.lastSeenType = lastSeenType;
    }

    public void setActive(boolean active) {
        isActive = active;
    }
}
|
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dmfs.tasks.utils;
import android.database.DataSetObservable;
import android.support.v4.util.SparseArrayCompat;
/**
 * A {@link SparseArrayCompat} that reports every structural change through a
 * {@link DataSetObservable}. Each mutating method first applies the change via
 * the superclass, then notifies registered observers.
 */
public class ObservableSparseArrayCompat<E> extends SparseArrayCompat<E>
{
    /** Observers registered here are notified after every mutation of this array. */
    private final DataSetObservable mDataSetObservable = new DataSetObservable();

    public ObservableSparseArrayCompat()
    {
        super();
    }

    public ObservableSparseArrayCompat(final int initialCapacity)
    {
        super(initialCapacity);
    }

    /**
     * Returns the observable through which callers can register for change
     * notifications.
     */
    public DataSetObservable getDataSetObservable()
    {
        return mDataSetObservable;
    }

    @Override
    public void append(final int key, final E value)
    {
        super.append(key, value);
        mDataSetObservable.notifyChanged();
    }

    @Override
    public void clear()
    {
        super.clear();
        mDataSetObservable.notifyChanged();
    }

    @Override
    public void delete(final int key)
    {
        super.delete(key);
        mDataSetObservable.notifyChanged();
    }

    @Override
    public void put(final int key, final E value)
    {
        super.put(key, value);
        mDataSetObservable.notifyChanged();
    }

    @Override
    public void remove(final int key)
    {
        super.remove(key);
        mDataSetObservable.notifyChanged();
    }

    @Override
    public void removeAt(final int index)
    {
        super.removeAt(index);
        mDataSetObservable.notifyChanged();
    }

    @Override
    public void removeAtRange(final int index, final int size)
    {
        super.removeAtRange(index, size);
        mDataSetObservable.notifyChanged();
    }

    @Override
    public void setValueAt(final int index, final E value)
    {
        super.setValueAt(index, value);
        mDataSetObservable.notifyChanged();
    }
}
|
package com.nepxion.discovery.console.rest;
/**
* <p>Title: Nepxion Discovery</p>
* <p>Description: Nepxion Discovery</p>
* <p>Copyright: Copyright (c) 2017-2050</p>
* <p>Company: Nepxion</p>
* @author Haojun Ren
* @version 1.0
*/
import org.springframework.web.client.RestTemplate;
import com.nepxion.discovery.console.entity.GatewayType;
import com.nepxion.discovery.console.resource.ServiceResource;
/**
 * REST invoker that adds a route definition to a discovery gateway service.
 * Posts the route payload to the service's {@code <gatewayType>-route/add}
 * endpoint.
 */
public class RouteAddRestInvoker extends AbstractRestInvoker {
    // Both fields are assigned once in the constructor and never change,
    // so they are declared final.

    /** Gateway implementation the route targets; forms the REST path prefix. */
    private final GatewayType gatewayType;

    /** Route definition sent as the request body. */
    private final String route;

    /**
     * @param serviceResource resource describing the target service
     * @param serviceId id of the service instance to invoke
     * @param restTemplate template used to execute the HTTP call
     * @param gatewayType gateway implementation the route targets
     * @param route route definition payload
     */
    public RouteAddRestInvoker(ServiceResource serviceResource, String serviceId, RestTemplate restTemplate, GatewayType gatewayType, String route) {
        super(serviceResource, serviceId, restTemplate);
        this.gatewayType = gatewayType;
        this.route = route;
    }

    @Override
    protected String getDescription() {
        return "Route added";
    }

    @Override
    protected String getSuffixPath() {
        // e.g. "zuul-route/add" — depends on GatewayType's toString().
        return gatewayType + "-route/add";
    }

    @Override
    protected String doRest(String url) {
        return restTemplate.postForEntity(url, getInvokeEntity(route), String.class).getBody();
    }
}
|
package com.alipay.api.domain;
import java.util.List;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
/**
* 模板信息(基于合同模板填充内容生成待签文件,并指定签署人)
*
* @author auto create
* @since 1.0, 2020-05-15 10:41:38
*/
public class TemplateInfoBean extends AlipayObject {
// Auto-generated Alipay SDK model ("auto create"); prefer regenerating from
// the API definition over hand-editing.
private static final long serialVersionUID = 2575159558144841982L;
/**
 * Template fill items; fill content is supplied per component key.
 */
@ApiListField("fill_contents")
@ApiField("fill_content")
private List<FillContent> fillContents;
/**
 * Name of the file to be signed.
 */
@ApiField("name")
private String name;
/**
 * Signature fields; signer information is supplied per signature-field key.
 */
@ApiListField("signfields")
@ApiField("sign_field_bean")
private List<SignFieldBean> signfields;
/**
 * Template id, obtained when the contract template is created.
 */
@ApiField("template_id")
private String templateId;
public List<FillContent> getFillContents() {
return this.fillContents;
}
public void setFillContents(List<FillContent> fillContents) {
this.fillContents = fillContents;
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public List<SignFieldBean> getSignfields() {
return this.signfields;
}
public void setSignfields(List<SignFieldBean> signfields) {
this.signfields = signfields;
}
public String getTemplateId() {
return this.templateId;
}
public void setTemplateId(String templateId) {
this.templateId = templateId;
}
}
|
/*
* -
* #%L
* Pipeline: AWS Steps
* %%
* Copyright (C) 2016 Taimos GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package de.taimos.pipeline.aws;
import javax.inject.Inject;
import org.jenkinsci.plugins.workflow.steps.AbstractStepDescriptorImpl;
import org.jenkinsci.plugins.workflow.steps.AbstractStepImpl;
import org.jenkinsci.plugins.workflow.steps.AbstractSynchronousStepExecution;
import org.jenkinsci.plugins.workflow.steps.StepContextParameter;
import org.kohsuke.stapler.DataBoundConstructor;
import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClient;
import com.amazonaws.services.securitytoken.model.GetCallerIdentityRequest;
import com.amazonaws.services.securitytoken.model.GetCallerIdentityResult;
import hudson.EnvVars;
import hudson.Extension;
import hudson.model.TaskListener;
// Pipeline step "awsIdentity": calls STS GetCallerIdentity and prints the
// account, user id, and ARN of the current AWS credentials to the build log.
public class AWSIdentityStep extends AbstractStepImpl {
// The step takes no configuration; the empty constructor exists only to
// satisfy Jenkins' @DataBoundConstructor data-binding contract.
@DataBoundConstructor
public AWSIdentityStep() {
//
}
// Registers the step with Jenkins under the function name "awsIdentity".
@Extension
public static class DescriptorImpl extends AbstractStepDescriptorImpl {
public DescriptorImpl() {
super(Execution.class);
}
@Override
public String getFunctionName() {
return "awsIdentity";
}
@Override
public String getDisplayName() {
return "Print the AWS identity";
}
}
// Synchronous execution: the single STS call is short-lived, so no async
// machinery is needed.
public static class Execution extends AbstractSynchronousStepExecution<Void> {
// Injected step instance; unused below since the step carries no settings.
@Inject
private transient AWSIdentityStep step;
// Build environment, used by AWSClientFactory to resolve credentials/region.
@StepContextParameter
private transient EnvVars envVars;
// Listener providing the build's console log.
@StepContextParameter
private transient TaskListener listener;
@Override
protected Void run() throws Exception {
AWSSecurityTokenServiceClient sts = AWSClientFactory.create(AWSSecurityTokenServiceClient.class, this.envVars);
GetCallerIdentityResult identity = sts.getCallerIdentity(new GetCallerIdentityRequest());
this.listener.getLogger().format("Current AWS identity: %s - %s - %s %n", identity.getAccount(), identity.getUserId(), identity.getArn());
// Step produces no return value; it only logs.
return null;
}
private static final long serialVersionUID = 1L;
}
}
|
/*
* Copyright 2017 Jean-Louis Pasturel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.jlp.logfouineur.ui.controller;
import javafx.event.EventHandler;
import javafx.scene.control.MenuItem;
import javafx.scene.control.TextField;
import javafx.scene.input.MouseEvent;
import javafx.stage.Stage;
import javafx.scene.control.Button;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jlp.logfouineur.filestat.ui.DiagFileStats;
import org.jlp.logfouineur.ui.DialogNew;
import org.jlp.logfouineur.ui.DialogOpen;
import org.jlp.logfouineur.ui.DialogScenario;
import org.jlp.logfouineur.ui.LogFouineurMain;
import org.jlp.logfouineur.util.AlertDialog;
import javafx.event.ActionEvent;
// TODO: Auto-generated Javadoc
/**
* The Class MouseEventHandler.
*/
/**
 * Mouse/click handler shared by the LogFouineur dialogs (new-project,
 * open-project and scenario management). Dispatches on the {@code id} of the
 * JavaFX control that fired the event.
 *
 * NOTE(review): this class drives its dialogs through public static fields on
 * DialogNew / DialogOpen / DialogScenario / LogFouineurMain, so it is tightly
 * coupled to those classes and must only be used from the JavaFX thread.
 */
public class MouseEventHandler implements EventHandler<MouseEvent> {

    /** Stage owning the dialog this handler is attached to; parent for alerts, hidden/closed on success. */
    public Stage aStage = null;

    /**
     * Creates a handler bound to the given stage.
     *
     * @param aStage the stage the handler's dialog lives in
     */
    public MouseEventHandler(Stage aStage) {
        this.aStage = aStage;
    }

    /**
     * Routes a mouse event by the id of its source control: buttons trigger
     * project/scenario creation, opening, or exit; text fields toggle the
     * Create/Change button label on hover.
     *
     * @param event the mouse event fired by a dialog control
     */
    @Override
    public void handle(MouseEvent event) {
        if (event.getSource() instanceof Button) {
            System.out.println("((MouseEvent)event.getSource()).getId() =" + ((Button) event.getSource()).getId()); //$NON-NLS-1$
            switch (((Button) event.getSource()).getId()) {
            case "bNewProjectCreate": //$NON-NLS-1$
                // Create a brand-new project directory tree plus its first scenario.
                System.out.println("Button => Create newProject =>" + DialogNew.tfNameProject.getText()); //$NON-NLS-1$
                String newProject = DialogNew.tfNameProject.getText();
                if (null == newProject || newProject.length() < 4) {
                    // Project names shorter than 4 characters are rejected.
                    new AlertDialog(aStage, Messages.getString("MouseEventHandler.3"), //$NON-NLS-1$
                            AlertDialog.ICON_ERROR).showAndWait();
                    ;
                    DialogNew.tfNameProject.setText(""); //$NON-NLS-1$
                } else if (DialogNew.comboBox.getItems().contains((String) newProject)) {
                    // Duplicate project name: already listed in the combo box.
                    new AlertDialog(aStage, Messages.getString("MouseEventHandler.5"), //$NON-NLS-1$
                            AlertDialog.ICON_ERROR).showAndWait();
                    ;
                    DialogNew.tfNameProject.setText(""); //$NON-NLS-1$
                } else {
                    // dates validation
                    if (validationDates(DialogNew.tfBeginScn, DialogNew.tfEndScn)) {
                        // Scenario name defaults to "<prefix>default" and is always prefixed.
                        String scenario = LogFouineurMain.prefixScenario + "default"; //$NON-NLS-1$
                        if (DialogNew.tfNameScenario.getText().length() > 0) {
                            if (DialogNew.tfNameScenario.getText().startsWith(LogFouineurMain.prefixScenario)) {
                                scenario = DialogNew.tfNameScenario.getText();
                            } else {
                                scenario = LogFouineurMain.prefixScenario + DialogNew.tfNameScenario.getText();
                            }
                        }
                        // Default date window: epoch start .. now (overridden by the fields below).
                        String strDateBegin = "1970/01/01:00:00:00";
                        String strDateFin = new SimpleDateFormat("yyyy/MM/dd:HH:mm:ss").format(new Date());
                        if (DialogNew.tfBeginScn.getText().length() > 0)
                            strDateBegin = DialogNew.tfBeginScn.getText();
                        if (DialogNew.tfEndScn.getText().length() > 0)
                            strDateFin = DialogNew.tfEndScn.getText();
                        LogFouineurMain.currentProject = newProject;
                        // activate/deactivate Menu
                        LogFouineurMain.handleMenuState(true);
                        // Build the full on-disk layout for the project and its scenario.
                        String strPath = LogFouineurMain.workspace + File.separator + newProject;
                        String strPathTemp = strPath + File.separator + "tmp"; //$NON-NLS-1$
                        String strPathTemplate = strPath + File.separator + "templates"; //$NON-NLS-1$
                        String strPathTemplateStat = strPath + File.separator + "templates" + File.separator //$NON-NLS-1$
                                + "filestat"; //$NON-NLS-1$
                        String strPathTemplateLog = strPath + File.separator + "templates" + File.separator //$NON-NLS-1$
                                + "logparser"; //$NON-NLS-1$
                        String strPathScn = strPath + File.separator + scenario;
                        String strPathScnLog = strPath + File.separator + scenario + File.separator + "logs"; //$NON-NLS-1$
                        String strPathScnLogConf = strPath + File.separator + scenario + File.separator + "logs" //$NON-NLS-1$
                                + File.separator + "config"; //$NON-NLS-1$
                        String strPathScnLogConfLogParser = strPath + File.separator + scenario + File.separator + "logs" //$NON-NLS-1$
                                + File.separator + "config" + File.separator + "parselog";
                        String strPathScnLogConfFileStats = strPath + File.separator + scenario + File.separator + "logs" //$NON-NLS-1$
                                + File.separator + "config" + File.separator + "filestat";
                        String strPathScnCsv = strPath + File.separator + scenario + File.separator + "csv"; //$NON-NLS-1$
                        Path path = new File(strPath).toPath();
                        Path pathTemp = new File(strPathTemp).toPath();
                        Path pathTemplate = new File(strPathTemplate).toPath();
                        Path pathTemplateStat = new File(strPathTemplateStat).toPath();
                        Path pathTemplateLog = new File(strPathTemplateLog).toPath();
                        Path pathScn = new File(strPathScn).toPath();
                        Path pathScnLog = new File(strPathScnLog).toPath();
                        Path pathScnLogConf = new File(strPathScnLogConf).toPath();
                        Path pathScnCsv = new File(strPathScnCsv).toPath();
                        Path pathScnLogConfLogParser = new File(strPathScnLogConfLogParser).toPath();
                        Path pathScnLogConfFileStats = new File(strPathScnLogConfFileStats).toPath();
                        try {
                            // Parents must be created before children; order matters here.
                            Files.createDirectory(path);
                            Files.createDirectory(pathTemp);
                            Files.createDirectory(pathTemplate);
                            Files.createDirectory(pathTemplateStat);
                            Files.createDirectory(pathTemplateLog);
                            Files.createDirectory(pathScn);
                            Files.createDirectory(pathScnLog);
                            Files.createDirectory(pathScnLogConf);
                            Files.createDirectory(pathScnCsv);
                            Files.createDirectory(pathScnLogConfLogParser);
                            Files.createDirectory(pathScnLogConfFileStats);
                            // First scenario: the list is initialized (not appended) here.
                            LogFouineurMain.scenariosProps.put("listScenarios", scenario + " ");
                            LogFouineurMain.scenariosProps.put(scenario + ".dateBegin", strDateBegin);
                            LogFouineurMain.scenariosProps.put(scenario + ".dateEnd", strDateFin);
                            String strPropsConf = strPath + File.separator + "scenarios.properties";
                            try {
                                LogFouineurMain.scenariosProps.store(Files.newOutputStream(new File(strPropsConf).toPath()),
                                        "Creation of First scenario");
                            } catch (IOException e) {
                                // Persisting the properties failed; creation continues regardless.
                                e.printStackTrace();
                            }
                            new AlertDialog(aStage,
                                    Messages.getString("MouseEventHandler.1") + newProject //$NON-NLS-1$
                                            + Messages.getString("MouseEventHandler.0") + scenario, //$NON-NLS-1$
                                    AlertDialog.ICON_INFO).showAndWait();
                            LogFouineurMain.currentScenario = scenario;
                            LogFouineurMain.primaryStage
                                    .setTitle("LogFouineurMain V1.0 : Project : " + LogFouineurMain.currentProject
                                            + ", with scenario : " + LogFouineurMain.currentScenario);
                            LogFouineurMain.handleMenuState(false);
                            aStage.hide();
                        } catch (IOException e) {
                            // Directory creation failed: roll back the UI state.
                            LogFouineurMain.handleMenuState(false);
                            LogFouineurMain.currentProject = ""; //$NON-NLS-1$
                            DialogNew.tfNameProject.setText(""); //$NON-NLS-1$
                            new AlertDialog(aStage, Messages.getString("MouseEventHandler.14"), //$NON-NLS-1$
                                    AlertDialog.ICON_ERROR).showAndWait();
                            e.printStackTrace();
                        }
                    } else {
                        // Invalid dates: validationDates already showed the error alert.
                    }
                }
                break;
            case "bOpenProject":
                // update the begin and end date
                Path pathProject = FileSystems.getDefault()
                        .getPath(LogFouineurMain.workspace + File.separator + LogFouineurMain.currentProject);
                // Retrieve dates in scenarios.properties for this
                // project/scenario
                //Properties propsScn = new Properties();
                String strPropsConf = pathProject.toString() + File.separator + "scenarios.properties";
                try {
                    LogFouineurMain.scenariosProps.load(Files.newInputStream(new File(strPropsConf).toPath()));
                    // update Properties
                    // Verify that dates are correct; if not, the old dates are
                    // conserved
                    Pattern pat = Pattern.compile("(1|2)\\d{3}/\\d\\d/\\d\\d:\\d\\d:\\d\\d:\\d\\d");
                    if (pat.matcher(DialogOpen.tfBeginScn.getText()).find())
                        LogFouineurMain.scenariosProps.setProperty(LogFouineurMain.currentScenario + ".dateBegin",
                                DialogOpen.tfBeginScn.getText());
                    if (pat.matcher(DialogOpen.tfEndScn.getText()).find())
                        LogFouineurMain.scenariosProps.setProperty(LogFouineurMain.currentScenario + ".dateEnd",
                                DialogOpen.tfEndScn.getText());
                    LogFouineurMain.scenariosProps.store(Files.newOutputStream(new File(strPropsConf).toPath()), "");
                } catch (IOException e) {
                    // Properties file missing/unreadable; opening continues with in-memory state.
                    e.printStackTrace();
                }
                // activate/deactivate Menu
                LogFouineurMain.handleMenuState(false);
                LogFouineurMain.primaryStage.setTitle("LogFouineurMain V1.0 : Project : "
                        + LogFouineurMain.currentProject + ", with scenario : " + LogFouineurMain.currentScenario);
                addSubMenusLocalTemplate();
                aStage.hide();
                break;
            case "bCreateChange":
                // Same button serves two purposes, decided by its current label.
                if (DialogScenario.bCreateChange.getText().equals("Change")) {
                    // "Change": switch to an existing scenario and update its dates.
                    pathProject = FileSystems.getDefault()
                            .getPath(LogFouineurMain.workspace + File.separator + LogFouineurMain.currentProject);
                    // Retrieve dates in scenarios.properties for this
                    // project/scenario
                    // retrieve the current scenario for the project
                    String newScenario = DialogScenario.comboBoxScn.getValue();
                    LogFouineurMain.currentScenario = newScenario;
                    //propsScn = new Properties();
                    strPropsConf = pathProject.toString() + File.separator + "scenarios.properties";
                    try {
                        LogFouineurMain.scenariosProps.load(Files.newInputStream(new File(strPropsConf).toPath()));
                        // update Properties
                        // Verify that dates are correct; if not, the old dates are
                        // conserved
                        Pattern pat = Pattern.compile("(1|2)\\d{3}/\\d\\d/\\d\\d:\\d\\d:\\d\\d:\\d\\d");
                        if (pat.matcher(DialogScenario.tfBeginScn.getText()).find())
                            LogFouineurMain.scenariosProps.setProperty(LogFouineurMain.currentScenario + ".dateBegin",
                                    DialogScenario.tfBeginScn.getText());
                        if (pat.matcher(DialogScenario.tfEndScn.getText()).find())
                            LogFouineurMain.scenariosProps.setProperty(LogFouineurMain.currentScenario + ".dateEnd",
                                    DialogScenario.tfEndScn.getText());
                        LogFouineurMain.scenariosProps.store(Files.newOutputStream(new File(strPropsConf).toPath()), "");
                        LogFouineurMain.primaryStage
                                .setTitle("LogFouineurMain V1.0 : Project : " + LogFouineurMain.currentProject
                                        + ", with scenario : " + LogFouineurMain.currentScenario);
                        aStage.close();
                    } catch (IOException e) {
                        // Could not read/write scenarios.properties; dialog stays open.
                        e.printStackTrace();
                    }
                } else {
                    // creating a new scenario
                    // dates validation
                    if (validationDates(DialogScenario.tfBeginScn, DialogScenario.tfEndScn)) {
                        String scenario = DialogScenario.tfNameScenario.getText(); // $NON-NLS-1$
                        if (DialogScenario.tfNameScenario.getText().length() > 0) {
                            if (!DialogScenario.tfNameScenario.getText().startsWith(LogFouineurMain.prefixScenario)) {
                                scenario = LogFouineurMain.prefixScenario + DialogScenario.tfNameScenario.getText();
                            }
                        }
                        // Default date window: epoch start .. now (overridden below).
                        String strDateBegin = "1970/01/01:00:00:00";
                        String strDateFin = new SimpleDateFormat("yyyy/MM/dd:HH:mm:ss").format(new Date());
                        if (DialogScenario.tfBeginScn.getText().length() > 0)
                            strDateBegin = DialogScenario.tfBeginScn.getText();
                        if (DialogScenario.tfEndScn.getText().length() > 0)
                            strDateFin = DialogScenario.tfEndScn.getText();
                        // activate/deactivate Menu
                        LogFouineurMain.handleMenuState(true);
                        // Only the scenario-level directories are created below; the
                        // project-level paths are computed but already exist.
                        String strPath = LogFouineurMain.workspace + File.separator + LogFouineurMain.currentProject;
                        String strPathTemp = strPath + File.separator + "tmp"; //$NON-NLS-1$
                        String strPathTemplate = strPath + File.separator + "templates"; //$NON-NLS-1$
                        String strPathTemplateStat = strPath + File.separator + "templates" + File.separator //$NON-NLS-1$
                                + "filestat"; //$NON-NLS-1$
                        String strPathTemplateLog = strPath + File.separator + "templates" + File.separator //$NON-NLS-1$
                                + "logparser"; //$NON-NLS-1$
                        String strPathScn = strPath + File.separator + scenario;
                        String strPathScnLog = strPath + File.separator + scenario + File.separator + "logs"; //$NON-NLS-1$
                        String strPathScnLogConf = strPath + File.separator + scenario + File.separator + "logs" //$NON-NLS-1$
                                + File.separator + "config"; //$NON-NLS-1$
                        String strPathScnCsv = strPath + File.separator + scenario + File.separator + "csv"; //$NON-NLS-1$
                        Path path = new File(strPath).toPath();
                        Path pathTemp = new File(strPathTemp).toPath();
                        Path pathTemplate = new File(strPathTemplate).toPath();
                        Path pathTemplateStat = new File(strPathTemplateStat).toPath();
                        Path pathTemplateLog = new File(strPathTemplateLog).toPath();
                        Path pathScn = new File(strPathScn).toPath();
                        Path pathScnLog = new File(strPathScnLog).toPath();
                        Path pathScnLogConf = new File(strPathScnLogConf).toPath();
                        Path pathScnCsv = new File(strPathScnCsv).toPath();
                        try {
                            Files.createDirectory(pathScn);
                            Files.createDirectory(pathScnLog);
                            Files.createDirectory(pathScnLogConf);
                            Files.createDirectory(pathScnCsv);
                            //Properties scenariosProps = new Properties();
                            try {
                                strPropsConf = strPath + File.separator + "scenarios.properties";
                                LogFouineurMain.scenariosProps.load(Files.newInputStream(new File(strPropsConf).toPath()));
                                // Additional scenario: appended to the existing list.
                                LogFouineurMain.scenariosProps.put("listScenarios", LogFouineurMain.scenariosProps.getProperty("listScenarios") + scenario + " ");
                                LogFouineurMain.scenariosProps.put(scenario + ".dateBegin", strDateBegin);
                                LogFouineurMain.scenariosProps.put(scenario + ".dateEnd", strDateFin);
                                LogFouineurMain.scenariosProps.store(Files.newOutputStream(new File(strPropsConf).toPath()),
                                        "Creation of a new scenario");
                            } catch (IOException e) {
                                // Persisting the properties failed; creation continues regardless.
                                e.printStackTrace();
                            }
                            LogFouineurMain.currentScenario = scenario;
                            new AlertDialog(aStage,
                                    "Adding to the project :" + LogFouineurMain.currentProject //$NON-NLS-1$
                                            + " the new scenario : " + LogFouineurMain.currentScenario, //$NON-NLS-1$
                                    AlertDialog.ICON_INFO).showAndWait();
                            LogFouineurMain.primaryStage
                                    .setTitle("LogFouineurMain V1.0 : Project : " + LogFouineurMain.currentProject
                                            + ", with scenario : " + LogFouineurMain.currentScenario);
                            LogFouineurMain.handleMenuState(false);
                            aStage.hide();
                        } catch (IOException e) {
                            // Directory creation failed: reset all global selection state.
                            LogFouineurMain.handleMenuState(false);
                            LogFouineurMain.currentProject = ""; //$NON-NLS-1$
                            LogFouineurMain.currentScenario = "";
                            DialogNew.tfNameProject.setText(""); //$NON-NLS-1$
                            LogFouineurMain.csvPrefix = "";
                            LogFouineurMain.pathToScenario = "";
                            LogFouineurMain.fileToParseBasic = "";
                            new AlertDialog(aStage, Messages.getString("MouseEventHandler.14"), //$NON-NLS-1$
                                    AlertDialog.ICON_ERROR).showAndWait();
                            e.printStackTrace();
                        }
                    } else {
                        System.out.println("Creation nouveau scenrio manque");
                    }
                }
                break;
            case "idParseLogs":
                // Placeholder: parsing is not wired up through this handler yet.
                System.out.println("ParseLogs button");
                break;
            case "exit": //$NON-NLS-1$
                System.out.println("Menu Item =>exit"); //$NON-NLS-1$
                System.exit(0);
                break;
            }
        } else if (event.getSource() instanceof TextField) {
            switch (((TextField) event.getSource()).getId()) {
            case "tfNameScenarioCreate":
                // Hovering the scenario-name field flips the button to "Create";
                // leaving it flips back to "Change" only when the field is empty.
                if (event.getEventType() == MouseEvent.MOUSE_ENTERED) {
                    DialogScenario.bCreateChange.setText("Create");
                } else if (event.getEventType() == MouseEvent.MOUSE_EXITED) {
                    if (DialogScenario.tfNameScenario.getText().length() > 0) {
                        DialogScenario.bCreateChange.setText("Create");
                    } else {
                        DialogScenario.bCreateChange.setText("Change");
                    }
                }
                break;
            }
        }
    }

    /**
     * Populates the "from local template" menu with one item per file-stat
     * template found in the current project's templates/filestat directory.
     *
     * NOTE(review): File.listFiles() returns null when the directory does not
     * exist, which would raise a NullPointerException here — confirm the
     * directory is guaranteed to exist when this is called.
     */
    private void addSubMenusLocalTemplate() {
        String strLocTemplateString = LogFouineurMain.workspace + File.separator + LogFouineurMain.currentProject
                + File.separator + "templates" + File.separator + "filestat";
        File[] lstF = new File(strLocTemplateString).listFiles();
        MenuItem[] tabMi = new MenuItem[lstF.length];
        int i = 0;
        for (File file : lstF) {
            // Menu label is the file name with its ".properties" suffix removed.
            String prefix = file.getName().split("\\.properties")[0];
            tabMi[i] = new MenuItem(prefix);
            tabMi[i].setStyle("-fx-text-fill : black;");
            tabMi[i].setOnAction(e -> {
                System.out.println("loading from localTemplate : " + file.getAbsolutePath());
                new DiagFileStats("Loc", file);
            });
            i++;
        }
        for (i = 0; i < tabMi.length; i++) {
            LogFouineurMain.mFromLocTemplate.getItems().add(tabMi[i]);
        }
    }

    /**
     * Validates the begin/end date text fields against the expected
     * {@code yyyy/MM/dd:HH:mm:ss} format (years 1000-2999).
     * Empty fields are considered valid; on failure an error alert is shown.
     *
     * @param tfDateBegin the begin-date text field
     * @param tfDateEnd   the end-date text field
     * @return true if every non-empty field matches the date pattern
     */
    private boolean validationDates(TextField tfDateBegin, TextField tfDateEnd) {
        Pattern patDate = Pattern.compile("(1|2)\\d\\d\\d/\\d\\d/\\d\\d:\\d\\d:\\d\\d:\\d\\d"); //$NON-NLS-1$
        if (tfDateBegin.getText().length() == 0 && tfDateEnd.getText().length() == 0) {
            return true;
        } else if (tfDateBegin.getText().length() != 0 && tfDateEnd.getText().length() == 0) {
            if (patDate.matcher(tfDateBegin.getText()).find()) {
                return true;
            } else {
                new AlertDialog(aStage, Messages.getString("MouseEventHandler.10"), AlertDialog.ICON_ERROR) //$NON-NLS-1$
                        .showAndWait();
                return false;
            }
        } else if (tfDateBegin.getText().length() == 0 && tfDateEnd.getText().length() != 0) {
            if (patDate.matcher(tfDateEnd.getText()).find()) {
                return true;
            } else {
                new AlertDialog(aStage, Messages.getString("MouseEventHandler.11"), AlertDialog.ICON_ERROR) //$NON-NLS-1$
                        .showAndWait();
                return false;
            }
        } else if (tfDateBegin.getText().length() != 0 && tfDateEnd.getText().length() != 0) {
            if (patDate.matcher(tfDateEnd.getText()).find() && patDate.matcher(tfDateBegin.getText()).find()) {
                return true;
            } else {
                new AlertDialog(aStage, Messages.getString("MouseEventHandler.12"), AlertDialog.ICON_ERROR) //$NON-NLS-1$
                        .showAndWait();
                return false;
            }
        }
        // Unreachable: the four cases above are exhaustive.
        return false;
    }
}
|
package api.rest.publicapi.read.dayvolume.dto.DayVolumeData;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
/**
 * Gson DTO for one entry of the public day-volume API response, carrying the
 * "USDC" and "XRP" values as raw strings.
 *
 * NOTE(review): values are kept as String as delivered by the API — confirm
 * the numeric format before converting to a numeric type.
 */
public class USDCXRP {

    // Mapped from the JSON key "USDC".
    @SerializedName("USDC")
    @Expose
    public String uSDC;

    // Mapped from the JSON key "XRP".
    @SerializedName("XRP")
    @Expose
    public String xRP;
}
|
/**
* Copyright 2017 Pivotal Software, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micrometer.core.instrument.binder.cache;
import com.hazelcast.config.Config;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.IMap;
import io.micrometer.core.Issue;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import static java.util.Collections.emptyList;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Compatibility-kit binding for {@link HazelcastCacheMetrics}, plus a disabled
 * demonstration of why a Hazelcast miss count cannot be derived from local
 * member statistics.
 */
public class HazelcastCacheMetricsCompatibilityTest extends CacheMeterBinderCompatibilityKit {
    // Default config; every newHazelcastInstance(config) call joins the same cluster.
    private Config config = new Config();

    /** Distributed map under test; metrics are bound to this member's local view of it. */
    private IMap<String, String> cache = Hazelcast.newHazelcastInstance(config).getMap("mycache");

    @Disabled("This only demonstrates why we can't support miss count in Hazelcast.")
    @Issue("#586")
    @Test
    void multiInstanceMissCount() {
        // Second cluster member sharing the same distributed map.
        IMap<String, String> cache2 = Hazelcast.newHazelcastInstance(config).getMap("mycache");
        // Since each member owns 1/N (N being the number of members in the cluster) entries of a distributed map,
        // we add two entries so we can deterministically say that each cache will "own" one entry.
        cache.put("k1", "v");
        cache.put("k2", "v");
        cache.get("k1");
        cache.get("k2");
        // cache stats: hits = 1, gets = 2, puts = 2
        // cache2 stats: hits = 1, gets = 0, puts = 0
        // Hits land on the owning member, gets on the calling member — the two
        // counters live on different members and can't be combined into misses.
        assertThat(cache.getLocalMapStats().getHits()).isEqualTo(1);
        assertThat(cache.getLocalMapStats().getGetOperationCount()).isEqualTo(2);
        assertThat(cache2.getLocalMapStats().getHits()).isEqualTo(1);
        // ... and this is why we can't calculate miss count in Hazelcast. sorry!
    }

    // Tear down every Hazelcast member started by this test class.
    @AfterEach
    void cleanup() {
        Hazelcast.shutdownAll();
    }

    /** Supplies the binder under test to the compatibility kit. */
    @Override
    public CacheMeterBinder binder() {
        return new HazelcastCacheMetrics(cache, emptyList());
    }

    @Override
    public void put(String key, String value) {
        cache.put(key, value);
    }

    @Override
    public String get(String key) {
        return cache.get(key);
    }
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.cassandra;
import com.facebook.presto.common.predicate.TupleDomain;
import com.facebook.presto.spi.ColumnHandle;
import java.nio.ByteBuffer;
/**
 * Immutable description of one Cassandra partition: its textual id, its raw
 * partition-key bytes, and the tuple domain it satisfies. A shared
 * {@link #UNPARTITIONED} sentinel represents "no partitioning".
 */
public class CassandraPartition
{
    /** Partition id used by the {@link #UNPARTITIONED} sentinel. */
    static final String UNPARTITIONED_ID = "<UNPARTITIONED>";
    public static final CassandraPartition UNPARTITIONED = new CassandraPartition();

    private final String partitionId;
    // Raw partition key bytes; null only for the UNPARTITIONED sentinel.
    private final byte[] key;
    private final TupleDomain<ColumnHandle> tupleDomain;
    // Whether predicates on indexed columns were pushed down for this partition.
    private final boolean indexedColumnPredicatePushdown;

    /** Builds the UNPARTITIONED sentinel: null key, all-inclusive tuple domain. */
    private CassandraPartition()
    {
        partitionId = UNPARTITIONED_ID;
        tupleDomain = TupleDomain.all();
        key = null;
        indexedColumnPredicatePushdown = false;
    }

    /**
     * @param key raw partition key bytes
     * @param partitionId textual identifier of the partition
     * @param tupleDomain domain constraint this partition satisfies
     * @param indexedColumnPredicatePushdown whether indexed-column predicates were pushed down
     */
    public CassandraPartition(byte[] key, String partitionId, TupleDomain<ColumnHandle> tupleDomain, boolean indexedColumnPredicatePushdown)
    {
        this.key = key;
        this.partitionId = partitionId;
        this.tupleDomain = tupleDomain;
        this.indexedColumnPredicatePushdown = indexedColumnPredicatePushdown;
    }

    /** True when this is the UNPARTITIONED sentinel (compared by id, not identity). */
    public boolean isUnpartitioned()
    {
        return partitionId.equals(UNPARTITIONED_ID);
    }

    public boolean isIndexedColumnPredicatePushdown()
    {
        return indexedColumnPredicatePushdown;
    }

    public TupleDomain<ColumnHandle> getTupleDomain()
    {
        return tupleDomain;
    }

    public String getPartitionId()
    {
        return partitionId;
    }

    @Override
    public String toString()
    {
        return partitionId;
    }

    /**
     * Wraps the key bytes in a ByteBuffer.
     * NOTE(review): throws NullPointerException when invoked on UNPARTITIONED
     * (key is null) — presumably callers only use this for real partitions; confirm.
     */
    public ByteBuffer getKeyAsByteBuffer()
    {
        return ByteBuffer.wrap(key);
    }

    /**
     * Returns the internal key array without copying (may be null for the
     * UNPARTITIONED sentinel). NOTE(review): exposes mutable internal state.
     */
    public byte[] getKey()
    {
        return key;
    }
}
|
/*
* Copyright (C) 2020 Dremio
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.nessie.iceberg;
import static org.apache.iceberg.types.Types.NestedField.required;
import java.io.File;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.BaseTable;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableOperations;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.types.Types.LongType;
import org.apache.iceberg.types.Types.StructType;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.dremio.nessie.api.ContentsApi;
import com.dremio.nessie.api.TreeApi;
import com.dremio.nessie.client.NessieClient;
import com.dremio.nessie.client.NessieClient.AuthType;
import com.dremio.nessie.error.NessieConflictException;
import com.dremio.nessie.error.NessieNotFoundException;
import com.dremio.nessie.model.Branch;
import com.dremio.nessie.model.Reference;
/**
 * Shared fixture for Iceberg-on-Nessie integration tests: provisions a local
 * temp warehouse directory once per run, and a fresh Nessie client, branch and
 * {@link NessieCatalog} before every test.
 */
abstract class BaseTestIceberg {

    private static final Logger LOGGER = LoggerFactory.getLogger(BaseTestIceberg.class);

    // Nessie server location; port can be overridden via the Quarkus test-port property.
    private static final int NESSIE_PORT = Integer.getInteger("quarkus.http.test-port", 19121);
    private static final String NESSIE_ENDPOINT = String.format("http://localhost:%d/api/v1", NESSIE_PORT);

    /** World-writable temp directory used as the catalog's local warehouse (fs.defaultFS). */
    protected static File ALLEY_LOCAL_DIR;

    protected NessieCatalog catalog;
    protected NessieClient client;
    protected TreeApi tree;
    protected ContentsApi contents;
    protected Configuration hadoopConfig;
    /** Nessie branch this test class operates on; fixed at construction. */
    protected final String branch;

    @BeforeAll
    public static void create() throws Exception {
        ALLEY_LOCAL_DIR = java.nio.file.Files.createTempDirectory(
            "test",
            PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrwxrwx")))
            .toFile();
    }

    /**
     * @param branch name of the Nessie branch the subclass tests against
     */
    public BaseTestIceberg(String branch) {
        super();
        this.branch = branch;
    }

    /** Deletes every branch and tag on the server, then recreates an empty "main". */
    private void resetData() throws NessieConflictException, NessieNotFoundException {
        for (Reference r : tree.getAllReferences()) {
            if (r instanceof Branch) {
                tree.deleteBranch(r.getName(), r.getHash());
            } else {
                tree.deleteTag(r.getName(), r.getHash());
            }
        }
        tree.createReference(Branch.of("main", null));
    }

    /**
     * Per-test setup: connect a client, reset server state, (re)create the test
     * branch, and build a catalog pointed at the local warehouse.
     */
    @BeforeEach
    public void beforeEach() throws NessieConflictException, NessieNotFoundException {
        String username = "test";
        String password = "test123";
        this.client = new NessieClient(AuthType.NONE, NESSIE_ENDPOINT, username, password);
        tree = client.getTreeApi();
        contents = client.getContentsApi();
        resetData();
        try {
            tree.createReference(Branch.of(branch, null));
        } catch (Exception e) {
            // ignore, already created. Cant run this in BeforeAll as quarkus hasn't disabled auth
        }
        hadoopConfig = new Configuration();
        hadoopConfig.set(NessieCatalog.CONF_NESSIE_URL, NESSIE_ENDPOINT);
        hadoopConfig.set(NessieCatalog.CONF_NESSIE_USERNAME, username);
        hadoopConfig.set(NessieCatalog.CONF_NESSIE_PASSWORD, password);
        hadoopConfig.set(NessieCatalog.CONF_NESSIE_REF, branch);
        hadoopConfig.set(NessieCatalog.CONF_NESSIE_AUTH_TYPE, "NONE");
        hadoopConfig.set("fs.defaultFS", ALLEY_LOCAL_DIR.toURI().toString());
        hadoopConfig.set("fs.file.impl",
            org.apache.hadoop.fs.LocalFileSystem.class.getName()
        );
        catalog = new NessieCatalog(hadoopConfig);
    }

    /**
     * Creates a table with {@code count} long columns, logging failures before
     * rethrowing them.
     */
    protected Table createTable(TableIdentifier tableIdentifier, int count) {
        try {
            return catalog.createTable(tableIdentifier, schema(count));
        } catch (Throwable t) {
            LOGGER.error("unable to do create {}", tableIdentifier, t);
            throw t;
        }
    }

    /** Creates a single-column (id: long) table; the return value is discarded. */
    protected void createTable(TableIdentifier tableIdentifier) {
        Schema schema = new Schema(StructType.of(required(1, "id", LongType.get()))
            .fields());
        catalog.createTable(tableIdentifier, schema).location();
    }

    /** Builds a schema of {@code count} required long fields named id0..id(count-1). */
    protected static Schema schema(int count) {
        List<Types.NestedField> fields = new ArrayList<>();
        for (int i = 0; i < count; i++) {
            fields.add(required(i, "id" + i, Types.LongType.get()));
        }
        return new Schema(Types.StructType.of(fields).fields());
    }

    /** Creates a branch at the given hash (or at the ref default when hash is null). */
    void createBranch(String name, String hash) throws NessieNotFoundException, NessieConflictException {
        tree.createReference(Branch.of(name, hash));
    }

    @AfterEach
    public void afterEach() throws Exception {
        catalog.close();
        client.close();
        catalog = null;
        client = null;
        hadoopConfig = null;
    }

    /**
     * Removes the temp warehouse directory.
     *
     * FIX: {@code File.delete()} cannot remove a non-empty directory, so the
     * previous one-liner always failed silently and leaked the warehouse.
     * Walk the tree and delete children before parents instead.
     */
    @AfterAll
    public static void destroy() throws Exception {
        if (ALLEY_LOCAL_DIR != null) {
            try (java.util.stream.Stream<java.nio.file.Path> paths =
                    java.nio.file.Files.walk(ALLEY_LOCAL_DIR.toPath())) {
                // Reverse order yields deepest paths first (depth-first deletion).
                paths.sorted(java.util.Comparator.reverseOrder())
                    .map(java.nio.file.Path::toFile)
                    .forEach(File::delete);
            }
        }
    }

    /** Returns the current metadata-file location Nessie tracks for the table. */
    static String getContent(NessieCatalog catalog, TableIdentifier tableIdentifier) {
        Table table = catalog.loadTable(tableIdentifier);
        BaseTable baseTable = (BaseTable) table;
        TableOperations ops = baseTable.operations();
        NessieTableOperations icebergOps = (NessieTableOperations) ops;
        return icebergOps.currentMetadataLocation();
    }
}
|
package com.eliteams.quick4j.web.model.weixin.message;
/**
* Created by hccl on 2017/11/16.
*/
public class VideoMessage extends BaseMessage {
private Video Video;
public Video getVideo() {
return Video;
}
public void setVideo(Video video) {
Video = video;
}
}
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.apple;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.packages.NativeProvider;
import com.google.devtools.build.lib.packages.Provider;
import com.google.devtools.build.lib.packages.SkylarkInfo;
import com.google.devtools.build.lib.packages.StructImpl;
import com.google.devtools.build.lib.skylarkbuildapi.apple.ApplePlatformApi;
import com.google.devtools.build.lib.skylarkbuildapi.apple.ApplePlatformTypeApi;
import com.google.devtools.build.lib.skylarkinterface.SkylarkPrinter;
import java.util.HashMap;
import java.util.Locale;
import javax.annotation.Nullable;
/** An enum that can be used to distinguish between various apple platforms. */
@Immutable
public enum ApplePlatform implements ApplePlatformApi {
IOS_DEVICE("ios_device", "iPhoneOS", PlatformType.IOS, true),
IOS_SIMULATOR("ios_simulator", "iPhoneSimulator", PlatformType.IOS, false),
MACOS("macos", "MacOSX", PlatformType.MACOS, true),
TVOS_DEVICE("tvos_device", "AppleTVOS", PlatformType.TVOS, true),
TVOS_SIMULATOR("tvos_simulator", "AppleTVSimulator", PlatformType.TVOS, false),
WATCHOS_DEVICE("watchos_device", "WatchOS", PlatformType.WATCHOS, true),
WATCHOS_SIMULATOR("watchos_simulator", "WatchSimulator", PlatformType.WATCHOS, false);
private static final ImmutableSet<String> IOS_SIMULATOR_TARGET_CPUS =
ImmutableSet.of("ios_x86_64", "ios_i386");
private static final ImmutableSet<String> IOS_DEVICE_TARGET_CPUS =
ImmutableSet.of("ios_armv6", "ios_arm64", "ios_armv7", "ios_armv7s");
private static final ImmutableSet<String> WATCHOS_SIMULATOR_TARGET_CPUS =
ImmutableSet.of("watchos_i386", "watchos_x86_64");
private static final ImmutableSet<String> WATCHOS_DEVICE_TARGET_CPUS =
ImmutableSet.of("watchos_armv7k", "watchos_arm64_32");
private static final ImmutableSet<String> TVOS_SIMULATOR_TARGET_CPUS =
ImmutableSet.of("tvos_x86_64");
private static final ImmutableSet<String> TVOS_DEVICE_TARGET_CPUS =
ImmutableSet.of("tvos_arm64");
private static final ImmutableSet<String> MACOS_TARGET_CPUS =
ImmutableSet.of("darwin_x86_64");
private static final ImmutableSet<String> BIT_32_TARGET_CPUS =
ImmutableSet.of("ios_i386", "ios_armv7", "ios_armv7s", "watchos_i386", "watchos_armv7k");
private final String skylarkKey;
private final String nameInPlist;
private final PlatformType platformType;
private final boolean isDevice;
/**
 * @param skylarkKey key exposing this platform in the Skylark "platforms" struct
 * @param nameInPlist case-sensitive platform name as written into the plist; must not be null
 * @param platformType coarse platform family (ios / watchos / tvos / macos)
 * @param isDevice true for physical-device platforms, false for simulators
 */
ApplePlatform(
    String skylarkKey, String nameInPlist, PlatformType platformType, boolean isDevice) {
    this.skylarkKey = skylarkKey;
    this.nameInPlist = Preconditions.checkNotNull(nameInPlist);
    this.platformType = platformType;
    this.isDevice = isDevice;
}
/** Returns the coarse platform family (ios / watchos / tvos / macos) of this platform. */
@Override
public PlatformType getType() {
    return platformType;
}
/** True for physical-device platforms, false for simulators. */
@Override
public boolean isDevice() {
    return isDevice;
}
/** Returns the case-sensitive platform name as it appears in the plist (e.g. "iPhoneOS"). */
@Override
public String getNameInPlist() {
    return nameInPlist;
}
/**
 * Returns the name of the "platform" as it appears in the plist when it appears in all-lowercase.
 * Uses an explicit US locale so the result is stable under any default locale.
 */
public String getLowerCaseNameInPlist() {
    return nameInPlist.toLowerCase(Locale.US);
}
/**
 * Resolves a prefixed target cpu string (e.g. {@code "ios_arm64"}) to its
 * {@link ApplePlatform}, or {@code null} when the cpu is not registered for
 * any apple platform.
 *
 * @param targetCpu cpu value with platform type prefix, such as 'ios_arm64'
 */
@Nullable
private static ApplePlatform forTargetCpuNullable(String targetCpu) {
    // Flat guard-style lookup: the first cpu set containing the value wins.
    if (IOS_SIMULATOR_TARGET_CPUS.contains(targetCpu)) {
        return IOS_SIMULATOR;
    }
    if (IOS_DEVICE_TARGET_CPUS.contains(targetCpu)) {
        return IOS_DEVICE;
    }
    if (WATCHOS_SIMULATOR_TARGET_CPUS.contains(targetCpu)) {
        return WATCHOS_SIMULATOR;
    }
    if (WATCHOS_DEVICE_TARGET_CPUS.contains(targetCpu)) {
        return WATCHOS_DEVICE;
    }
    if (TVOS_SIMULATOR_TARGET_CPUS.contains(targetCpu)) {
        return TVOS_SIMULATOR;
    }
    if (TVOS_DEVICE_TARGET_CPUS.contains(targetCpu)) {
        return TVOS_DEVICE;
    }
    if (MACOS_TARGET_CPUS.contains(targetCpu)) {
        return MACOS;
    }
    return null;
}
/**
 * Returns true if the platform for the given target cpu and platform type is a known 32-bit
 * architecture.
 *
 * @param platformType platform type that the given cpu value is implied for
 * @param arch architecture representation, such as 'arm64'
 */
public static boolean is32Bit(PlatformType platformType, String arch) {
    // Decided purely by membership in the static 32-bit cpu set; unknown cpus yield false.
    return BIT_32_TARGET_CPUS.contains(cpuStringForTarget(platformType, arch));
}
/**
 * Builds the prefixed cpu string (e.g. {@code "ios_arm64"}) for the given
 * platform type and architecture.
 *
 * @param platformType platform type that the given cpu value is implied for
 * @param arch architecture representation, such as 'arm64'
 */
public static String cpuStringForTarget(PlatformType platformType, String arch) {
    // macOS keeps the historical "darwin" prefix; every other platform type
    // uses its own lowercase name as the prefix.
    String prefix = platformType == PlatformType.MACOS ? "darwin" : platformType.toString();
    return prefix + "_" + arch;
}
/**
 * Returns the platform for the given target cpu and platform type.
 * Convenience wrapper: builds the prefixed cpu string, then resolves it.
 *
 * @param platformType platform type that the given cpu value is implied for
 * @param arch architecture representation, such as 'arm64'
 * @throws IllegalArgumentException if there is no valid apple platform for the given target cpu
 */
public static ApplePlatform forTarget(PlatformType platformType, String arch) {
    return forTargetCpu(cpuStringForTarget(platformType, arch));
}
/**
 * Returns the platform for the given target cpu.
 *
 * @param targetCpu cpu value with platform type prefix, such as 'ios_arm64'
 * @throws IllegalArgumentException if there is no valid apple platform for the given target cpu
 */
public static ApplePlatform forTargetCpu(String targetCpu) {
    ApplePlatform platform = forTargetCpuNullable(targetCpu);
    // Guard clause: an unregistered cpu is a caller error.
    if (platform == null) {
        throw new IllegalArgumentException(
            "No supported apple platform registered for target cpu " + targetCpu);
    }
    return platform;
}
/**
* Returns true if the given target cpu is an apple platform.
*/
public static boolean isApplePlatform(String targetCpu) {
return forTargetCpuNullable(targetCpu) != null;
}
/** Returns a Skylark struct that contains the instances of this enum. */
public static StructImpl getSkylarkStruct() {
Provider constructor = new NativeProvider<StructImpl>(StructImpl.class, "platforms") {};
HashMap<String, Object> fields = new HashMap<>();
for (ApplePlatform type : values()) {
fields.put(type.skylarkKey, type);
}
return SkylarkInfo.createSchemaless(constructor, fields, Location.BUILTIN);
}
  // Skylark repr: render the platform as its lowercase enum name.
  @Override
  public void repr(SkylarkPrinter printer) {
    printer.append(toString());
  }
  /**
   * Value used to describe Apple platform "type". A {@link ApplePlatform} is implied from a
   * platform type (for example, watchOS) together with a cpu value (for example, armv7).
   */
  // TODO(cparsons): Use these values in static retrieval methods in this class.
  @Immutable
  public enum PlatformType implements ApplePlatformTypeApi {
    IOS("ios"),
    WATCHOS("watchos"),
    TVOS("tvos"),
    MACOS("macos");
    /**
     * The key used to access the enum value as a field in the Skylark apple_common.platform_type
     * struct.
     */
    private final String skylarkKey;
    PlatformType(String skylarkKey) {
      this.skylarkKey = skylarkKey;
    }
    // The lowercase enum name doubles as the user-facing platform type string (e.g. "ios");
    // it is also what fromString matches against.
    @Override
    public String toString() {
      return name().toLowerCase();
    }
    /**
     * Returns the {@link PlatformType} with given name (case insensitive).
     *
     * @throws IllegalArgumentException if the name does not match a valid platform type.
     */
    public static PlatformType fromString(String name) {
      for (PlatformType platformType : PlatformType.values()) {
        if (name.equalsIgnoreCase(platformType.toString())) {
          return platformType;
        }
      }
      throw new IllegalArgumentException(String.format("Unsupported platform type \"%s\"", name));
    }
    /** Returns a Skylark struct that contains the instances of this enum, keyed by skylarkKey. */
    public static StructImpl getSkylarkStruct() {
      Provider constructor = new NativeProvider<StructImpl>(StructImpl.class, "platform_types") {};
      HashMap<String, Object> fields = new HashMap<>();
      for (PlatformType type : values()) {
        fields.put(type.skylarkKey, type);
      }
      return SkylarkInfo.createSchemaless(constructor, fields, Location.BUILTIN);
    }
    // Skylark repr: render the platform type as its lowercase enum name.
    @Override
    public void repr(SkylarkPrinter printer) {
      printer.append(toString());
    }
  }
}
|
/**
* Service layer beans.
*/
package org.launchcode.oddjobs.service;
|
package com.x.common.core.application.component;
import java.io.File;
import java.util.Collection;
import java.util.TreeMap;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.WildcardFileFilter;
import org.apache.commons.lang3.StringUtils;
import com.x.base.core.gson.XGsonBuilder;
import com.x.base.core.utils.zip.JarTools;
public abstract class Assemble extends Deployable {

    /**
     * Builds the exploded war layout for this module under {@code distPath}: WEB-INF/lib and
     * WEB-INF/classes/META-INF, the generated configuration files, the copied runtime jars,
     * the module's own extracted classes and static content, and finally web.xml.
     *
     * @return the root directory of the exploded war
     */
    private File concreteStructure(String distPath, String repositoryPath, String centerHost, Integer centerPort,
            String centerCipher, String configApplicationServer) throws Exception {
        File dir = new File(distPath, this.getName());
        FileUtils.forceMkdir(dir);
        // Start from a clean directory so artifacts of a previous pack cannot leak in.
        FileUtils.cleanDirectory(dir);
        File webInf = new File(dir, "WEB-INF");
        FileUtils.forceMkdir(webInf);
        File lib = new File(webInf, "lib");
        FileUtils.forceMkdir(lib);
        File classes = new File(webInf, "classes");
        FileUtils.forceMkdir(classes);
        File metaInf = new File(classes, "META-INF");
        FileUtils.forceMkdir(metaInf);
        this.createCenterServerFile(metaInf, centerHost, centerPort, centerCipher);
        this.createConfigFile(metaInf, configApplicationServer);
        this.copyPersistence(metaInf, repositoryPath);
        this.copyIndependentJars(lib, repositoryPath);
        this.extractJar(classes, repositoryPath);
        this.extractZip(dir, repositoryPath);
        this.createWebXml(webInf);
        return dir;
    }

    /** Writes META-INF/centerServer.json describing the center server endpoint. */
    private void createCenterServerFile(File metaInf, String host, Integer port, String cipher) throws Exception {
        TreeMap<String, Object> map = new TreeMap<>();
        map.put("host", StringUtils.trimToEmpty(host));
        // 30080 is the default center port when none is supplied.
        map.put("port", null != port ? port : 30080);
        map.put("cipher", cipher);
        // BUGFIX: write with an explicit charset. The charset-less overload is deprecated and
        // falls back to the platform default encoding, inconsistent with createWebXml below.
        FileUtils.writeStringToFile(new File(metaInf, "centerServer.json"), XGsonBuilder.toJson(map), "UTF-8");
    }

    /** Writes META-INF/config.json holding the application server configuration value. */
    private void createConfigFile(File metaInf, String configApplicationServer) throws Exception {
        TreeMap<String, Object> map = new TreeMap<>();
        map.put("applicationServer", StringUtils.trimToEmpty(configApplicationServer));
        // BUGFIX: explicit UTF-8 instead of the deprecated platform-default overload.
        FileUtils.writeStringToFile(new File(metaInf, "config.json"), XGsonBuilder.toJson(map), "UTF-8");
    }

    /**
     * Copies the shared runtime jars (x_base_core*, x_common_core* and the openjpa, ehcache
     * and slf4j jar sets) from the repository into WEB-INF/lib.
     */
    private void copyIndependentJars(File lib, String repositoryPath) throws Exception {
        File repositoryLib = new File(repositoryPath);
        FileUtils.copyDirectory(repositoryLib, lib, new WildcardFileFilter("x_base_core*.jar"));
        FileUtils.copyDirectory(repositoryLib, lib, new WildcardFileFilter("x_common_core*.jar"));
        FileUtils.copyDirectory(new File(repositoryLib, "openjpa"), lib, new WildcardFileFilter("*.jar"));
        FileUtils.copyDirectory(new File(repositoryLib, "ehcache"), lib, new WildcardFileFilter("*.jar"));
        FileUtils.copyDirectory(new File(repositoryLib, "slf4j"), lib, new WildcardFileFilter("*.jar"));
    }

    /** Copies this module's persistence unit into META-INF/x_persistence.xml. */
    private void copyPersistence(File metaInf, String repositoryPath) throws Exception {
        FileUtils.copyFile(new File(repositoryPath, "x_persistence_" + this.getName() + ".xml"),
                new File(metaInf, "x_persistence.xml"), false);
    }

    /**
     * Generates WEB-INF/web.xml with the druid monitoring servlet/filter entries
     * (the druid_* fragments are presumably inherited from Deployable -- not visible here).
     */
    private void createWebXml(File webInf) throws Exception {
        String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<web-app id=\"" + this.getName()
                + "\" metadata-complete=\"false\" version=\"3.0\">" + "<display-name>" + this.getName()
                + "</display-name>";
        xml += druid_servlet;
        xml += druid_servlet_mapping;
        xml += druid_filter;
        xml += druid_filter_mapping;
        xml += "</web-app>";
        File file = new File(webInf, "web.xml");
        FileUtils.writeStringToFile(file, xml, "UTF-8");
    }

    /**
     * Unpacks this module's jar into WEB-INF/classes.
     * NOTE(review): the "-4.0.0" version suffix is hard-coded; packing breaks when the build
     * version changes, and listFiles returning no match makes iterator().next() throw.
     */
    private void extractJar(File classes, String repositoryPath) throws Exception {
        File repositoryLib = new File(repositoryPath);
        Collection<File> files = FileUtils.listFiles(repositoryLib,
                new WildcardFileFilter(this.getName() + "-4.0.0.jar"), null);
        JarTools.unjar(files.iterator().next(), "", classes, true);
    }

    /** Unpacks this module's static-content zip into the war root. */
    private void extractZip(File dir, String repositoryPath) throws Exception {
        File file = new File(repositoryPath, this.getName() + ".zip");
        JarTools.unjar(file, "", dir, true);
    }

    /**
     * Builds the complete exploded structure, lets the concrete subclass customize it, and
     * packs the result into {@code <distPath>/<name>.war}.
     *
     * @return the absolute path of the created war file
     */
    public String pack(String distPath, String repositoryPath, String centerHost, Integer centerPort,
            String centerCipher, String configApplicationServer) throws Exception {
        File dir = this.concreteStructure(distPath, repositoryPath, centerHost, centerPort, centerCipher,
                configApplicationServer);
        custom(dir, repositoryPath);
        File war = new File(distPath, this.getName() + ".war");
        JarTools.jar(dir, war);
        return war.getAbsolutePath();
    }

    /** Hook for subclasses to add or adjust content before the war is packed. */
    protected abstract void custom(File dir, String repositoryPath) throws Exception;

    /**
     * Deployable name derived from the concrete class's simple name. (A simple name never
     * contains '.', so the replace is effectively a no-op kept for safety.)
     */
    protected String getName() {
        return StringUtils.replace(this.getClass().getSimpleName(), ".", "_");
    }

    /** Value object bundling the parameters accepted by {@link #pack}. */
    public class Argument {
        private String distPath;
        private String repositoryPath;
        private String resourcesPath;
        private String centerHost;
        private Integer centerPort;
        private String centerCipher;
        private String configApplicationServer;

        public String getDistPath() {
            return distPath;
        }

        public void setDistPath(String distPath) {
            this.distPath = distPath;
        }

        public String getRepositoryPath() {
            return repositoryPath;
        }

        public void setRepositoryPath(String repositoryPath) {
            this.repositoryPath = repositoryPath;
        }

        public String getResourcesPath() {
            return resourcesPath;
        }

        public void setResourcesPath(String resourcesPath) {
            this.resourcesPath = resourcesPath;
        }

        public String getCenterHost() {
            return centerHost;
        }

        public void setCenterHost(String centerHost) {
            this.centerHost = centerHost;
        }

        public Integer getCenterPort() {
            return centerPort;
        }

        public void setCenterPort(Integer centerPort) {
            this.centerPort = centerPort;
        }

        public String getCenterCipher() {
            return centerCipher;
        }

        public void setCenterCipher(String centerCipher) {
            this.centerCipher = centerCipher;
        }

        public String getConfigApplicationServer() {
            return configApplicationServer;
        }

        public void setConfigApplicationServer(String configApplicationServer) {
            this.configApplicationServer = configApplicationServer;
        }
    }
}
|
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tk.traiders;
import android.Manifest;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.os.Bundle;
import android.view.View;
import android.widget.Toast;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.LocationSource;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
/**
* This shows how to use a custom location source.
*/
public class LocationActivity extends AppCompatActivity implements OnMapReadyCallback {

    /**
     * Click handler for the confirm button: returns the selected country/city to the calling
     * activity, or warns the user when no location has been chosen yet.
     */
    public void ConfirmLocationClick(View view) {
        if (mLocationSource.country == null || mLocationSource.city == null) {
            Toast.makeText(this, "Choose a location!", Toast.LENGTH_SHORT).show();
        } else {
            Intent data = new Intent();
            data.putExtra("country", mLocationSource.country);
            data.putExtra("city", mLocationSource.city);
            setResult(RESULT_OK, data);
            finish();
        }
    }

    /**
     * A {@link LocationSource} which reports a new location at the point where the user
     * long-pressed the map, and reverse-geocodes that point into a country and city.
     */
    private class LongPressLocationSource implements LocationSource, GoogleMap.OnMapLongClickListener {

        public String country = null;
        public String city = null;

        private OnLocationChangedListener mListener;

        /**
         * Flag to keep track of the activity's lifecycle. This is not strictly necessary in this
         * case because onMapLongPress events don't occur while the activity containing the map is
         * paused but is included to demonstrate best practices (e.g., if a background service were
         * to be used).
         */
        private boolean mPaused;

        @Override
        public void activate(OnLocationChangedListener listener) {
            mListener = listener;
        }

        @Override
        public void deactivate() {
            mListener = null;
        }

        @Override
        public void onMapLongClick(LatLng point) {
            if (mListener != null && !mPaused) {
                Location location = new Location("LongPressLocationProvider");
                location.setLatitude(point.latitude);
                location.setLongitude(point.longitude);
                location.setAccuracy(100);
                mListener.onLocationChanged(location);
                Geocoder gcd = new Geocoder(getApplicationContext(), Locale.getDefault());
                List<Address> addresses = null;
                try {
                    addresses = gcd.getFromLocation(point.latitude, point.longitude, 1);
                } catch (IOException e) {
                    // Geocoder may fail (no backend / no network); treat as "no address found".
                    e.printStackTrace();
                }
                // BUGFIX: guard against both an IOException above (addresses stays null) and an
                // empty result list; the original dereferenced a possibly-null list here.
                if (addresses != null && !addresses.isEmpty()) {
                    country = addresses.get(0).getCountryName();
                    city = addresses.get(0).getAdminArea();
                }
                Toast toast = Toast.makeText(getApplicationContext(),
                        country + " " + city,
                        Toast.LENGTH_SHORT);
                toast.show();
            }
        }

        public void onPause() {
            mPaused = true;
        }

        public void onResume() {
            mPaused = false;
        }
    }

    // Custom location source fed by long presses on the map.
    private LongPressLocationSource mLocationSource;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_location);
        mLocationSource = new LongPressLocationSource();
        SupportMapFragment mapFragment =
                (SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.map);
        mapFragment.getMapAsync(this);
    }

    @Override
    protected void onResume() {
        super.onResume();
        mLocationSource.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();
        mLocationSource.onPause();
    }

    @Override
    public void onMapReady(GoogleMap map) {
        map.setLocationSource(mLocationSource);
        map.setOnMapLongClickListener(mLocationSource);
        map.getUiSettings().setZoomControlsEnabled(true);
        // BUGFIX: setMyLocationEnabled throws SecurityException without the location permission
        // on API 23+; only enable the layer when the permission is actually granted.
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION)
                == PackageManager.PERMISSION_GRANTED) {
            map.setMyLocationEnabled(true);
        }
    }
}
|
package com.chargerrobotics.subsystems;
import com.chargerrobotics.Constants;
import edu.wpi.first.wpilibj.AddressableLED;
import edu.wpi.first.wpilibj.AddressableLEDBuffer;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.DriverStation.Alliance;
import edu.wpi.first.wpilibj.util.Color;
import edu.wpi.first.wpilibj.util.Color8Bit;
import edu.wpi.first.wpilibj2.command.CommandScheduler;
import edu.wpi.first.wpilibj2.command.SubsystemBase;
@SuppressWarnings("unused")
public class LEDSubsystem extends SubsystemBase {
  private static LEDSubsystem instance;
  /** Number of LEDs on the strip. */
  private static final int LED_COUNT = 24;
  /** Periodic ticks between autonomous animation steps. */
  private static final int INTERVAL = 3;
  /** Periodic ticks to wait before the autonomous animation starts. */
  private static final int WAIT_INTERVAL = 10;
  private static final int AUTO_RED_VALUE = 255;
  private static final int AUTO_DIM_RED_VALUE = 40;
  private LEDMode mode;
  private AddressableLED leds;
  private AddressableLEDBuffer buffer;
  private LED_STATE led_state;
  private int led_state_count;
  // Tick counter for the autonomous animation. Negative values encode the initial
  // WAIT_INTERVAL delay; once non-negative it cycles modulo INTERVAL.
  private int counter = WAIT_INTERVAL;

  /** Lazily creates and registers the singleton instance. */
  public static LEDSubsystem getInstance() {
    if (instance == null) {
      instance = new LEDSubsystem();
      CommandScheduler.getInstance().registerSubsystem(instance);
    }
    return instance;
  }

  public LEDSubsystem() {
    leds = new AddressableLED(Constants.leds);
    buffer = new AddressableLEDBuffer(LED_COUNT);
    leds.setLength(LED_COUNT);
    setMode(LEDMode.DISABLED);
    leds.start();
    led_state = LED_STATE.LED_RIGHT;
    led_state_count = 0;
  }

  /** Stops output and releases the LED port. */
  public void close() {
    leds.stop();
    leds.close();
  }

  /**
   * Switches the display mode and paints the initial frame for that mode.
   *
   * @param mode the new LED mode
   */
  public void setMode(LEDMode mode) {
    this.mode = mode;
    switch (mode) {
      case DISABLED:
        setAllRGB(0, 0, 0);
        break;
      case TELEOP: {
        // Dim alliance color; hue 55 (yellow-ish) when the alliance is unknown.
        // Alliance is queried once instead of twice as in the original.
        Alliance alliance = DriverStation.getInstance().getAlliance();
        if (alliance == Alliance.Blue) setAllHSV(100, 255, 15);
        else if (alliance == Alliance.Red) setAllHSV(0, 255, 15);
        else setAllHSV(55, 255, 15);
        break;
      }
      case AUTONOMOUS:
        // Negative counter delays the chase animation by WAIT_INTERVAL ticks.
        counter = -WAIT_INTERVAL;
        buffer.setRGB(0, AUTO_RED_VALUE, 0, 0);
        for (int i = 1; i < LED_COUNT; i++) buffer.setRGB(i, 0, 0, 0);
        break;
    }
    leds.setData(buffer);
  }

  /**
   * Sets all leds in the buffer.
   *
   * @param r the r value [0-255]
   * @param g the g value [0-255]
   * @param b the b value [0-255]
   */
  private void setAllRGB(int r, int g, int b) {
    for (int i = 0; i < LED_COUNT; i++) buffer.setRGB(i, r, g, b);
  }

  /**
   * Sets all leds in the buffer using integer HSV-to-RGB conversion.
   *
   * @param h the h value [0-180]
   * @param s the s value [0-255]
   * @param v the v value [0-255]
   */
  private void setAllHSV(int h, int s, int v) {
    if (s == 0) {
      // Zero saturation is pure gray: all channels equal the value.
      for (int i = 0; i < LED_COUNT; i++) buffer.setRGB(i, v, v, v);
      return;
    }
    // Standard integer HSV conversion: 6 hue regions of 30 (h in [0,180]).
    final int region = h / 30;
    final int remainder = (h - (region * 30)) * 6;
    final int p = (v * (255 - s)) >> 8;
    final int q = (v * (255 - ((s * remainder) >> 8))) >> 8;
    final int t = (v * (255 - ((s * (255 - remainder)) >> 8))) >> 8;
    switch (region) {
      case 0:
        for (int i = 0; i < LED_COUNT; i++) buffer.setRGB(i, v, t, p);
        break;
      case 1:
        for (int i = 0; i < LED_COUNT; i++) buffer.setRGB(i, q, v, p);
        break;
      case 2:
        for (int i = 0; i < LED_COUNT; i++) buffer.setRGB(i, p, v, t);
        break;
      case 3:
        for (int i = 0; i < LED_COUNT; i++) buffer.setRGB(i, p, q, v);
        break;
      case 4:
        for (int i = 0; i < LED_COUNT; i++) buffer.setRGB(i, t, p, v);
        break;
      default:
        for (int i = 0; i < LED_COUNT; i++) buffer.setRGB(i, v, p, q);
        break;
    }
  }

  /**
   * Sets all LEDs in the buffer.
   *
   * @param color The color of the LED
   */
  private void setAllLED(Color color) {
    int r = (int) (color.red * 255);
    int g = (int) (color.green * 255);
    int b = (int) (color.blue * 255);
    for (int i = 0; i < LED_COUNT; i++) buffer.setRGB(i, r, g, b);
  }

  /**
   * Sets all LEDs in the buffer.
   *
   * @param color The color of the LED
   */
  private void setAllLED(Color8Bit color) {
    for (int i = 0; i < LED_COUNT; i++) buffer.setRGB(i, color.red, color.green, color.blue);
  }

  public LEDMode getMode() {
    return mode;
  }

  @Override
  public void periodic() {
    super.periodic();
    switch (mode) {
      case DISABLED:
        return;
      case TELEOP:
        return;
      case AUTONOMOUS:
        // Advance the chase only once the startup delay has elapsed (counter >= 0) and then
        // every INTERVAL ticks. BUGFIX: the original applied "% INTERVAL" while the counter
        // was still negative, which collapsed the intended WAIT_INTERVAL delay to a single
        // tick (e.g. -10 -> (-9 % 3) == 0 on the next pass).
        if (counter >= 0 && counter % INTERVAL == 0) {
          switch (led_state) {
            case LED_RIGHT:
              onLedRight();
              break;
            case LED_LEFT:
              onLedLeft();
              break;
          }
        }
        counter = (counter < 0) ? counter + 1 : (counter + 1) % INTERVAL;
        leds.setData(buffer);
        return;
    }
  }

  /** Blanks every pixel of the given buffer. */
  private void clearBuffer(AddressableLEDBuffer buf) {
    for (int i = 0; i < buf.getLength(); i++) {
      buf.setRGB(i, 0, 0, 0);
    }
  }

  /** One rightward chase step: dim trailing pixel plus bright leading pixel. */
  private void onLedRight() {
    clearBuffer(buffer);
    buffer.setRGB(led_state_count, AUTO_DIM_RED_VALUE, 0, 0);
    buffer.setRGB(led_state_count + 1, AUTO_RED_VALUE, 0, 0);
    led_state_count++;
    if (led_state_count >= LED_COUNT - 1) {
      // Reached the right end; reverse direction.
      led_state_count = 0;
      led_state = LED_STATE.LED_LEFT;
    }
  }

  /** One leftward chase step, mirroring onLedRight. */
  private void onLedLeft() {
    clearBuffer(buffer);
    buffer.setRGB(LED_COUNT - led_state_count - 1, AUTO_DIM_RED_VALUE, 0, 0);
    buffer.setRGB(LED_COUNT - led_state_count - 2, AUTO_RED_VALUE, 0, 0);
    led_state_count++;
    if (led_state_count >= LED_COUNT - 1) {
      // Reached the left end; reverse direction.
      led_state_count = 0;
      led_state = LED_STATE.LED_RIGHT;
    }
  }

  /** Direction of the autonomous chase animation. */
  private static enum LED_STATE {
    LED_LEFT,
    LED_RIGHT;
  }

  /** High-level display mode of the strip. */
  public static enum LEDMode {
    DISABLED,
    TELEOP,
    AUTONOMOUS;
  }
}
|
package org.corfudb.infrastructure;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.protobuf.TextFormat;
import io.micrometer.core.instrument.Timer;
import lombok.extern.slf4j.Slf4j;
import org.corfudb.common.metrics.micrometer.MicroMeterUtils;
import org.corfudb.infrastructure.BatchWriterOperation.Type;
import org.corfudb.infrastructure.log.StreamLog;
import org.corfudb.protocols.CorfuProtocolLogData;
import org.corfudb.protocols.wireprotocol.LogData;
import org.corfudb.protocols.wireprotocol.StreamsAddressResponse;
import org.corfudb.protocols.wireprotocol.TailsResponse;
import org.corfudb.runtime.exceptions.QuotaExceededException;
import org.corfudb.runtime.exceptions.WrongEpochException;
import org.corfudb.runtime.exceptions.unrecoverable.UnrecoverableCorfuInterruptedError;
import org.corfudb.runtime.proto.service.CorfuMessage.RequestMsg;
import org.corfudb.runtime.proto.service.CorfuMessage.RequestPayloadMsg;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.corfudb.protocols.CorfuProtocolLogData.getLogData;
import static org.corfudb.runtime.proto.service.CorfuMessage.PriorityLevel;
/**
* This class manages access for operations that need ordering while executing against
* the backing storage.
*/
@Slf4j
public class BatchProcessor implements AutoCloseable {
    // Maximum number of operations applied before the log is synced and the
    // pending futures are completed. Assigned once in the constructor.
    private final int BATCH_SIZE;
    // When true, streamLog.sync(sync) flushes each batch to secondary storage.
    private final boolean sync;
    private final StreamLog streamLog;
    // Incoming operations; consumed exclusively by the single processor thread,
    // which is what provides the ordering guarantee for executed operations.
    private final BlockingQueue<BatchWriterOperation> operationsQueue;
    // Single-threaded executor running process() for the lifetime of this object.
    private final ExecutorService processorService;
    /**
     * The sealEpoch is the epoch up to which all operations have been sealed. Any
     * BatchWriterOperation arriving after the sealEpoch with an epoch less than the sealEpoch
     * is completed exceptionally with a WrongEpochException.
     * This is persisted in the ServerContext by the LogUnitServer to withstand restarts.
     */
    private long sealEpoch;
    /**
     * Returns a new BatchProcessor for a stream log.
     *
     * @param streamLog stream log for writes (can be in memory or file)
     * @param sealEpoch All operations stamped with epoch less than the epochWaterMark are discarded.
     * @param sync If true, the batch writer will sync writes to secondary storage
     */
    public BatchProcessor(StreamLog streamLog, long sealEpoch, boolean sync) {
        this.sealEpoch = sealEpoch;
        this.sync = sync;
        this.streamLog = streamLog;
        BATCH_SIZE = 50;
        operationsQueue = new LinkedBlockingQueue<>();
        // Non-daemon worker: the JVM waits for in-flight writes on shutdown.
        processorService = Executors
                .newSingleThreadExecutor(new ThreadFactoryBuilder()
                        .setDaemon(false)
                        .setNameFormat("LogUnit-BatchProcessor-%d")
                        .build());
        // The processing loop exits when it dequeues the SHUTDOWN sentinel (see close()).
        processorService.submit(this::process);
    }
    // Runs the runnable inside the timer when one is present, otherwise directly.
    // NOTE(review): not referenced anywhere within this class.
    private void recordRunnable(Runnable runnable, Optional<Timer> timer) {
        if (timer.isPresent()) {
            timer.get().record(runnable);
        } else {
            runnable.run();
        }
    }
    /**
     * Add a task to the processor.
     *
     * @param type The request type
     * @param req The request message
     * @return returns a future result for the request, if it expects one
     */
    public <T> CompletableFuture<T> addTask(@Nonnull Type type, @Nonnull RequestMsg req) {
        BatchWriterOperation<T> op = new BatchWriterOperation<>(type, req);
        operationsQueue.add(op);
        return op.getFutureResult();
    }
    /**
     * Main processing loop, run on the dedicated processor thread. Drains the queue,
     * applies operations to the stream log in arrival order, and completes each
     * operation's future only after the batch containing it has been synced.
     */
    private void process() {
        if (!sync) {
            log.warn("batchWriteProcessor: writes configured to not sync with secondary storage");
        }
        try {
            // lastOp == null means "block on take()"; otherwise poll() so the current
            // batch can be flushed as soon as the queue runs dry.
            BatchWriterOperation lastOp = null;
            // Operations applied but not yet completed (awaiting the batch sync).
            List<BatchWriterOperation<?>> res = new ArrayList<>();
            while (true) {
                BatchWriterOperation currentOp;
                MicroMeterUtils.measure(operationsQueue.size(), "logunit.queue.size");
                if (lastOp == null) {
                    currentOp = operationsQueue.take();
                } else {
                    currentOp = operationsQueue.poll();
                    // Flush the batch when the queue is empty, the batch is full, or
                    // shutdown was requested.
                    if (currentOp == null || res.size() == BATCH_SIZE || currentOp == BatchWriterOperation.SHUTDOWN) {
                        streamLog.sync(sync);
                        if (log.isTraceEnabled()) {
                            log.trace("batchWriteProcessor: completed {} operations", res.size());
                        }
                        // At this point we need to complete the requests
                        // that completed successfully (i.e. haven't failed)
                        for (BatchWriterOperation op : res) {
                            if (!op.getFutureResult().isCompletedExceptionally()
                                    && !op.getFutureResult().isCancelled()) {
                                op.getFutureResult().complete(op.getResultValue());
                            }
                        }
                        res.clear();
                    }
                }
                if (currentOp == null) {
                    // Queue drained: go back to blocking take() on the next iteration.
                    lastOp = null;
                } else if (currentOp == BatchWriterOperation.SHUTDOWN) {
                    log.warn("batchWriteProcessor: shutting down the write processor");
                    // Final forced sync before the loop exits.
                    streamLog.sync(true);
                    break;
                } else if (streamLog.quotaExceeded() &&
                        (currentOp.getRequest().getHeader().getPriority() != PriorityLevel.HIGH)) {
                    // Over quota: reject everything except high-priority requests.
                    currentOp.getFutureResult().completeExceptionally(
                            new QuotaExceededException("Quota of " + streamLog.quotaLimitInBytes() + " bytes"));
                    log.warn("batchWriteProcessor: quota exceeded, dropping request {}",
                            TextFormat.shortDebugString(currentOp.getRequest()));
                } else if (currentOp.getType() == BatchWriterOperation.Type.SEAL &&
                        (currentOp.getRequest().getPayload().getSealRequest().getEpoch() >= sealEpoch)) {
                    // Seal with a new (>=) epoch: raise the seal water mark.
                    log.info("batchWriteProcessor: updating epoch from {} to {}",
                            sealEpoch, currentOp.getRequest().getPayload().getSealRequest().getEpoch());
                    sealEpoch = currentOp.getRequest().getPayload().getSealRequest().getEpoch();
                    res.add(currentOp);
                    lastOp = currentOp;
                } else if (currentOp.getRequest().getHeader().getEpoch() != sealEpoch) {
                    // Stale (or future) epoch: reject without touching the log.
                    log.warn("batchWriteProcessor: wrong epoch on {} request, seal epoch is {}, and request epoch is {}",
                            currentOp.getType(), sealEpoch, currentOp.getRequest().getHeader().getEpoch());
                    currentOp.getFutureResult().completeExceptionally(new WrongEpochException(sealEpoch));
                    lastOp = currentOp;
                } else {
                    try {
                        RequestPayloadMsg payload = currentOp.getRequest().getPayload();
                        switch (currentOp.getType()) {
                            case PREFIX_TRIM:
                                final long addr = payload.getTrimLogRequest().getAddress().getSequence();
                                streamLog.prefixTrim(addr);
                                break;
                            case WRITE:
                                LogData logData = getLogData(payload.getWriteLogRequest().getLogData());
                                MicroMeterUtils.time(() -> streamLog.append(logData.getGlobalAddress(), logData),
                                        "logunit.write.timer", "type", "single");
                                break;
                            case RANGE_WRITE:
                                List<LogData> range = payload.getRangeWriteLogRequest().getLogDataList()
                                        .stream().map(CorfuProtocolLogData::getLogData).collect(Collectors.toList());
                                MicroMeterUtils.time(() -> streamLog.append(range),
                                        "logunit.write.timer", "type", "range");
                                break;
                            case RESET:
                                streamLog.reset();
                                break;
                            case TAILS_QUERY:
                                final TailsResponse tails;
                                switch (payload.getTailRequest().getReqType()) {
                                    case LOG_TAIL:
                                        tails = new TailsResponse(streamLog.getLogTail());
                                        break;
                                    case ALL_STREAMS_TAIL:
                                        tails = streamLog.getAllTails();
                                        break;
                                    default:
                                        throw new UnsupportedOperationException("Unknown request type "
                                                + payload.getTailRequest().getReqType());
                                }
                                tails.setEpoch(sealEpoch);
                                currentOp.setResultValue(tails);
                                break;
                            case LOG_ADDRESS_SPACE_QUERY:
                                // Retrieve the address space for every stream in the log.
                                StreamsAddressResponse resp = streamLog.getStreamsAddressSpace();
                                resp.setEpoch(sealEpoch);
                                currentOp.setResultValue(resp);
                                break;
                            default:
                                log.warn("batchWriteProcessor: unknown operation {}", currentOp);
                        }
                    } catch (Exception e) {
                        // Fail only this operation; the loop keeps serving the queue.
                        log.error("batchWriteProcessor: stream log error. Batch: [queue size={}]. " +
                                "StreamLog: [trim mark={}].", operationsQueue.size(), streamLog.getTrimMark(), e);
                        currentOp.getFutureResult().completeExceptionally(e);
                    }
                    res.add(currentOp);
                    lastOp = currentOp;
                }
            }
        } catch (Exception e) {
            log.error("Caught exception in the write processor ", e);
        }
    }
    /**
     * Enqueues the SHUTDOWN sentinel, then waits (bounded by SHUTDOWN_TIMER) for the
     * processor thread to drain and sync before returning.
     */
    @Override
    public void close() {
        operationsQueue.add(BatchWriterOperation.SHUTDOWN);
        processorService.shutdown();
        try {
            processorService.awaitTermination(ServerContext.SHUTDOWN_TIMER.toMillis(),
                    TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            throw new UnrecoverableCorfuInterruptedError("BatchProcessor close interrupted.", e);
        }
    }
}
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/enums/user_list_string_rule_item_operator.proto
package com.google.ads.googleads.v9.enums;
/**
* <pre>
* Supported rule operator for string type.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum}
*/
public final class UserListStringRuleItemOperatorEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum)
UserListStringRuleItemOperatorEnumOrBuilder {
private static final long serialVersionUID = 0L;
// Use UserListStringRuleItemOperatorEnum.newBuilder() to construct.
private UserListStringRuleItemOperatorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UserListStringRuleItemOperatorEnum() {
}
  // Invoked reflectively by the protobuf runtime; the parameter only disambiguates
  // this overload and is never read.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new UserListStringRuleItemOperatorEnum();
  }
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
  // Wire-parsing constructor. This message declares no fields, so every non-zero
  // tag is preserved verbatim in the unknown-field set.
  private UserListStringRuleItemOperatorEnum(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Unknown fields are attached even when parsing fails part-way.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorProto.internal_static_google_ads_googleads_v9_enums_UserListStringRuleItemOperatorEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorProto.internal_static_google_ads_googleads_v9_enums_UserListStringRuleItemOperatorEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum.class, com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum.Builder.class);
}
/**
* <pre>
* Enum describing possible user list string rule item operators.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum.UserListStringRuleItemOperator}
*/
public enum UserListStringRuleItemOperator
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* Contains.
* </pre>
*
* <code>CONTAINS = 2;</code>
*/
CONTAINS(2),
/**
* <pre>
* Equals.
* </pre>
*
* <code>EQUALS = 3;</code>
*/
EQUALS(3),
/**
* <pre>
* Starts with.
* </pre>
*
* <code>STARTS_WITH = 4;</code>
*/
STARTS_WITH(4),
/**
* <pre>
* Ends with.
* </pre>
*
* <code>ENDS_WITH = 5;</code>
*/
ENDS_WITH(5),
/**
* <pre>
* Not equals.
* </pre>
*
* <code>NOT_EQUALS = 6;</code>
*/
NOT_EQUALS(6),
/**
* <pre>
* Not contains.
* </pre>
*
* <code>NOT_CONTAINS = 7;</code>
*/
NOT_CONTAINS(7),
/**
* <pre>
* Not starts with.
* </pre>
*
* <code>NOT_STARTS_WITH = 8;</code>
*/
NOT_STARTS_WITH(8),
/**
* <pre>
* Not ends with.
* </pre>
*
* <code>NOT_ENDS_WITH = 9;</code>
*/
NOT_ENDS_WITH(9),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* Contains.
* </pre>
*
* <code>CONTAINS = 2;</code>
*/
public static final int CONTAINS_VALUE = 2;
/**
* <pre>
* Equals.
* </pre>
*
* <code>EQUALS = 3;</code>
*/
public static final int EQUALS_VALUE = 3;
/**
* <pre>
* Starts with.
* </pre>
*
* <code>STARTS_WITH = 4;</code>
*/
public static final int STARTS_WITH_VALUE = 4;
/**
* <pre>
* Ends with.
* </pre>
*
* <code>ENDS_WITH = 5;</code>
*/
public static final int ENDS_WITH_VALUE = 5;
/**
* <pre>
* Not equals.
* </pre>
*
* <code>NOT_EQUALS = 6;</code>
*/
public static final int NOT_EQUALS_VALUE = 6;
/**
* <pre>
* Not contains.
* </pre>
*
* <code>NOT_CONTAINS = 7;</code>
*/
public static final int NOT_CONTAINS_VALUE = 7;
/**
* <pre>
* Not starts with.
* </pre>
*
* <code>NOT_STARTS_WITH = 8;</code>
*/
public static final int NOT_STARTS_WITH_VALUE = 8;
/**
* <pre>
* Not ends with.
* </pre>
*
* <code>NOT_ENDS_WITH = 9;</code>
*/
public static final int NOT_ENDS_WITH_VALUE = 9;
    // Wire value of this constant; UNRECOGNIZED has no wire value and must not be asked.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static UserListStringRuleItemOperator valueOf(int value) {
return forNumber(value);
}
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or null when the
     *         value is unknown to this version (callers map null to UNRECOGNIZED).
     */
    public static UserListStringRuleItemOperator forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 2: return CONTAINS;
        case 3: return EQUALS;
        case 4: return STARTS_WITH;
        case 5: return ENDS_WITH;
        case 6: return NOT_EQUALS;
        case 7: return NOT_CONTAINS;
        case 8: return NOT_STARTS_WITH;
        case 9: return NOT_ENDS_WITH;
        default: return null;
      }
    }
public static com.google.protobuf.Internal.EnumLiteMap<UserListStringRuleItemOperator>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
UserListStringRuleItemOperator> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<UserListStringRuleItemOperator>() {
public UserListStringRuleItemOperator findValueByNumber(int number) {
return UserListStringRuleItemOperator.forNumber(number);
}
};
/**
 * Returns the reflective descriptor of this enum value.
 *
 * @throws java.lang.IllegalStateException if this is {@code UNRECOGNIZED},
 *         which has no descriptor entry.
 */
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
// Descriptor of the enum type this value belongs to.
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
// This enum is the first (index 0) enum type nested in the container message.
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum.getDescriptor().getEnumTypes().get(0);
}
// Cached values() array; avoids re-cloning on every descriptor lookup.
private static final UserListStringRuleItemOperator[] VALUES = values();
/**
 * Resolves an enum value from its reflective descriptor.
 *
 * @throws java.lang.IllegalArgumentException if {@code desc} belongs to a
 *         different enum type.
 */
public static UserListStringRuleItemOperator valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
// Wire value backing getNumber().
private final int value;
private UserListStringRuleItemOperator(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum.UserListStringRuleItemOperator)
}
// Memoized initialization state: -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// The message has no required fields, so it is always initialized.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes the message; only unknown fields can carry data (no declared fields).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
// Serialized size is memoized; -1 means "not yet computed".
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Equality compares only unknown fields, since the message declares none.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum)) {
return super.equals(obj);
}
com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum other = (com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum) obj;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash is memoized (0 = not yet computed) and consistent with equals().
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Generated parse entry points -------------------------------------------
// All overloads delegate to the shared PARSER; the stream-based variants route
// through GeneratedMessageV3 helpers that convert IO failures to IOException.
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// --- Builder factories -------------------------------------------------------
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Fresh builder derived from the (immutable) default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with the given prototype's contents.
public static Builder newBuilder(com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Supported rule operator for string type.
* </pre>
*
* Builder for the field-less container message; all mutable state lives in
* the inherited unknown-field set.
*
* Protobuf type {@code google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum)
com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnumOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorProto.internal_static_google_ads_googleads_v9_enums_UserListStringRuleItemOperatorEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorProto.internal_static_google_ads_googleads_v9_enums_UserListStringRuleItemOperatorEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum.class, com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum.Builder.class);
}
// Construct using com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No declared fields, so there are no nested field builders to force-init.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorProto.internal_static_google_ads_googleads_v9_enums_UserListStringRuleItemOperatorEnum_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum getDefaultInstanceForType() {
return com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum.getDefaultInstance();
}
// build() rejects uninitialized results; trivially satisfied here.
@java.lang.Override
public com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum build() {
com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum buildPartial() {
com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum result = new com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
// Reflective field mutators simply delegate to the superclass.
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum) {
return mergeFrom((com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: only unknown fields can carry state for this message.
public Builder mergeFrom(com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum other) {
if (other == com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Stream merge: on parse failure, salvage the partially-read message before
// rethrowing so already-consumed data is not lost.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum)
// Singleton default (all-empty) instance shared by all callers.
private static final com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum();
}
public static com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared stateless parser backing every parseFrom overload above.
private static final com.google.protobuf.Parser<UserListStringRuleItemOperatorEnum>
PARSER = new com.google.protobuf.AbstractParser<UserListStringRuleItemOperatorEnum>() {
@java.lang.Override
public UserListStringRuleItemOperatorEnum parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new UserListStringRuleItemOperatorEnum(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<UserListStringRuleItemOperatorEnum> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UserListStringRuleItemOperatorEnum> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v9.enums.UserListStringRuleItemOperatorEnum getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
package cn.wildfirechat.app;
import java.security.SecureRandom;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class Utils {
    // NOTE(review): utility class — consider adding a private constructor to
    // prevent instantiation once no caller relies on `new Utils()`.

    // Cryptographically strong RNG: verification codes must not be predictable,
    // so Math.random() (a shared, seedable PRNG) is unsuitable here.
    // SecureRandom is thread-safe; one shared instance is fine.
    private static final SecureRandom RANDOM = new SecureRandom();

    // Chinese mainland mobile number: 11 digits, first digit 1, second 3-9.
    // Pattern is immutable and thread-safe; compile once instead of per call.
    private static final Pattern MOBILE_PATTERN =
            Pattern.compile("^(1[3-9][0-9])\\d{8}$");

    /**
     * Generates a random numeric verification code.
     *
     * @param length number of decimal digits to generate; a non-positive
     *               length yields the empty string
     * @return a string of {@code length} random digits ('0'-'9')
     */
    public static String getRandomCode(int length) {
        StringBuilder sb = new StringBuilder(Math.max(length, 0));
        for (int i = 0; i < length; i++) {
            sb.append(RANDOM.nextInt(10));
        }
        return sb.toString();
    }

    /**
     * Checks whether the given string is a valid Chinese mainland mobile number.
     *
     * @param mobile candidate number; {@code null} is accepted and rejected
     * @return {@code true} iff {@code mobile} is exactly 11 digits matching
     *         {@code 1[3-9]xxxxxxxxx}
     */
    public static boolean isMobile(String mobile) {
        // Explicit null check replaces the former catch-all exception handler,
        // which silently masked any failure as "not a mobile number".
        return mobile != null && MOBILE_PATTERN.matcher(mobile).matches();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.