repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/Clinical/src/ims/clinical/forms/specimenintraopdialog/GlobalContext.java | 4062 | //#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFETY of    #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.clinical.forms.specimenintraopdialog;
import java.io.Serializable;
/**
 * Form-scoped global context for the Specimen Intra-Op dialog.
 * Exposes a single "Clinical" accessor group wrapping two framework context
 * variables: the theatre appointment reference and the intra-operative
 * specimen value object.
 */
public final class GlobalContext extends ims.framework.FormContext implements Serializable
{
	private static final long serialVersionUID = 1L;

	/** Accessor group for the Clinical context variables. */
	public ClinicalContext Clinical;

	public GlobalContext(ims.framework.Context context)
	{
		super(context);
		this.Clinical = new ClinicalContext(context);
	}

	/** Typed getters/setters over the shared {@code ims.framework.Context}. */
	public final class ClinicalContext implements Serializable
	{
		private static final long serialVersionUID = 1L;

		// Shared context the variables below read from and write to.
		private ims.framework.Context context;

		private ims.framework.ContextVariable cx_ClinicalTheatreAppointmentRef = new ims.framework.ContextVariable("Clinical.TheatreAppointmentRef", "_cv_Clinical.TheatreAppointmentRef");
		private ims.framework.ContextVariable cx_ClinicalIntraOpSpecimenVo = new ims.framework.ContextVariable("Clinical.IntraOpSpecimenVo", "_cv_Clinical.IntraOpSpecimenVo");

		private ClinicalContext(ims.framework.Context context)
		{
			this.context = context;
		}

		/** @return true when a theatre appointment reference is currently set */
		public boolean getTheatreAppointmentRefIsNotNull()
		{
			return !cx_ClinicalTheatreAppointmentRef.getValueIsNull(context);
		}

		public ims.scheduling.vo.Booking_AppointmentRefVo getTheatreAppointmentRef()
		{
			return (ims.scheduling.vo.Booking_AppointmentRefVo) cx_ClinicalTheatreAppointmentRef.getValue(context);
		}

		public void setTheatreAppointmentRef(ims.scheduling.vo.Booking_AppointmentRefVo value)
		{
			cx_ClinicalTheatreAppointmentRef.setValue(context, value);
		}

		/** @return true when an intra-op specimen VO is currently set */
		public boolean getIntraOpSpecimenVoIsNotNull()
		{
			return !cx_ClinicalIntraOpSpecimenVo.getValueIsNull(context);
		}

		public ims.clinical.vo.SpecimenIntraOpVo getIntraOpSpecimenVo()
		{
			return (ims.clinical.vo.SpecimenIntraOpVo) cx_ClinicalIntraOpSpecimenVo.getValue(context);
		}

		public void setIntraOpSpecimenVo(ims.clinical.vo.SpecimenIntraOpVo value)
		{
			cx_ClinicalIntraOpSpecimenVo.setValue(context, value);
		}
	}
}
| agpl-3.0 |
FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/Clinical/src/ims/clinical/domain/DocumentWorkList.java | 2244 | //#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFETY of    #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.clinical.domain;
// Generated from form domain impl
/**
 * Domain interface for the Document Work List form.
 * Declares no operations of its own; it exists so the generated form
 * implementation can be wired to the domain layer via
 * {@code ims.domain.DomainInterface}.
 */
public interface DocumentWorkList extends ims.domain.DomainInterface
{
}
| agpl-3.0 |
FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/ValueObjects/src/ims/RefMan/vo/beans/CareContextForClinicalNotesVoBean.java | 3984 | //#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFETY of    #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.RefMan.vo.beans;
/**
 * Serializable bean form of {@code ims.RefMan.vo.CareContextForClinicalNotesVo}.
 * Carries the business-object id/version plus a lightweight reference to the
 * episode of care, and can rebuild the value object via {@link #buildVo()}.
 */
public class CareContextForClinicalNotesVoBean extends ims.vo.ValueObjectBean
{
	public CareContextForClinicalNotesVoBean()
	{
	}

	public CareContextForClinicalNotesVoBean(ims.RefMan.vo.CareContextForClinicalNotesVo vo)
	{
		copyFromVo(vo);
	}

	/**
	 * Copies the value object's state into this bean. The {@code map}
	 * parameter is unused here (no nested beans are populated) but kept
	 * for signature compatibility with the generated bean contract.
	 */
	public void populate(ims.vo.ValueObjectBeanMap map, ims.RefMan.vo.CareContextForClinicalNotesVo vo)
	{
		copyFromVo(vo);
	}

	// Shared by the copy constructor and populate(); previously the same
	// three assignments were duplicated verbatim in both places.
	private void copyFromVo(ims.RefMan.vo.CareContextForClinicalNotesVo vo)
	{
		this.id = vo.getBoId();
		this.version = vo.getBoVersion();
		this.episodeofcare = vo.getEpisodeOfCare() == null ? null : new ims.vo.RefVoBean(vo.getEpisodeOfCare().getBoId(), vo.getEpisodeOfCare().getBoVersion());
	}

	/** Builds the value object using a fresh identity map. */
	public ims.RefMan.vo.CareContextForClinicalNotesVo buildVo()
	{
		return this.buildVo(new ims.vo.ValueObjectBeanMap());
	}

	/**
	 * Builds (or retrieves from {@code map}) the value object for this bean.
	 * The map provides identity so cyclic bean graphs resolve to one VO.
	 *
	 * @param map identity map; if null a fresh one is used (the previous
	 *            code guarded the lookup against null but then NPE'd on
	 *            {@code map.addValueObject})
	 */
	public ims.RefMan.vo.CareContextForClinicalNotesVo buildVo(ims.vo.ValueObjectBeanMap map)
	{
		if (map == null)
			map = new ims.vo.ValueObjectBeanMap();
		ims.RefMan.vo.CareContextForClinicalNotesVo vo = (ims.RefMan.vo.CareContextForClinicalNotesVo) map.getValueObject(this);
		if (vo == null)
		{
			vo = new ims.RefMan.vo.CareContextForClinicalNotesVo();
			// Register before populating so cycles terminate.
			map.addValueObject(this, vo);
			vo.populate(map, this);
		}
		return vo;
	}

	public Integer getId()
	{
		return this.id;
	}

	public void setId(Integer value)
	{
		this.id = value;
	}

	public int getVersion()
	{
		return this.version;
	}

	public void setVersion(int value)
	{
		this.version = value;
	}

	public ims.vo.RefVoBean getEpisodeOfCare()
	{
		return this.episodeofcare;
	}

	public void setEpisodeOfCare(ims.vo.RefVoBean value)
	{
		this.episodeofcare = value;
	}

	// Business-object id (null until persisted) and optimistic-lock version.
	private Integer id;
	private int version;
	// Reference (id + version) to the associated episode of care; may be null.
	private ims.vo.RefVoBean episodeofcare;
}
| agpl-3.0 |
P1sec/SigFW | sigfw/sigfw.sigfw/src/test/java/sigfw/tests/Test_SS7Firewall.java | 9601 | /**
* JUNIT SS7Firewall class
*
* SigFW
* Open Source SS7/Diameter firewall
* By Martin Kacer, Philippe Langlois
* Copyright 2017, P1 Security S.A.S and individual contributors
*
* See the AUTHORS in the distribution for a
* full listing of individual contributors.
*
* This is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package sigfw.tests;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mobicents.protocols.api.IpChannelType;
import org.mobicents.protocols.ss7.indicator.NatureOfAddress;
import org.mobicents.protocols.ss7.indicator.RoutingIndicator;
import org.mobicents.protocols.ss7.sccp.message.SccpDataMessage;
import org.mobicents.protocols.ss7.sccp.parameter.GlobalTitle;
import org.mobicents.protocols.ss7.sccp.parameter.SccpAddress;
import static ss7fw.SS7Client.hexStringToByteArray;
import ss7fw.SS7Firewall;
import ss7fw.SS7FirewallConfig;
/**
 * JUnit tests for {@code ss7fw.SS7Firewall}: each test injects one
 * pre-encoded SCCP/TCAP payload into the firewall and asserts whether the
 * firewall forwarded it ({@code unitTestingFlags_sendSccpMessage} set) or
 * blocked it (flag left unset).
 */
public class Test_SS7Firewall {

    private static SS7Firewall sigfw = null;
    // Calling/called global-title SCCP addresses shared by every test.
    private static SccpAddress callingParty;
    private static SccpAddress calledParty;

    /**
     * Loads the JUnit firewall configuration, starts the SS7 stack in
     * unit-testing mode and builds the calling/called SCCP addresses.
     */
    private static void initializeSS7Firewall() {
        try {
            // Use last config
            SS7FirewallConfig.loadConfigFromFile("ss7fw_junit.json");
            // TODO use the following directive instead to do not use .last configs
            //SS7FirewallConfig.loadConfigFromFile(configName);
        } catch (Exception ex) {
            java.util.logging.Logger.getLogger(SS7FirewallConfig.class.getName()).log(Level.SEVERE, null, ex);
        }
        sigfw = new SS7Firewall();
        sigfw.unitTesting = true;
        try {
            sigfw.initializeStack(IpChannelType.SCTP);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // set the calling and called GT for unittests
        GlobalTitle callingGT = sigfw.sccpStack.getSccpProvider().getParameterFactory().createGlobalTitle("111111111111", 0, org.mobicents.protocols.ss7.indicator.NumberingPlan.ISDN_MOBILE, null, NatureOfAddress.INTERNATIONAL);
        GlobalTitle calledGT = sigfw.sccpStack.getSccpProvider().getParameterFactory().createGlobalTitle("000000000000", 0, org.mobicents.protocols.ss7.indicator.NumberingPlan.ISDN_MOBILE, null, NatureOfAddress.INTERNATIONAL);
        callingParty = sigfw.sccpStack.getSccpProvider().getParameterFactory().createSccpAddress(RoutingIndicator.ROUTING_BASED_ON_GLOBAL_TITLE, callingGT, 1, 8);
        calledParty = sigfw.sccpStack.getSccpProvider().getParameterFactory().createSccpAddress(RoutingIndicator.ROUTING_BASED_ON_GLOBAL_TITLE, calledGT, 2, 8);
    }

    @BeforeClass
    public static void testSS7FirewallInit() {
        initializeSS7Firewall();
    }

    /**
     * Injects one hex-encoded SCCP payload into the firewall and waits
     * briefly for asynchronous message handling to finish. Extracted to
     * replace the identical body copy-pasted into all seven tests.
     */
    private static void fireMessage(String hexPayload) {
        sigfw.resetUnitTestingFlags();
        SccpDataMessage sccpDataMessage = sigfw.sccpStack.getSccpProvider().getMessageFactory().createDataMessageClass0(calledParty, callingParty, hexStringToByteArray(hexPayload), 0, true, null, null);
        sigfw.onMessage(sccpDataMessage);
        try {
            // Thread.sleep is static; the original called it misleadingly as
            // Thread.currentThread().sleep(100).
            Thread.sleep(100);
        } catch (InterruptedException ex) {
            // Restore the interrupt status so callers can observe it.
            Thread.currentThread().interrupt();
            Logger.getLogger(Test_SS7Firewall.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    @Test
    public void testATI() {
        // anyTimeInterrogation
        fireMessage("627e4804000000026b432841060700118605010101a036603480020780a109060704000001001d03be232821060704000001010101a016a01480099611111111111111f18107961111111111f16c31a12f0201000201473027a009800711111111111111a10f80008100830084010086008500870083099611111111111111f1");
        Assert.assertFalse("anyTimeInterrogation message (opCode 71, TCAP Begin) should be blocked by Cat1", sigfw.unitTestingFlags_sendSccpMessage);
    }

    @Test
    public void testPSL() {
        // provideSubscriberLocation
        fireMessage("62454804000000536b1a2818060700118605010101a00d600ba1090607040000010026036c21a11f020101020153301730038001010407911111111111118307111111111111f1");
        Assert.assertFalse("provideSubscriberLocation message (opCode 83, TCAP Begin) should be blocked by Cat1", sigfw.unitTestingFlags_sendSccpMessage);
    }

    @Test
    public void testSAI() {
        // sendAuthenticationInfo
        fireMessage("6516480433119839490402035ea26c08a106020102020138");
        Assert.assertTrue("sendAuthenticationInfo message (opCode 56, TCAP Continue) should be allowed", sigfw.unitTestingFlags_sendSccpMessage);
    }

    @Test
    public void testUSSD() {
        // processUnstructuredSSRequest
        fireMessage("62754804000000016b432841060700118605010101a036603480020780a109060704000001001302be232821060704000001010101a016a01480099611111111111111f18107961111111111f16c28a12602010002013b301e04010f0410aa582ca65ac562b1582c168bc562b1118007911111111111f1");
        Assert.assertTrue("processUnstructuredSSRequest message (opCode 59, TCAP Begin) should be allowed", sigfw.unitTestingFlags_sendSccpMessage);
    }

    @Test
    public void testCL() {
        // cancelLocation
        fireMessage("623b4804000000036b1a2818060700118605010101a00d600ba1090607040000010002036c17a115020101020103a30d040811111111111111f10a0100");
        Assert.assertFalse("cancelLocation message (opCode 3, TCAP Begin) should be blocked by Cat2", sigfw.unitTestingFlags_sendSccpMessage);
    }

    @Test
    public void testPSI() {
        // Provide Subscriber Info
        fireMessage("623e4804000000466b1a2818060700118605010101a00d600ba109060704000001001c036c1aa1180201010201463010800811111111111111f1a20480008300");
        Assert.assertFalse("provideSubscriberInfo message (opCode 70, TCAP Begin) should be blocked by Cat2", sigfw.unitTestingFlags_sendSccpMessage);
    }

    @Test
    public void testPRN() {
        // provideRoamingNumber
        fireMessage("625d4804000000046b1a2818060700118605010101a00d600ba1090607040000010003026c39a137020101020104302f800811111111111111f18107111111111111f18207111111111111f1a5080a010104030401a0880791111111111111");
        Assert.assertFalse("provideRoamingNumber message (opCode 4, TCAP Begin) should be blocked by Cat2", sigfw.unitTestingFlags_sendSccpMessage);
    }
}
| agpl-3.0 |
open-health-hub/openmaxims-linux | openmaxims_workspace/OCRR/src/ims/ocrr/domain/base/impl/BaseCumulateResultsImpl.java | 2950 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.ocrr.domain.base.impl;
import ims.domain.impl.DomainImpl;
/**
 * Generated base implementation for the CumulateResults domain object.
 * Each {@code validateXxx} method below is an intentionally empty
 * validation hook mirroring a domain operation of the same name; concrete
 * subclasses (or the generator) supply the actual business logic.
 */
public abstract class BaseCumulateResultsImpl extends DomainImpl implements ims.ocrr.domain.CumulateResults, ims.domain.impl.Transactional
{
	private static final long serialVersionUID = 1L;

	// No-op validation hook for listResults; empty by design.
	@SuppressWarnings("unused")
	public void validatelistResults(ims.core.patient.vo.PatientRefVo patient, ims.ocrr.configuration.vo.AnalyteRefVoCollection analytes, ims.framework.utils.Date startDate, ims.framework.utils.Date endDate, Boolean isTabularView)
	{
	}

	// No-op validation hook for getDataset; empty by design.
	@SuppressWarnings("unused")
	public void validategetDataset(ims.ocrr.configuration.vo.AnalyteRefVo analyte)
	{
	}

	// No-op validation hook for getOrder; empty by design.
	@SuppressWarnings("unused")
	public void validategetOrder(ims.ocrr.orderingresults.vo.OrderInvestigationRefVo orderRef)
	{
	}

	// No-op validation hook for getOrderSpecimen; empty by design.
	@SuppressWarnings("unused")
	public void validategetOrderSpecimen(ims.ocrr.orderingresults.vo.OrderSpecimenRefVo specimenRef)
	{
	}

	// No-op validation hook for getDTFOrderInvestigation; empty by design.
	@SuppressWarnings("unused")
	public void validategetDTFOrderInvestigation(ims.ocrr.orderingresults.vo.OrderInvestigationRefVo orderInvestigationRef)
	{
	}

	// No-op validation hook for getOrderInvestigation; empty by design.
	@SuppressWarnings("unused")
	public void validategetOrderInvestigation(ims.ocrr.orderingresults.vo.OrderInvestigationRefVo orderInvestigationRef)
	{
	}
}
| agpl-3.0 |
open-health-hub/openmaxims-linux | openmaxims_workspace/Clinical/src/ims/clinical/domain/base/impl/BaseDementiaAssessmentAMTSComponentImpl.java | 2382 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.clinical.domain.base.impl;
import ims.domain.impl.DomainImpl;
/**
 * Generated base implementation for the Dementia Assessment AMTS component.
 * Each {@code validateXxx} method below is an intentionally empty
 * validation hook mirroring a domain operation of the same name; concrete
 * subclasses (or the generator) supply the actual business logic.
 */
public abstract class BaseDementiaAssessmentAMTSComponentImpl extends DomainImpl implements ims.clinical.domain.DementiaAssessmentAMTSComponent, ims.domain.impl.Transactional
{
	private static final long serialVersionUID = 1L;

	// No-op validation hook for getHintByLookupID; empty by design.
	@SuppressWarnings("unused")
	public void validategetHintByLookupID(ims.clinicaladmin.vo.lookups.DementiaTermConfig volookup)
	{
	}

	// No-op validation hook for saveDementia; empty by design.
	@SuppressWarnings("unused")
	public void validatesaveDementia(ims.clinical.vo.DementiaVo voDementia)
	{
	}

	// No-op validation hook for getDementia; empty by design.
	@SuppressWarnings("unused")
	public void validategetDementia(ims.core.clinical.vo.DementiaRefVo dementiaRef)
	{
	}
}
| agpl-3.0 |
qcri-social/Crisis-Computing | aidr-manager/src/main/java/qa/qcri/aidr/manager/dto/TaggerCrisisRequest.java | 1518 | package qa.qcri.aidr.manager.dto;
import qa.qcri.aidr.dbmanager.dto.CollectionDTO;
/**
 * Request payload describing a tagger crisis: its code, display name,
 * crisis type and associated users. Convertible to the persistence-layer
 * {@link CollectionDTO} via {@link #toDTO()}.
 */
public class TaggerCrisisRequest {

    private String code;
    private String name;
    private TaggerCrisisType crisisType;
    private TaggerUserRequest users;

    public TaggerCrisisRequest() {
    }

    public TaggerCrisisRequest(String code, String name, TaggerCrisisType crisisType, TaggerUserRequest users) {
        this.code = code;
        this.name = name;
        this.crisisType = crisisType;
        this.users = users;
    }

    /**
     * Maps this request onto a new {@link CollectionDTO}. The collection is
     * always marked not-trashed; users and crisis type are converted when
     * present, otherwise left null.
     *
     * @throws Exception propagated from the nested {@code toDTO()} conversions
     */
    public CollectionDTO toDTO() throws Exception {
        CollectionDTO collection = new CollectionDTO();
        collection.setCode(getCode());
        collection.setName(getName());
        collection.setIsTrashed(false);
        if (getUsers() == null) {
            collection.setUsersDTO(null);
        } else {
            collection.setUsersDTO(getUsers().toDTO());
        }
        if (getCrisisType() == null) {
            collection.setCrisisTypeDTO(null);
        } else {
            collection.setCrisisTypeDTO(getCrisisType().toDTO());
        }
        return collection;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public TaggerCrisisType getCrisisType() {
        return crisisType;
    }

    public void setCrisisType(TaggerCrisisType crisisType) {
        this.crisisType = crisisType;
    }

    public TaggerUserRequest getUsers() {
        return users;
    }

    public void setUsers(TaggerUserRequest users) {
        this.users = users;
    }
}
| agpl-3.0 |
aborg0/RapidMiner-Unuk | src/com/rapidminer/gui/tools/dialogs/AboutBox.java | 10719 | /*
* RapidMiner
*
* Copyright (C) 2001-2013 by Rapid-I and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapid-i.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.gui.tools.dialogs;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Desktop;
import java.awt.Dimension;
import java.awt.Frame;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Image;
import java.awt.Paint;
import java.awt.RenderingHints;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.logging.Level;
import javax.imageio.ImageIO;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JPanel;
import javax.swing.KeyStroke;
import com.rapidminer.gui.tools.ResourceAction;
import com.rapidminer.gui.tools.SwingTools;
import com.rapidminer.gui.tools.components.LinkButton;
import com.rapidminer.tools.LogService;
import com.rapidminer.tools.Tools;
import com.rapidminer.tools.plugin.Plugin;
/**
* This dialog displays some informations about the product. The product logo should have a size of approximately 270
* times 70 pixels.
*
* @author Ingo Mierswa
*/
public class AboutBox extends JDialog {
private static final long serialVersionUID = -3889559376722324215L;
private static final String PROPERTY_FILE = "about_infos.properties";
private static final String RAPID_MINER_LOGO_NAME = "rapidminer_logo.png";
public static final Image RAPID_MINER_LOGO;
public static Image backgroundImage = null;
static {
URL url = Tools.getResource(RAPID_MINER_LOGO_NAME);
Image rmLogo = null;
if (url != null) {
try {
rmLogo = ImageIO.read(url);
} catch (IOException e) {
//LogService.getGlobal().logWarning("Cannot load logo for about box. Using empty image...");
LogService.getRoot().log(Level.WARNING, "com.rapidminer.gui.tools.dialogs.AboutBox.loading_logo_error");
}
}
RAPID_MINER_LOGO = rmLogo;
url = Tools.getResource("splashscreen_community.png");
if (url != null) {
try {
backgroundImage = ImageIO.read(url);
} catch (IOException e) {
//LogService.getGlobal().logWarning("Cannot load background for about box. Using empty image...");
LogService.getRoot().log(Level.WARNING, "com.rapidminer.gui.tools.dialogs.AboutBox.loading_background_error");
}
}
}
private ContentPanel contentPanel;
private static class ContentPanel extends JPanel {
private static final long serialVersionUID = -1763842074674706654L;
private static final Paint MAIN_PAINT = Color.LIGHT_GRAY;
private static final int MARGIN = 10;
private Properties properties;
private transient Image productLogo;
public ContentPanel(Properties properties, Image productLogo) {
this.properties = properties;
this.productLogo = productLogo;
int width = 450;
int height = 350;
if (backgroundImage != null) {
width = backgroundImage.getWidth(this);
height = backgroundImage.getHeight(this);
}
setPreferredSize(new Dimension(width, height));
setMinimumSize(new Dimension(width, height));
setMaximumSize(new Dimension(width, height));
}
@Override
public void paint(Graphics g) {
super.paint(g);
((Graphics2D) g).setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
drawMain((Graphics2D) g);
g.setColor(Color.black);
g.drawRect(0, 0, getWidth() - 1, getHeight() - 1);
}
public void drawMain(Graphics2D g) {
g.setPaint(MAIN_PAINT);
g.fillRect(0, 0, getWidth(), getHeight());
if (backgroundImage != null)
g.drawImage(backgroundImage, 0, 0, this);
int nameY = 100 + 26;
g.setFont(new java.awt.Font("SansSerif", java.awt.Font.BOLD, 26));
g.setColor(SwingTools.RAPID_I_BROWN);
if (productLogo != null) {
if ("true".equals(properties.getProperty("textNextToLogo"))) {
g.drawImage(productLogo, 20, 90, this);
g.drawString(properties.getProperty("name"), 20 + productLogo.getWidth(null) + 10, nameY);
} else {
g.drawImage(productLogo, getWidth() / 2 - productLogo.getWidth(this) / 2, 90, this);
}
} else {
g.drawString(properties.getProperty("name"), 20, nameY);
}
int y = 240;
g.setColor(SwingTools.BROWN_FONT_COLOR);
g.setFont(new java.awt.Font("SansSerif", java.awt.Font.BOLD, 11));
drawString(g, properties.getProperty("name") + " " + properties.getProperty("version"), y);
y += 20;
g.setFont(new java.awt.Font("SansSerif", java.awt.Font.PLAIN, 10));
y = drawStringAndAdvance(g, properties.getProperty("name") + " " + properties.getProperty("version"), y);
y = drawStringAndAdvance(g, properties.getProperty("copyright"), y);
y = drawStringAndAdvance(g, properties.getProperty("licensor"), y);
y = drawStringAndAdvance(g, properties.getProperty("license"), y);
y = drawStringAndAdvance(g, properties.getProperty("warranty"), y);
y = drawStringAndAdvance(g, properties.getProperty("more"), y);
}
private int drawStringAndAdvance(Graphics2D g, String string, int y) {
if (string == null) {
return y;
} else {
List<String> lines = new LinkedList<String>();
String[] words = string.split("\\s+");
String current = "";
for (String word : words) {
if (current.length() + word.length() < 80) {
current += word + " ";
} else {
lines.add(current);
current = word + " ";
}
}
if (!current.isEmpty()) {
lines.add(current);
}
for (String line : lines) {
drawString(g, line, y);
y += 15;
}
return y;
}
}
private void drawString(Graphics2D g, String text, int y) {
if (text == null)
return;
float xPos = MARGIN;
float yPos = y;
g.drawString(text, xPos, yPos);
}
}
public AboutBox(Frame owner, String productName, String productVersion, String licensor, String url, String text, boolean renderTextNextToLogo, Image productLogo) {
this(owner, createProperties(productName, productVersion, licensor, url, text, renderTextNextToLogo), productLogo);
}
public AboutBox(Frame owner, String productVersion, Image productLogo) {
this(owner, createProperties(productVersion), productLogo);
}
public AboutBox(Frame owner, Properties properties, Image productLogo) {
super(owner, "About", true);
// if (productLogo == null) {
// productLogo = rapidMinerLogo;
// }
setResizable(false);
setLayout(new BorderLayout());
String name = properties.getProperty("name");
if (name != null) {
setTitle("About " + name);
}
contentPanel = new ContentPanel(properties, productLogo);
add(contentPanel, BorderLayout.CENTER);
JPanel buttonPanel = new JPanel(new GridBagLayout());
// FlowLayout(FlowLayout.RIGHT));
GridBagConstraints c = new GridBagConstraints();
c.fill = GridBagConstraints.BOTH;
final String url = properties.getProperty("url");
if (url != null) {
c.weightx = 1;
c.gridwidth = GridBagConstraints.RELATIVE;
buttonPanel.add(new LinkButton(new ResourceAction("simple_link_action", url) {
private static final long serialVersionUID = 1L;
@Override
public void actionPerformed(ActionEvent e) {
try {
Desktop.getDesktop().browse(new URI(url));
} catch (Exception e1) {
e1.printStackTrace();
}
}
}), c);
}
ResourceAction closeAction = new ResourceAction("close") {
private static final long serialVersionUID = 1407089394491740308L;
public void actionPerformed(ActionEvent e) {
dispose();
}
};
JButton closeButton = new JButton(closeAction);
c.weightx = 0;
c.gridwidth = GridBagConstraints.REMAINDER;
buttonPanel.add(closeButton, c);
add(buttonPanel, BorderLayout.SOUTH);
getRootPane().setDefaultButton(closeButton);
getRootPane().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0, false), "CANCEL");
getRootPane().getActionMap().put("CANCEL", closeAction);
pack();
setLocationRelativeTo(owner);
}
public static Properties createProperties(InputStream inputStream, String productVersion) {
Properties properties = new Properties();
if (inputStream != null) {
try {
properties.load(inputStream);
} catch (Exception e) {
//LogService.getGlobal().logError("Cannot read splash screen infos: " + e.getMessage());
LogService.getRoot().log(Level.SEVERE, "com.rapidminer.gui.tools.dialogs.AboutBox.reading_splash_screen_error", e.getMessage());
}
}
properties.setProperty("version", productVersion);
Plugin.initAboutTexts(properties);
return properties;
}
/**
 * Builds the about-box property set from the bundled {@code PROPERTY_FILE}
 * resource, then forces the "version" entry and lets plugins append their
 * about texts. Missing resource or read errors are logged and result in a
 * property set that only contains "version" plus plugin entries.
 *
 * @param productVersion value forced into the "version" property
 * @return the populated property set (never null)
 */
private static Properties createProperties(String productVersion) {
    Properties properties = new Properties();
    try {
        URL propUrl = Tools.getResource(PROPERTY_FILE);
        if (propUrl != null) {
            InputStream in = propUrl.openStream();
            try {
                properties.load(in);
            } finally {
                // BUGFIX: previously the stream leaked when load() threw,
                // because close() was only reached on the success path.
                in.close();
            }
        }
    } catch (Exception e) {
        LogService.getRoot().log(Level.SEVERE, "com.rapidminer.gui.tools.dialogs.AboutBox.reading_splash_screen_error", e.getMessage());
    }
    properties.setProperty("version", productVersion);
    Plugin.initAboutTexts(properties);
    return properties;
}
/**
 * Builds an about-box property set directly from explicit values instead
 * of a properties resource.
 *
 * @param productName          shown as the product name ("name")
 * @param productVersion       shown as the version ("version")
 * @param licensor             shown as the licensor ("licensor")
 * @param url                  stored both as "url" and inside "license"
 * @param text                 free-form additional text ("more")
 * @param renderTextNextToLogo layout flag, stored as "true"/"false"
 * @return the populated property set (never null)
 */
private static Properties createProperties(String productName, String productVersion, String licensor, String url, String text, boolean renderTextNextToLogo) {
    Properties props = new Properties();
    props.setProperty("name", productName);
    props.setProperty("version", productVersion);
    props.setProperty("licensor", licensor);
    props.setProperty("license", "URL: " + url);
    props.setProperty("more", text);
    props.setProperty("textNextToLogo", String.valueOf(renderTextNextToLogo));
    props.setProperty("url", url);
    return props;
}
}
| agpl-3.0 |
Tanaguru/Tanaguru | rules/accessiweb2.2/src/test/java/org/tanaguru/rules/accessiweb22/Aw22Rule08091Test.java | 17246 | /*
* Tanaguru - Automated webpage assessment
* Copyright (C) 2008-2015 Tanaguru.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contact us by mail: tanaguru AT tanaguru DOT org
*/
package org.tanaguru.rules.accessiweb22;
import java.util.LinkedHashSet;
import org.tanaguru.entity.audit.ProcessRemark;
import org.tanaguru.entity.audit.ProcessResult;
import org.tanaguru.entity.audit.SourceCodeRemark;
import org.tanaguru.entity.audit.TestSolution;
import org.tanaguru.rules.accessiweb22.test.Aw22RuleImplementationTestCase;
import org.tanaguru.rules.keystore.HtmlElementStore;
import org.tanaguru.rules.keystore.RemarkMessageStore;
/**
* Unit test class for the implementation of the rule 8.9.1 of the referential Accessiweb 2.2.
*
* @author jkowalczyk
*/
public class Aw22Rule08091Test extends Aw22RuleImplementationTestCase {
/**
 * Default constructor; simply forwards the test name to the rule test
 * base class.
 *
 * @param testName name of the test case to run
 */
public Aw22Rule08091Test (String testName){
    super(testName);
}
@Override
protected void setUpRuleImplementationClassName() {
    // Fully-qualified class of the rule under test (Accessiweb 2.2, test 8.9.1).
    setRuleImplementationClassName(
            "org.tanaguru.rules.accessiweb22.Aw22Rule08091");
}
@Override
protected void setUpWebResourceMap() {
    // Every test page lives in the same fixture directory and its file is
    // named after its map key, so register them all with one loop instead
    // of eleven copy-pasted put(...) calls.
    String[] testCaseKeys = {
            "AW22.Test.8.9.1-2Failed-01",
            "AW22.Test.8.9.1-2Failed-02",
            "AW22.Test.8.9.1-2Failed-03",
            "AW22.Test.8.9.1-2Failed-04",
            "AW22.Test.8.9.1-3NMI-01",
            "AW22.Test.8.9.1-3NMI-02",
            "AW22.Test.8.9.1-3NMI-03",
            "AW22.Test.8.9.1-3NMI-04",
            "AW22.Test.8.9.1-3NMI-05",
            "AW22.Test.8.9.1-3NMI-06",
            "AW22.Test.8.9.1-3NMI-07"};
    for (String key : testCaseKeys) {
        getWebResourceMap().put(key,
                getWebResourceFactory().createPage(
                        getTestcasesFilePath() + "accessiweb22/Aw22Rule08091/" + key + ".html"));
    }
}
@Override
protected void setProcess() {
    // Pages that must FAIL, with the expected element count, remark
    // message code and remark target element.
    checkFailedPage("AW22.Test.8.9.1-2Failed-01", 12,
            RemarkMessageStore.LINK_WITHOUT_TARGET_MSG, HtmlElementStore.A_ELEMENT);
    checkFailedPage("AW22.Test.8.9.1-2Failed-02", 12,
            RemarkMessageStore.LINK_WITHOUT_TARGET_MSG, HtmlElementStore.A_ELEMENT);
    checkFailedPage("AW22.Test.8.9.1-2Failed-03", 12,
            RemarkMessageStore.FIELDSET_NOT_WITHIN_FORM_MSG, HtmlElementStore.FIELDSET_ELEMENT);
    checkFailedPage("AW22.Test.8.9.1-2Failed-04", 13,
            RemarkMessageStore.FIELDSET_NOT_WITHIN_FORM_MSG, HtmlElementStore.FIELDSET_ELEMENT);

    // Pages that must raise NEED_MORE_INFO, with the expected element count.
    checkNmiPage("AW22.Test.8.9.1-3NMI-01", 11);
    checkNmiPage("AW22.Test.8.9.1-3NMI-02", 12);
    checkNmiPage("AW22.Test.8.9.1-3NMI-03", 12);
    checkNmiPage("AW22.Test.8.9.1-3NMI-04", 12);
    checkNmiPage("AW22.Test.8.9.1-3NMI-05", 13);
    checkNmiPage("AW22.Test.8.9.1-3NMI-06", 13);
    checkNmiPage("AW22.Test.8.9.1-3NMI-07", 13);
}

/**
 * Runs the rule against one page expected to FAIL and verifies the single
 * {@link SourceCodeRemark} it must produce.
 *
 * @param pageKey              key of the page in the web-resource map
 * @param expectedElementCount expected number of processed elements
 * @param expectedMessageCode  expected remark message code
 * @param expectedTarget       expected remark target element name
 */
private void checkFailedPage(String pageKey, int expectedElementCount,
        String expectedMessageCode, String expectedTarget) {
    ProcessResult processResult = processPageTest(pageKey);
    // check number of elements in the page
    assertEquals(expectedElementCount, processResult.getElementCounter());
    // check test result
    assertEquals(TestSolution.FAILED, processResult.getValue());
    // check number of remarks and their value
    assertEquals(1, processResult.getRemarkSet().size());
    SourceCodeRemark sourceCodeRemark =
            (SourceCodeRemark) processResult.getRemarkSet().iterator().next();
    assertEquals(TestSolution.FAILED, sourceCodeRemark.getIssue());
    assertEquals(expectedMessageCode, sourceCodeRemark.getMessageCode());
    assertEquals(expectedTarget, sourceCodeRemark.getTarget());
    assertNotNull(sourceCodeRemark.getSnippet());
    assertNull(sourceCodeRemark.getElementList());
}

/**
 * Runs the rule against one page expected to end as NEED_MORE_INFO and
 * verifies the single generic {@link ProcessRemark} it must produce.
 *
 * @param pageKey              key of the page in the web-resource map
 * @param expectedElementCount expected number of processed elements
 */
private void checkNmiPage(String pageKey, int expectedElementCount) {
    ProcessResult processResult = processPageTest(pageKey);
    // check number of elements in the page
    assertEquals(expectedElementCount, processResult.getElementCounter());
    // check test result
    assertEquals(TestSolution.NEED_MORE_INFO, processResult.getValue());
    // check number of remarks and their value
    assertEquals(1, processResult.getRemarkSet().size());
    ProcessRemark processRemark = processResult.getRemarkSet().iterator().next();
    assertEquals(TestSolution.NEED_MORE_INFO, processRemark.getIssue());
    assertEquals(getMessageKey(RemarkMessageStore.NO_PATTERN_DETECTED_MSG),
            processRemark.getMessageCode());
    assertNull(processRemark.getElementList());
}
@Override
protected void setConsolidate() {
    // The four 2Failed pages consolidate to FAILED...
    for (int i = 1; i <= 4; i++) {
        assertEquals(TestSolution.FAILED,
                consolidate(String.format("AW22.Test.8.9.1-2Failed-%02d", i)).getValue());
    }
    // ...and the seven 3NMI pages to NEED_MORE_INFO.
    for (int i = 1; i <= 7; i++) {
        assertEquals(TestSolution.NEED_MORE_INFO,
                consolidate(String.format("AW22.Test.8.9.1-3NMI-%02d", i)).getValue());
    }
}
/**
 * Suffixes a remark message code with the current test-case name, which is
 * how page-specific message keys are formed.
 *
 * @param msg the base message code
 * @return {@code msg} followed by "_" and {@code getName()}
 */
private String getMessageKey(String msg) {
    return msg + "_" + getName();
}
} | agpl-3.0 |
openhealthcare/openMAXIMS | openmaxims_workspace/Core/src/ims/core/forms/reportparamsdialog/FormInfo.java | 2672 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.core.forms.reportparamsdialog;
// Generated UI metadata for the Core "ReportParamsDialog" form (identity,
// size, context variables, icon). The file header marks this as generated
// ("WARNING: DO NOT MODIFY"), so only comments are added here.
public final class FormInfo extends ims.framework.FormInfo
{
    private static final long serialVersionUID = 1L;

    /** @param formId framework-assigned numeric identifier for this form. */
    public FormInfo(Integer formId)
    {
        super(formId);
    }

    /** Namespace (application module) this form belongs to. */
    public String getNamespaceName()
    {
        return "Core";
    }

    /** Logical form name within the namespace. */
    public String getFormName()
    {
        return "ReportParamsDialog";
    }

    // Fixed design-time form width (units per framework convention,
    // presumably pixels — TODO confirm against ims.framework).
    public int getWidth()
    {
        return 536;
    }

    // Fixed design-time form height (same unit as getWidth()).
    public int getHeight()
    {
        return 440;
    }

    /** Names of the global context variables this form is wired to. */
    public String[] getContextVariables()
    {
        return new String[] { "_cv_Core.ImsReportId", "_cv_Admin.ReportSeedParsed" };
    }

    // Prefix scoping this form's local context variables; includes the form
    // id so multiple instances do not collide.
    public String getLocalVariablesPrefix()
    {
        return "_lv_Core.ReportParamsDialog.__internal_x_context__" + String.valueOf(getFormId());
    }

    /** This form embeds no component (child) forms. */
    public ims.framework.FormInfo[] getComponentsFormInfo()
    {
        ims.framework.FormInfo[] componentsInfo = new ims.framework.FormInfo[0];
        return componentsInfo;
    }

    /** Icon resource shown for this form. */
    public String getImagePath()
    {
        return "Images/Admin/reports_params_48.png";
    }
}
| agpl-3.0 |
openhealthcare/openMAXIMS | openmaxims_workspace/Nursing/src/ims/nursing/forms/bradenscaleview/AccessLogic.java | 2006 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Neil McAnaspie using IMS Development Environment (version 1.51 build 2480.15886)
// Copyright (C) 1995-2006 IMS MAXIMS plc. All rights reserved.
package ims.nursing.forms.bradenscaleview;
import java.io.Serializable;
// Access-control hook for the BradenScaleView form. Generated scaffold:
// currently defers entirely to the base class, with a placeholder where
// form-specific access conditions would go.
public final class AccessLogic extends BaseAccessLogic implements Serializable
{
    private static final long serialVersionUID = 1L;

    /**
     * @return false when the base logic denies access; true otherwise
     *         (no form-specific conditions have been added yet).
     */
    public boolean isAccessible()
    {
        if(!super.isAccessible())
            return false;
        // TODO: Add your conditions here.
        return true;
    }
}
| agpl-3.0 |
B3Partners/flamingo | viewer-admin/src/main/java/nl/b3p/viewer/admin/stripes/ConfigureSolrActionBean.java | 16079 | /*
* Copyright (C) 2013 B3Partners B.V.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package nl.b3p.viewer.admin.stripes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import javax.annotation.security.RolesAllowed;
import javax.persistence.EntityManager;
import javax.servlet.http.HttpServletResponse;
import net.sourceforge.stripes.action.ActionBean;
import net.sourceforge.stripes.action.ActionBeanContext;
import net.sourceforge.stripes.action.After;
import net.sourceforge.stripes.action.Before;
import net.sourceforge.stripes.action.DefaultHandler;
import net.sourceforge.stripes.action.ForwardResolution;
import net.sourceforge.stripes.action.Resolution;
import net.sourceforge.stripes.action.StreamingResolution;
import net.sourceforge.stripes.action.StrictBinding;
import net.sourceforge.stripes.action.UrlBinding;
import net.sourceforge.stripes.controller.LifecycleStage;
import net.sourceforge.stripes.validation.SimpleError;
import net.sourceforge.stripes.validation.Validate;
import net.sourceforge.stripes.validation.ValidateNestedProperties;
import nl.b3p.i18n.LocalizableActionBean;
import nl.b3p.viewer.config.security.Group;
import nl.b3p.viewer.config.services.AttributeDescriptor;
import nl.b3p.viewer.config.services.FeatureSource;
import nl.b3p.viewer.config.services.SimpleFeatureType;
import nl.b3p.viewer.config.services.SolrConf;
import nl.b3p.viewer.solr.SolrInitializer;
import nl.b3p.viewer.solr.SolrUpdateJob;
import nl.b3p.web.WaitPageStatus;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.SolrPingResponse;
import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.stripesstuff.plugin.waitpage.WaitPage;
import org.stripesstuff.stripersist.Stripersist;
/**
*
* @author Meine Toonen
*/
@UrlBinding("/action/configuresolr")
@StrictBinding
@RolesAllowed({Group.ADMIN, Group.REGISTRY_ADMIN})
public class ConfigureSolrActionBean extends LocalizableActionBean {
private static final Log log = LogFactory.getLog(ConfigureSolrActionBean.class);
private static final String JSP = "/WEB-INF/jsp/services/solrconfig.jsp";
private static final String EDIT_JSP = "/WEB-INF/jsp/services/editsolrsource.jsp";
private static final String PROTOTYPE_JSP = "/WEB-INF/jsp/services/searchPrototype.jsp";
private List<FeatureSource> featureSources = new ArrayList();
private List<SimpleFeatureType> featureTypes = new ArrayList();
private ActionBeanContext context;
@Validate
@ValidateNestedProperties({
@Validate(field= "simpleFeatureType"),
@Validate(field= "name",required = true, on = "save")
})
private SolrConf solrConfiguration;
@Validate
private Long simpleFeatureTypeId;
@Validate
private Long[] indexAttributes;
@Validate
private Long[] resultAttributes;
private WaitPageStatus status;
@Validate
private String term;
@Validate
private Integer page;
@Validate
private Integer start;
@Validate
private Integer limit;
@Validate
private JSONArray filter;
private Boolean solrInitialized = true;
//<editor-fold defaultstate="collapsed" desc="getters & setters">
@Override
public ActionBeanContext getContext() {
return context;
}
@Override
public void setContext(ActionBeanContext context) {
this.context = context;
}
public SolrConf getSolrConfiguration() {
return solrConfiguration;
}
public void setSolrConfiguration(SolrConf solrConfiguration) {
this.solrConfiguration = solrConfiguration;
}
public List<FeatureSource> getFeatureSources() {
return featureSources;
}
public void setFeatureSources(List<FeatureSource> featureSources) {
this.featureSources = featureSources;
}
public Long[] getIndexAttributes() {
return indexAttributes;
}
public void setIndexAttributes(Long[] indexAttributes) {
this.indexAttributes = indexAttributes;
}
public Long[] getResultAttributes() {
return resultAttributes;
}
public void setResultAttributes(Long[] resultAttributes) {
this.resultAttributes = resultAttributes;
}
public List<SimpleFeatureType> getFeatureTypes() {
return featureTypes;
}
public void setFeatureTypes(List<SimpleFeatureType> featureTypes) {
this.featureTypes = featureTypes;
}
public Long getSimpleFeatureTypeId() {
return simpleFeatureTypeId;
}
public void setSimpleFeatureTypeId(Long simpleFeatureTypeId) {
this.simpleFeatureTypeId = simpleFeatureTypeId;
}
public WaitPageStatus getStatus() {
return status;
}
public void setStatus(WaitPageStatus status) {
this.status = status;
}
public String getTerm() {
return term;
}
public void setTerm(String term) {
this.term = term;
}
public Boolean getSolrInitialized() {
return solrInitialized;
}
public void setSolrInitialized(Boolean solrInitialized) {
this.solrInitialized = solrInitialized;
}
public Integer getPage() {
return page;
}
public void setPage(Integer page) {
this.page = page;
}
public Integer getStart() {
return start;
}
public void setStart(Integer start) {
this.start = start;
}
public Integer getLimit() {
return limit;
}
public void setLimit(Integer limit) {
this.limit = limit;
}
public JSONArray getFilter() {
return filter;
}
public void setFilter(JSONArray filter) {
this.filter = filter;
}
//</editor-fold>
/**
 * Default handler: pings the Solr server so the JSP can warn the admin
 * when no (working) search server is configured, then shows the overview
 * page. The ping failure itself is logged and surfaced as a validation
 * error; it never aborts the request.
 */
@DefaultHandler
public Resolution view() throws SolrServerException {
    SolrServer server = SolrInitializer.getServerInstance();
    try {
        // The ping response itself was never used; only reachability matters.
        server.ping();
    } catch (Exception e) {
        log.error("Solr ping exception", e);
        this.context.getValidationErrors().addGlobalError(new SimpleError(getBundle().getString("viewer_admin.configuresolractionbean.solrnoconfig")));
        solrInitialized = false;
    }
    return new ForwardResolution(JSP);
}
/** Shows the edit page for the currently bound solr configuration. */
public Resolution edit() {
    return new ForwardResolution(EDIT_JSP);
}
/**
 * Re-indexes the selected configuration: first removes any previously
 * indexed documents (removeFromIndex commits its own transaction), then
 * inserts the features afresh. Runs behind a Stripes wait page because
 * indexing can take a while; progress is reported through {@link #status}.
 */
@WaitPage(path = "/WEB-INF/jsp/waitpage.jsp", delay = 2000, refresh = 1000, ajax = "/WEB-INF/jsp/waitpageajax.jsp")
public Resolution addToIndex() throws InterruptedException {
    removeFromIndex();
    status = new WaitPageStatus();
    EntityManager em = Stripersist.getEntityManager();
    SolrServer server = SolrInitializer.getServerInstance();
    // Re-read the entity: the Stripes-bound instance is detached.
    solrConfiguration = em.find(SolrConf.class, solrConfiguration.getId());
    SolrUpdateJob.insertSolrConfigurationIntoIndex(solrConfiguration, em, status, server);
    em.getTransaction().commit();
    return new ForwardResolution(EDIT_JSP);
}
/**
 * Removes all documents of the bound configuration from the Solr index
 * and commits, then returns to the edit page.
 */
public Resolution removeFromIndex(){
    EntityManager em = Stripersist.getEntityManager();
    SolrServer server = SolrInitializer.getServerInstance();
    removeConfigurationFromIndex(em, solrConfiguration, server);
    em.getTransaction().commit();
    return new ForwardResolution(EDIT_JSP);
}
/**
 * Removes a configuration's documents from the index. Re-reads the entity
 * by id first so a detached instance can be passed in. Does NOT commit;
 * the caller owns the transaction.
 */
public static void removeConfigurationFromIndex(EntityManager em, SolrConf conf, SolrServer server){
    conf = em.find(SolrConf.class, conf.getId());
    SolrUpdateJob.removeSolrConfigurationFromIndex(conf, em, server);
}
/** Discards any pending changes by simply returning to the edit page. */
public Resolution cancel() {
    return new ForwardResolution(EDIT_JSP);
}
/**
 * Persists the bound solr configuration, replacing its index- and
 * result-attribute lists with the attribute names resolved from the
 * posted attribute ids.
 */
public Resolution save() {
    EntityManager em = Stripersist.getEntityManager();
    solrConfiguration.getIndexAttributes().clear();
    solrConfiguration.getResultAttributes().clear();
    // BUGFIX: the arrays are null when no checkbox was ticked, which
    // previously caused a NullPointerException; treat null as empty.
    if (indexAttributes != null) {
        for (Long attributeId : indexAttributes) {
            AttributeDescriptor attribute = em.find(AttributeDescriptor.class, attributeId);
            solrConfiguration.getIndexAttributes().add(attribute.getName());
        }
    }
    if (resultAttributes != null) {
        for (Long attributeId : resultAttributes) {
            AttributeDescriptor attribute = em.find(AttributeDescriptor.class, attributeId);
            solrConfiguration.getResultAttributes().add(attribute.getName());
        }
    }
    em.persist(solrConfiguration);
    em.getTransaction().commit();
    return new ForwardResolution(EDIT_JSP);
}
/** Starts editing a brand-new, empty solr configuration. */
public Resolution newSearchConfig() {
    solrConfiguration = new SolrConf();
    return new ForwardResolution(EDIT_JSP);
}
// Populates the drop-down backing lists after binding/validation for the
// edit-related events: all feature sources, plus — when a configuration
// with a feature type is bound — the feature types of its source.
@After(on = {"edit", "save", "newSearchConfig"}, stages = LifecycleStage.BindingAndValidation)
public void loadLists(){
    featureSources = Stripersist.getEntityManager().createQuery("from FeatureSource").getResultList();
    if(solrConfiguration != null && solrConfiguration.getSimpleFeatureType() != null){
        featureTypes = solrConfiguration.getSimpleFeatureType().getFeatureSource().getFeatureTypes();
    }
}
/**
 * Deletes the bound configuration (index documents first, then the
 * entity) and commits, then returns to the edit page.
 */
public Resolution delete() {
    EntityManager em = Stripersist.getEntityManager();
    deleteSolrConfiguration(em, solrConfiguration, SolrInitializer.getServerInstance());
    em.getTransaction().commit();
    return new ForwardResolution(EDIT_JSP);
}
/**
 * Removes a configuration from the Solr index and deletes the entity.
 * Does NOT commit; the caller owns the transaction.
 */
public static void deleteSolrConfiguration(EntityManager em, SolrConf conf, SolrServer server){
    removeConfigurationFromIndex(em, conf, server);
    em.remove(conf);
}
/**
 * Returns one page of SolrConf rows as the JSON structure the admin grid
 * expects: {"totalCount": n, "gridrows": [...]}. Optional "name" and
 * "lastUpdated" substring filters are read from the posted filter array;
 * paging uses the bound {@code start}/{@code limit} values.
 */
public Resolution getGridData() throws JSONException {
    String nameFilter = "";
    String lastUpdatedFilter = "";
    if (this.getFilter() != null) {
        for (int i = 0; i < this.getFilter().length(); i++) {
            JSONObject entry = this.getFilter().getJSONObject(i);
            String property = entry.getString("property");
            String value = entry.getString("value");
            if (property.equals("name")) {
                nameFilter = value;
            }
            if (property.equals("lastUpdated")) {
                lastUpdatedFilter = value;
            }
        }
    }
    Session session = (Session) Stripersist.getEntityManager().getDelegate();
    Criteria criteria = session.createCriteria(SolrConf.class);
    if (nameFilter != null && nameFilter.length() > 0) {
        criteria.add(Restrictions.ilike("name", nameFilter, MatchMode.ANYWHERE));
    }
    if (lastUpdatedFilter != null && lastUpdatedFilter.length() > 0) {
        criteria.add(Restrictions.ilike("lastUpdated", lastUpdatedFilter, MatchMode.ANYWHERE));
    }
    // NOTE(review): the total is obtained by listing every matching row and
    // then querying again for the page — O(total rows). Kept as-is to
    // preserve behaviour; a rowCount projection would be cheaper.
    int rowCount = criteria.list().size();
    criteria.setMaxResults(limit);
    criteria.setFirstResult(start);
    JSONArray rows = new JSONArray();
    for (Object row : criteria.list()) {
        rows.put(((SolrConf) row).toJSON());
    }
    final JSONObject grid = new JSONObject();
    grid.put("totalCount", rowCount);
    grid.put("gridrows", rows);
    return new StreamingResolution("application/json", grid.toString());
}
/**
 * Returns every solr configuration as a JSON array of {id, name} objects,
 * pretty-printed with an indent of 4 (used to fill selection lists).
 */
public Resolution getSearchconfigData() throws JSONException {
    JSONArray searchconfigs = new JSONArray();
    List<SolrConf> allConfigs =
            Stripersist.getEntityManager().createQuery("FROM SolrConf").getResultList();
    for (SolrConf conf : allConfigs) {
        // JSONObject.put returns this, so the pairs can be chained.
        searchconfigs.put(new JSONObject()
                .put("id", conf.getId())
                .put("name", conf.getName()));
    }
    return new StreamingResolution("application/json", searchconfigs.toString(4));
}
/**
 * Returns the attribute descriptors of the requested feature type as grid
 * JSON ({"totalCount": n, "gridrows": [...]}), with indexChecked /
 * resultChecked flags pre-set from the bound solr configuration so the
 * grid can tick already-selected attributes.
 *
 * @throws IllegalArgumentException when no simpleFeatureTypeId was posted
 */
public Resolution getAttributesList() throws JSONException {
    JSONArray jsonData = new JSONArray();
    // Resolve the requested feature type; the list never holds more than
    // one entry, but the criteria below is written against a collection.
    List<SimpleFeatureType> featureTypes= new ArrayList();
    if(simpleFeatureTypeId != null && simpleFeatureTypeId != -1){
        SimpleFeatureType sft = (SimpleFeatureType)Stripersist.getEntityManager().find(SimpleFeatureType.class, simpleFeatureTypeId);
        if (sft!=null){
            featureTypes.add(sft);
        }
    }else{
        throw new IllegalArgumentException ("No simpleFeatureType id provided");
    }
    Session sess = (Session)Stripersist.getEntityManager().getDelegate();
    Criteria c = sess.createCriteria(AttributeDescriptor.class);
    /* Criteria for the all attribute descriptor ids of the feature types
     * in featureTypes
     */
    // Subquery: project the attribute ids of the selected feature type(s),
    // then restrict the AttributeDescriptor query to those ids.
    DetachedCriteria c2 = DetachedCriteria.forClass(SimpleFeatureType.class);
    Collection ftIds = new ArrayList<Long>();
    for (SimpleFeatureType sft : featureTypes) {
        ftIds.add(sft.getId());
    }
    c2.add(Restrictions.in("id", ftIds));
    c2.createAlias("attributes", "attr");
    c2.setProjection(Projections.property("attr.id"));
    c.add(org.hibernate.criterion.Property.forName("id").in(c2));
    int rowCount = c.list().size();
    List<AttributeDescriptor> attrs = c.list();
    for(Iterator<AttributeDescriptor> it = attrs.iterator(); it.hasNext();){
        AttributeDescriptor attr = it.next();
        // Pre-tick checkboxes for attributes already chosen in the
        // configuration (matched by attribute name).
        boolean indexChecked = false;
        boolean resultChecked = false;
        if(solrConfiguration != null){
            for (String configAttribute : solrConfiguration.getIndexAttributes()) {
                if(configAttribute.equals(attr.getName())){
                    indexChecked= true;
                    break;
                }
            }
            for (String resultAttribute : solrConfiguration.getResultAttributes()) {
                if(resultAttribute.equals(attr.getName())){
                    resultChecked = true;
                    break;
                }
            }
        }
        JSONObject j = new JSONObject();
        j.put("id", attr.getId().intValue());
        j.put("alias", attr.getAlias());
        j.put("attribute", attr.getName());
        j.put("indexChecked", indexChecked);
        j.put("resultChecked", resultChecked);
        jsonData.put(j);
    }
    final JSONObject grid = new JSONObject();
    grid.put("totalCount", rowCount);
    grid.put("gridrows", jsonData);
    return new StreamingResolution("application/json") {
        @Override
        public void stream(HttpServletResponse response) throws Exception {
            response.getWriter().print(grid.toString());
        }
    };
}
}
| agpl-3.0 |
FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/ValueObjects/src/ims/emergency/vo/domain/DischargeServicesAndAdviceVoAssembler.java | 33423 | //#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
/*
* This code was generated
* Copyright (C) 1995-2004 IMS MAXIMS plc. All rights reserved.
* IMS Development Environment (version 1.80 build 5589.25814)
* WARNING: DO NOT MODIFY the content of this file
* Generated on 12/10/2015, 13:24
*
*/
package ims.emergency.vo.domain;
import ims.vo.domain.DomainObjectMap;
import java.util.HashMap;
import org.hibernate.proxy.HibernateProxy;
/**
* @author George Cristian Josan
*/
public class DischargeServicesAndAdviceVoAssembler
{
/**
 * Copies every mapped field from one DischargeServicesAndAdviceVo into another.
 *
 * @param valueObjectDest the value object to update
 * @param valueObjectSrc  the value object to copy values from
 * @return valueObjectDest with all fields overwritten, or null when valueObjectSrc is null
 */
public static ims.emergency.vo.DischargeServicesAndAdviceVo copy(ims.emergency.vo.DischargeServicesAndAdviceVo valueObjectDest, ims.emergency.vo.DischargeServicesAndAdviceVo valueObjectSrc)
{
    if (valueObjectSrc == null)
    {
        return null;
    }
    valueObjectDest.setID_DischargeServicesAndAdvice(valueObjectSrc.getID_DischargeServicesAndAdvice());
    valueObjectDest.setIsRIE(valueObjectSrc.getIsRIE());
    valueObjectDest.setPatient(valueObjectSrc.getPatient());
    valueObjectDest.setEpisode(valueObjectSrc.getEpisode());
    valueObjectDest.setAttendance(valueObjectSrc.getAttendance());
    valueObjectDest.setPatientMobility(valueObjectSrc.getPatientMobility());
    valueObjectDest.setTransportArrangedType(valueObjectSrc.getTransportArrangedType());
    valueObjectDest.setTransportDateTime(valueObjectSrc.getTransportDateTime());
    valueObjectDest.setComments(valueObjectSrc.getComments());
    valueObjectDest.setBookingNo(valueObjectSrc.getBookingNo());
    valueObjectDest.setSupportNetworkFamily(valueObjectSrc.getSupportNetworkFamily());
    valueObjectDest.setSupportNetworkProfessionals(valueObjectSrc.getSupportNetworkProfessionals());
    valueObjectDest.setSupportNetworkServices(valueObjectSrc.getSupportNetworkServices());
    valueObjectDest.setEquipment(valueObjectSrc.getEquipment());
    return valueObjectDest;
}
/**
 * Convenience overload: creates a fresh DomainObjectMap and delegates to the
 * map-taking variant. Use that variant directly when making more than one
 * assembler call so already-created value objects are shared.
 *
 * @param domainObjectSet set of ims.emergency.domain.objects.DischargeServicesAndAdvice objects
 */
public static ims.emergency.vo.DischargeServicesAndAdviceVoCollection createDischargeServicesAndAdviceVoCollectionFromDischargeServicesAndAdvice(java.util.Set domainObjectSet)
{
    DomainObjectMap freshMap = new DomainObjectMap();
    return createDischargeServicesAndAdviceVoCollectionFromDischargeServicesAndAdvice(freshMap, domainObjectSet);
}
/**
 * Builds a value-object collection from a set of domain objects, counting
 * how many records are active and how many are recorded-in-error (RIE).
 *
 * @param map             maps DomainObjects to already-created ValueObjects
 * @param domainObjectSet set of ims.emergency.domain.objects.DischargeServicesAndAdvice objects
 */
public static ims.emergency.vo.DischargeServicesAndAdviceVoCollection createDischargeServicesAndAdviceVoCollectionFromDischargeServicesAndAdvice(DomainObjectMap map, java.util.Set domainObjectSet)
{
    ims.emergency.vo.DischargeServicesAndAdviceVoCollection voList = new ims.emergency.vo.DischargeServicesAndAdviceVoCollection();
    if (domainObjectSet == null)
    {
        return voList;
    }
    int rieTotal = 0;
    int activeTotal = 0;
    for (java.util.Iterator iterator = domainObjectSet.iterator(); iterator.hasNext(); )
    {
        ims.emergency.domain.objects.DischargeServicesAndAdvice domainObject = (ims.emergency.domain.objects.DischargeServicesAndAdvice) iterator.next();
        // create() may return null (e.g. RIE filtering); only non-null VOs are collected.
        ims.emergency.vo.DischargeServicesAndAdviceVo vo = create(map, domainObject);
        if (vo != null)
        {
            voList.add(vo);
        }
        if (domainObject != null)
        {
            if (Boolean.TRUE.equals(domainObject.getIsRIE()))
                rieTotal++;
            else
                activeTotal++;
        }
    }
    voList.setRieCount(rieTotal);
    voList.setActiveCount(activeTotal);
    return voList;
}
/**
 * Convenience overload: creates a fresh DomainObjectMap and delegates to the
 * map-taking variant.
 *
 * @param domainObjectList list of ims.emergency.domain.objects.DischargeServicesAndAdvice objects
 */
public static ims.emergency.vo.DischargeServicesAndAdviceVoCollection createDischargeServicesAndAdviceVoCollectionFromDischargeServicesAndAdvice(java.util.List domainObjectList)
{
    DomainObjectMap freshMap = new DomainObjectMap();
    return createDischargeServicesAndAdviceVoCollectionFromDischargeServicesAndAdvice(freshMap, domainObjectList);
}
/**
 * Builds a value-object collection from a list of domain objects, counting
 * how many records are active and how many are recorded-in-error (RIE).
 *
 * @param map              maps DomainObjects to already-created ValueObjects
 * @param domainObjectList list of ims.emergency.domain.objects.DischargeServicesAndAdvice objects
 */
public static ims.emergency.vo.DischargeServicesAndAdviceVoCollection createDischargeServicesAndAdviceVoCollectionFromDischargeServicesAndAdvice(DomainObjectMap map, java.util.List domainObjectList)
{
    ims.emergency.vo.DischargeServicesAndAdviceVoCollection voList = new ims.emergency.vo.DischargeServicesAndAdviceVoCollection();
    if (domainObjectList == null)
    {
        return voList;
    }
    int rieTotal = 0;
    int activeTotal = 0;
    for (java.util.Iterator iterator = domainObjectList.iterator(); iterator.hasNext(); )
    {
        ims.emergency.domain.objects.DischargeServicesAndAdvice domainObject = (ims.emergency.domain.objects.DischargeServicesAndAdvice) iterator.next();
        // create() may return null (e.g. RIE filtering); only non-null VOs are collected.
        ims.emergency.vo.DischargeServicesAndAdviceVo vo = create(map, domainObject);
        if (vo != null)
        {
            voList.add(vo);
        }
        if (domainObject != null)
        {
            if (Boolean.TRUE.equals(domainObject.getIsRIE()))
                rieTotal++;
            else
                activeTotal++;
        }
    }
    voList.setRieCount(rieTotal);
    voList.setActiveCount(activeTotal);
    return voList;
}
/**
 * Extracts the ims.emergency.domain.objects.DischargeServicesAndAdvice set
 * from the value-object collection, starting with an empty target set and a
 * fresh identity map.
 *
 * @param domainFactory used to load existing (persistent) domain objects
 * @param voCollection  the collection of value objects
 */
public static java.util.Set extractDischargeServicesAndAdviceSet(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.DischargeServicesAndAdviceVoCollection voCollection)
{
    HashMap freshDomMap = new HashMap();
    return extractDischargeServicesAndAdviceSet(domainFactory, voCollection, null, freshDomMap);
}
/**
 * Reconciles an existing domain-object set against the value-object collection:
 * extracts (or loads) a domain object per VO, adds any that are missing from the
 * set, then removes set members that are no longer referenced by the collection.
 * RIE (recorded-in-error) members are never removed.
 *
 * @param domainFactory   used to load existing (persistent) domain objects
 * @param voCollection    the collection of value objects
 * @param domainObjectSet the (possibly null) existing Hibernate-managed set to update in place
 * @param domMap          maps value objects / ids to already-extracted domain objects
 * @return the updated domain-object set (a new HashSet when none was supplied)
 */
public static java.util.Set extractDischargeServicesAndAdviceSet(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.DischargeServicesAndAdviceVoCollection voCollection, java.util.Set domainObjectSet, HashMap domMap)
{
    int size = (null == voCollection) ? 0 : voCollection.size();
    if (domainObjectSet == null)
    {
        domainObjectSet = new java.util.HashSet();
    }
    // Tracks the domain objects the VO collection still references.
    java.util.Set newSet = new java.util.HashSet();
    for(int i=0; i<size; i++)
    {
        ims.emergency.vo.DischargeServicesAndAdviceVo vo = voCollection.get(i);
        ims.emergency.domain.objects.DischargeServicesAndAdvice domainObject = DischargeServicesAndAdviceVoAssembler.extractDischargeServicesAndAdvice(domainFactory, vo, domMap);
        //TODO: This can only occur in the situation of a stale object exception. For now leave it to the Interceptor to handle it.
        if (domainObject == null)
        {
            continue;
        }
        //Trying to avoid the hibernate collection being marked as dirty via its public interface methods. (like add)
        if (!domainObjectSet.contains(domainObject)) domainObjectSet.add(domainObject);
        newSet.add(domainObject);
    }
    java.util.Set removedSet = new java.util.HashSet();
    java.util.Iterator iter = domainObjectSet.iterator();
    //Find out which objects need to be removed
    // (skip RIE members: they stay in the set even when absent from the VO collection)
    while (iter.hasNext())
    {
        ims.domain.DomainObject o = (ims.domain.DomainObject)iter.next();
        if ((o == null || o.getIsRIE() == null || !o.getIsRIE().booleanValue()) && !newSet.contains(o))
        {
            removedSet.add(o);
        }
    }
    iter = removedSet.iterator();
    //Remove the unwanted objects
    // (collected first, then removed, to avoid ConcurrentModificationException)
    while (iter.hasNext())
    {
        domainObjectSet.remove(iter.next());
    }
    return domainObjectSet;
}
/**
 * Extracts the ims.emergency.domain.objects.DischargeServicesAndAdvice list
 * from the value-object collection, starting with an empty target list and a
 * fresh identity map.
 *
 * @param domainFactory used to load existing (persistent) domain objects
 * @param voCollection  the collection of value objects
 */
public static java.util.List extractDischargeServicesAndAdviceList(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.DischargeServicesAndAdviceVoCollection voCollection)
{
    HashMap freshDomMap = new HashMap();
    return extractDischargeServicesAndAdviceList(domainFactory, voCollection, null, freshDomMap);
}
/**
 * Reconciles an existing domain-object list against the value-object collection
 * in place: extracts (or loads) a domain object per VO, inserts missing ones at
 * the matching index, swaps mis-ordered ones, and finally truncates any trailing
 * entries no longer referenced by the collection.
 *
 * @param domainFactory    used to load existing (persistent) domain objects
 * @param voCollection     the collection of value objects
 * @param domainObjectList the (possibly null) existing Hibernate-managed list to update in place
 * @param domMap           maps value objects / ids to already-extracted domain objects
 * @return the updated domain-object list (a new ArrayList when none was supplied)
 */
public static java.util.List extractDischargeServicesAndAdviceList(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.DischargeServicesAndAdviceVoCollection voCollection, java.util.List domainObjectList, HashMap domMap)
{
    int size = (null == voCollection) ? 0 : voCollection.size();
    if (domainObjectList == null)
    {
        domainObjectList = new java.util.ArrayList();
    }
    for(int i=0; i<size; i++)
    {
        ims.emergency.vo.DischargeServicesAndAdviceVo vo = voCollection.get(i);
        ims.emergency.domain.objects.DischargeServicesAndAdvice domainObject = DischargeServicesAndAdviceVoAssembler.extractDischargeServicesAndAdvice(domainFactory, vo, domMap);
        //TODO: This can only occur in the situation of a stale object exception. For now leave it to the Interceptor to handle it.
        if (domainObject == null)
        {
            continue;
        }
        int domIdx = domainObjectList.indexOf(domainObject);
        if (domIdx == -1)
        {
            // Not present yet: insert at the position dictated by the VO collection.
            domainObjectList.add(i, domainObject);
        }
        else if (i != domIdx && i < domainObjectList.size())
        {
            // Present at the wrong position: swap to align list order with VO order.
            Object tmp = domainObjectList.get(i);
            domainObjectList.set(i, domainObjectList.get(domIdx));
            domainObjectList.set(domIdx, tmp);
        }
    }
    //Remove all ones in domList where index > voCollection.size() as these should
    //now represent the ones removed from the VO collection. No longer referenced.
    int i1=domainObjectList.size();
    while (i1 > size)
    {
        domainObjectList.remove(i1-1);
        i1=domainObjectList.size();
    }
    return domainObjectList;
}
/**
 * Creates a ValueObject from the given domain object using a fresh
 * DomainObjectMap.
 *
 * @param domainObject ims.emergency.domain.objects.DischargeServicesAndAdvice
 * @return the populated value object, or null when domainObject is null
 */
public static ims.emergency.vo.DischargeServicesAndAdviceVo create(ims.emergency.domain.objects.DischargeServicesAndAdvice domainObject)
{
    if (domainObject == null)
    {
        return null;
    }
    return create(new DomainObjectMap(), domainObject);
}
/**
 * Creates (or reuses) the ValueObject for the given domain object.
 * A VO already registered in the map for this domain object is returned
 * as-is; otherwise a new VO is created, registered, and populated.
 *
 * @param map          DomainObjectMap of DomainObjects to already created ValueObjects
 * @param domainObject the domain object to convert
 * @return the value object, or null when domainObject is null or insert() filters it out
 */
public static ims.emergency.vo.DischargeServicesAndAdviceVo create(DomainObjectMap map, ims.emergency.domain.objects.DischargeServicesAndAdvice domainObject)
{
    if (domainObject == null)
    {
        return null;
    }
    // Reuse a previously-created VO for this domain object when available.
    ims.emergency.vo.DischargeServicesAndAdviceVo cached = (ims.emergency.vo.DischargeServicesAndAdviceVo) map.getValueObject(domainObject, ims.emergency.vo.DischargeServicesAndAdviceVo.class);
    if (cached != null)
    {
        return cached;
    }
    ims.emergency.vo.DischargeServicesAndAdviceVo valueObject = new ims.emergency.vo.DischargeServicesAndAdviceVo(domainObject.getId(), domainObject.getVersion());
    // Register before populating so cyclic references resolve to this instance.
    map.addValueObject(domainObject, valueObject);
    return insert(map, valueObject, domainObject);
}
/**
 * Populates the ValueObject from the domain object using a fresh
 * DomainObjectMap.
 *
 * @param valueObject  the value object to update
 * @param domainObject ims.emergency.domain.objects.DischargeServicesAndAdvice
 * @return the populated value object (unchanged when domainObject is null)
 */
public static ims.emergency.vo.DischargeServicesAndAdviceVo insert(ims.emergency.vo.DischargeServicesAndAdviceVo valueObject, ims.emergency.domain.objects.DischargeServicesAndAdvice domainObject)
{
    if (domainObject == null)
    {
        return valueObject;
    }
    return insert(new DomainObjectMap(), valueObject, domainObject);
}
/**
 * Populates the ValueObject with all mapped fields of the Domain Object:
 * identifiers, reference fields (as lightweight RefVos), lookup fields with
 * their full parent chains, date/time, strings, and child collections.
 *
 * @param map DomainObjectMap of DomainObjects to already created ValueObjects.
 * @param valueObject to be updated
 * @param domainObject ims.emergency.domain.objects.DischargeServicesAndAdvice
 * @return the populated value object, or null when the record is filtered out
 *         by the recorded-in-error (RIE) / isIncludeRecord rules below
 */
public static ims.emergency.vo.DischargeServicesAndAdviceVo insert(DomainObjectMap map, ims.emergency.vo.DischargeServicesAndAdviceVo valueObject, ims.emergency.domain.objects.DischargeServicesAndAdvice domainObject)
{
    if (null == domainObject)
    {
        return valueObject;
    }
    if (null == map)
    {
        map = new DomainObjectMap();
    }
    valueObject.setID_DischargeServicesAndAdvice(domainObject.getId());
    valueObject.setIsRIE(domainObject.getIsRIE());
    // If this is a recordedInError record, and the domainObject
    // value isIncludeRecord has not been set, then we return null and
    // not the value object
    if (valueObject.getIsRIE() != null && valueObject.getIsRIE().booleanValue() == true && !domainObject.isIncludeRecord())
        return null;
    // If this is not a recordedInError record, and the domainObject
    // value isIncludeRecord has been set, then we return null and
    // not the value object
    if ((valueObject.getIsRIE() == null || valueObject.getIsRIE().booleanValue() == false) && domainObject.isIncludeRecord())
        return null;
    // Patient -- copied as a lightweight RefVo, not a full VO.
    if (domainObject.getPatient() != null)
    {
        if(domainObject.getPatient() instanceof HibernateProxy) // If the proxy is set, there is no need to lazy load, the proxy knows the id already.
        {
            // Read the id straight from the lazy initializer; version is unknown (-1).
            HibernateProxy p = (HibernateProxy) domainObject.getPatient();
            int id = Integer.parseInt(p.getHibernateLazyInitializer().getIdentifier().toString());
            valueObject.setPatient(new ims.core.patient.vo.PatientRefVo(id, -1));
        }
        else
        {
            valueObject.setPatient(new ims.core.patient.vo.PatientRefVo(domainObject.getPatient().getId(), domainObject.getPatient().getVersion()));
        }
    }
    // Episode -- same proxy-aware RefVo handling as Patient.
    if (domainObject.getEpisode() != null)
    {
        if(domainObject.getEpisode() instanceof HibernateProxy) // If the proxy is set, there is no need to lazy load, the proxy knows the id already.
        {
            HibernateProxy p = (HibernateProxy) domainObject.getEpisode();
            int id = Integer.parseInt(p.getHibernateLazyInitializer().getIdentifier().toString());
            valueObject.setEpisode(new ims.core.admin.vo.EpisodeOfCareRefVo(id, -1));
        }
        else
        {
            valueObject.setEpisode(new ims.core.admin.vo.EpisodeOfCareRefVo(domainObject.getEpisode().getId(), domainObject.getEpisode().getVersion()));
        }
    }
    // Attendance -- same proxy-aware RefVo handling as Patient.
    if (domainObject.getAttendance() != null)
    {
        if(domainObject.getAttendance() instanceof HibernateProxy) // If the proxy is set, there is no need to lazy load, the proxy knows the id already.
        {
            HibernateProxy p = (HibernateProxy) domainObject.getAttendance();
            int id = Integer.parseInt(p.getHibernateLazyInitializer().getIdentifier().toString());
            valueObject.setAttendance(new ims.core.admin.vo.CareContextRefVo(id, -1));
        }
        else
        {
            valueObject.setAttendance(new ims.core.admin.vo.CareContextRefVo(domainObject.getAttendance().getId(), domainObject.getAttendance().getVersion()));
        }
    }
    // PatientMobility -- convert each lookup instance (with image, colour and
    // full parent chain) into a VO lookup and collect them.
    java.util.List listPatientMobility = domainObject.getPatientMobility();
    ims.core.vo.lookups.PatientMobilityCollection PatientMobility = new ims.core.vo.lookups.PatientMobilityCollection();
    for(java.util.Iterator iterator = listPatientMobility.iterator(); iterator.hasNext(); )
    {
        ims.framework.utils.ImagePath img = null;
        ims.framework.utils.Color color = null;
        ims.domain.lookups.LookupInstance instance =
            (ims.domain.lookups.LookupInstance) iterator.next();
        if (instance.getImage() != null)
        {
            img = new ims.framework.utils.ImagePath(instance.getImage().getImageId(), instance.getImage().getImagePath());
        }
        else
        {
            img = null;
        }
        color = instance.getColor();
        if (color != null)
            color.getValue();
        ims.core.vo.lookups.PatientMobility voInstance = new ims.core.vo.lookups.PatientMobility(instance.getId(),instance.getText(), instance.isActive(), null, img, color);
        ims.core.vo.lookups.PatientMobility parentVoInstance = voInstance;
        ims.domain.lookups.LookupInstance parent = instance.getParent();
        // Walk up the lookup hierarchy so the VO carries every ancestor.
        while (parent != null)
        {
            if (parent.getImage() != null)
            {
                img = new ims.framework.utils.ImagePath(parent.getImage().getImageId(), parent.getImage().getImagePath());
            }
            else
            {
                img = null;
            }
            color = parent.getColor();
            if (color != null)
                color.getValue();
            parentVoInstance.setParent(new ims.core.vo.lookups.PatientMobility(parent.getId(),parent.getText(), parent.isActive(), null, img, color));
            parentVoInstance = parentVoInstance.getParent();
            parent = parent.getParent();
        }
        PatientMobility.add(voInstance);
    }
    valueObject.setPatientMobility( PatientMobility );
    // TransportArrangedType -- single lookup, same parent-chain walk as above.
    ims.domain.lookups.LookupInstance instance5 = domainObject.getTransportArrangedType();
    if ( null != instance5 ) {
        ims.framework.utils.ImagePath img = null;
        ims.framework.utils.Color color = null;
        img = null;
        if (instance5.getImage() != null)
        {
            img = new ims.framework.utils.ImagePath(instance5.getImage().getImageId(), instance5.getImage().getImagePath());
        }
        color = instance5.getColor();
        if (color != null)
            color.getValue();
        ims.scheduling.vo.lookups.ApptTransportType voLookup5 = new ims.scheduling.vo.lookups.ApptTransportType(instance5.getId(),instance5.getText(), instance5.isActive(), null, img, color);
        ims.scheduling.vo.lookups.ApptTransportType parentVoLookup5 = voLookup5;
        ims.domain.lookups.LookupInstance parent5 = instance5.getParent();
        while (parent5 != null)
        {
            if (parent5.getImage() != null)
            {
                img = new ims.framework.utils.ImagePath(parent5.getImage().getImageId(), parent5.getImage().getImagePath() );
            }
            else
            {
                img = null;
            }
            color = parent5.getColor();
            if (color != null)
                color.getValue();
            parentVoLookup5.setParent(new ims.scheduling.vo.lookups.ApptTransportType(parent5.getId(),parent5.getText(), parent5.isActive(), null, img, color));
            parentVoLookup5 = parentVoLookup5.getParent();
            parent5 = parent5.getParent();
        }
        valueObject.setTransportArrangedType(voLookup5);
    }
    // TransportDateTime -- wrapped in the framework DateTime type.
    java.util.Date TransportDateTime = domainObject.getTransportDateTime();
    if ( null != TransportDateTime )
    {
        valueObject.setTransportDateTime(new ims.framework.utils.DateTime(TransportDateTime) );
    }
    // Comments
    valueObject.setComments(domainObject.getComments());
    // BookingNo
    valueObject.setBookingNo(domainObject.getBookingNo());
    // SupportNetworkFamily -- child collections delegated to their own assemblers.
    valueObject.setSupportNetworkFamily(ims.core.vo.domain.SupportNetworkFamilyAssembler.createSupportNetworkFamilyCollectionFromSupportNetworkFamily(map, domainObject.getSupportNetworkFamily()) );
    // SupportNetworkProfessionals
    valueObject.setSupportNetworkProfessionals(ims.core.vo.domain.SupportNetworkProfessionalVoAssembler.createSupportNetworkProfessionalVoCollectionFromSupportNetworkProfessional(map, domainObject.getSupportNetworkProfessionals()) );
    // SupportNetworkServices
    valueObject.setSupportNetworkServices(ims.core.vo.domain.SupportNetworkServicesVoAssembler.createSupportNetworkServicesVoCollectionFromSupportNetworkServices(map, domainObject.getSupportNetworkServices()) );
    // Equipment -- lookup collection, same conversion pattern as PatientMobility.
    java.util.List listEquipment = domainObject.getEquipment();
    ims.emergency.vo.lookups.DischargequipmentCollection Equipment = new ims.emergency.vo.lookups.DischargequipmentCollection();
    for(java.util.Iterator iterator = listEquipment.iterator(); iterator.hasNext(); )
    {
        ims.framework.utils.ImagePath img = null;
        ims.framework.utils.Color color = null;
        ims.domain.lookups.LookupInstance instance =
            (ims.domain.lookups.LookupInstance) iterator.next();
        if (instance.getImage() != null)
        {
            img = new ims.framework.utils.ImagePath(instance.getImage().getImageId(), instance.getImage().getImagePath());
        }
        else
        {
            img = null;
        }
        color = instance.getColor();
        if (color != null)
            color.getValue();
        ims.emergency.vo.lookups.Dischargequipment voInstance = new ims.emergency.vo.lookups.Dischargequipment(instance.getId(),instance.getText(), instance.isActive(), null, img, color);
        ims.emergency.vo.lookups.Dischargequipment parentVoInstance = voInstance;
        ims.domain.lookups.LookupInstance parent = instance.getParent();
        while (parent != null)
        {
            if (parent.getImage() != null)
            {
                img = new ims.framework.utils.ImagePath(parent.getImage().getImageId(), parent.getImage().getImagePath());
            }
            else
            {
                img = null;
            }
            color = parent.getColor();
            if (color != null)
                color.getValue();
            parentVoInstance.setParent(new ims.emergency.vo.lookups.Dischargequipment(parent.getId(),parent.getText(), parent.isActive(), null, img, color));
            parentVoInstance = parentVoInstance.getParent();
            parent = parent.getParent();
        }
        Equipment.add(voInstance);
    }
    valueObject.setEquipment( Equipment );
    return valueObject;
}
/**
 * Creates (or loads) the domain object for the given value object using a
 * fresh identity map.
 *
 * @param domainFactory used to load existing (persistent) domain objects
 * @param valueObject   the value object to extract the domain fields from
 */
public static ims.emergency.domain.objects.DischargeServicesAndAdvice extractDischargeServicesAndAdvice(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.DischargeServicesAndAdviceVo valueObject)
{
    HashMap freshDomMap = new HashMap();
    return extractDischargeServicesAndAdvice(domainFactory, valueObject, freshDomMap);
}
/**
 * Creates (or loads) the domain object for the value object and copies every
 * mapped field back onto it: references resolved through the domain factory,
 * lookup collections reconciled in place, strings normalised, and child
 * collections delegated to their own assemblers.
 *
 * @param domainFactory used to load existing (persistent) domain objects
 * @param valueObject   the value object to extract the domain fields from
 * @param domMap        maps value objects / ids to already-extracted domain objects
 * @return the populated domain object, or null when valueObject is null or the
 *         persistent object for its id can no longer be found (stale object)
 */
public static ims.emergency.domain.objects.DischargeServicesAndAdvice extractDischargeServicesAndAdvice(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.DischargeServicesAndAdviceVo valueObject, HashMap domMap)
{
    if (null == valueObject)
    {
        return null;
    }
    Integer id = valueObject.getID_DischargeServicesAndAdvice();
    ims.emergency.domain.objects.DischargeServicesAndAdvice domainObject = null;
    if ( null == id)
    {
        // No persistent id yet: reuse the transient object keyed by VO identity, or create one.
        if (domMap.get(valueObject) != null)
        {
            return (ims.emergency.domain.objects.DischargeServicesAndAdvice)domMap.get(valueObject);
        }
        // ims.emergency.vo.DischargeServicesAndAdviceVo ID_DischargeServicesAndAdvice field is unknown
        domainObject = new ims.emergency.domain.objects.DischargeServicesAndAdvice();
        domMap.put(valueObject, domainObject);
    }
    else
    {
        // Persistent id known: reuse via the "class__id" key, or load from the factory.
        String key = (valueObject.getClass().getName() + "__" + valueObject.getID_DischargeServicesAndAdvice());
        if (domMap.get(key) != null)
        {
            return (ims.emergency.domain.objects.DischargeServicesAndAdvice)domMap.get(key);
        }
        domainObject = (ims.emergency.domain.objects.DischargeServicesAndAdvice) domainFactory.getDomainObject(ims.emergency.domain.objects.DischargeServicesAndAdvice.class, id );
        //TODO: Not sure how this should be handled. Effectively it must be a staleobject exception, but maybe should be handled as that further up.
        if (domainObject == null)
            return null;
        domMap.put(key, domainObject);
    }
    domainObject.setVersion(valueObject.getVersion_DischargeServicesAndAdvice());
    // Patient -- resolve the RefVo back to a domain object; an unmodified RefVo
    // (BoVersion == -1) keeps the existing reference without a database round trip.
    ims.core.patient.domain.objects.Patient value1 = null;
    if ( null != valueObject.getPatient() )
    {
        if (valueObject.getPatient().getBoId() == null)
        {
            if (domMap.get(valueObject.getPatient()) != null)
            {
                value1 = (ims.core.patient.domain.objects.Patient)domMap.get(valueObject.getPatient());
            }
        }
        else if (valueObject.getBoVersion() == -1) // RefVo was not modified since obtained from the Assembler, no need to update the BO field
        {
            value1 = domainObject.getPatient();
        }
        else
        {
            value1 = (ims.core.patient.domain.objects.Patient)domainFactory.getDomainObject(ims.core.patient.domain.objects.Patient.class, valueObject.getPatient().getBoId());
        }
    }
    domainObject.setPatient(value1);
    // Episode -- same RefVo resolution as Patient.
    ims.core.admin.domain.objects.EpisodeOfCare value2 = null;
    if ( null != valueObject.getEpisode() )
    {
        if (valueObject.getEpisode().getBoId() == null)
        {
            if (domMap.get(valueObject.getEpisode()) != null)
            {
                value2 = (ims.core.admin.domain.objects.EpisodeOfCare)domMap.get(valueObject.getEpisode());
            }
        }
        else if (valueObject.getBoVersion() == -1) // RefVo was not modified since obtained from the Assembler, no need to update the BO field
        {
            value2 = domainObject.getEpisode();
        }
        else
        {
            value2 = (ims.core.admin.domain.objects.EpisodeOfCare)domainFactory.getDomainObject(ims.core.admin.domain.objects.EpisodeOfCare.class, valueObject.getEpisode().getBoId());
        }
    }
    domainObject.setEpisode(value2);
    // Attendance -- same RefVo resolution as Patient.
    ims.core.admin.domain.objects.CareContext value3 = null;
    if ( null != valueObject.getAttendance() )
    {
        if (valueObject.getAttendance().getBoId() == null)
        {
            if (domMap.get(valueObject.getAttendance()) != null)
            {
                value3 = (ims.core.admin.domain.objects.CareContext)domMap.get(valueObject.getAttendance());
            }
        }
        else if (valueObject.getBoVersion() == -1) // RefVo was not modified since obtained from the Assembler, no need to update the BO field
        {
            value3 = domainObject.getAttendance();
        }
        else
        {
            value3 = (ims.core.admin.domain.objects.CareContext)domainFactory.getDomainObject(ims.core.admin.domain.objects.CareContext.class, valueObject.getAttendance().getBoId());
        }
    }
    domainObject.setAttendance(value3);
    // PatientMobility -- reconcile the lookup list in place: insert missing
    // instances at the VO position, swap mis-ordered ones, truncate the rest.
    ims.core.vo.lookups.PatientMobilityCollection collection4 =
        valueObject.getPatientMobility();
    java.util.List domainPatientMobility = domainObject.getPatientMobility();;
    int collection4Size=0;
    if (collection4 == null)
    {
        domainPatientMobility = new java.util.ArrayList(0);
    }
    else
    {
        collection4Size = collection4.size();
    }
    for(int i=0; i<collection4Size; i++)
    {
        int instanceId = collection4.get(i).getID();
        ims.domain.lookups.LookupInstanceRef dom =new ims.domain.lookups.LookupInstanceRef(domainFactory.getLookupInstance(instanceId));
        int domIdx = domainPatientMobility.indexOf(dom);
        if (domIdx == -1)
        {
            domainPatientMobility.add(i, dom);
        }
        else if (i != domIdx && i < domainPatientMobility.size())
        {
            Object tmp = domainPatientMobility.get(i);
            domainPatientMobility.set(i, domainPatientMobility.get(domIdx));
            domainPatientMobility.set(domIdx, tmp);
        }
    }
    //Remove all ones in domList where index > voCollection.size() as these should
    //now represent the ones removed from the VO collection. No longer referenced.
    int j4 = domainPatientMobility.size();
    while (j4 > collection4Size)
    {
        domainPatientMobility.remove(j4-1);
        j4 = domainPatientMobility.size();
    }
    domainObject.setPatientMobility(domainPatientMobility);
    // create LookupInstance from vo LookupType
    ims.domain.lookups.LookupInstance value5 = null;
    if ( null != valueObject.getTransportArrangedType() )
    {
        value5 =
            domainFactory.getLookupInstance(valueObject.getTransportArrangedType().getID());
    }
    domainObject.setTransportArrangedType(value5);
    // TransportDateTime -- unwrap the framework DateTime back to java.util.Date.
    ims.framework.utils.DateTime dateTime6 = valueObject.getTransportDateTime();
    java.util.Date value6 = null;
    if ( dateTime6 != null )
    {
        value6 = dateTime6.getJavaDate();
    }
    domainObject.setTransportDateTime(value6);
    //This is to overcome a bug in both Sybase and Oracle which prevents them from storing an empty string correctly
    //Sybase stores it as a single space, Oracle stores it as NULL. This fix will make them consistent at least.
    if (valueObject.getComments() != null && valueObject.getComments().equals(""))
    {
        valueObject.setComments(null);
    }
    domainObject.setComments(valueObject.getComments());
    //This is to overcome a bug in both Sybase and Oracle which prevents them from storing an empty string correctly
    //Sybase stores it as a single space, Oracle stores it as NULL. This fix will make them consistent at least.
    if (valueObject.getBookingNo() != null && valueObject.getBookingNo().equals(""))
    {
        valueObject.setBookingNo(null);
    }
    domainObject.setBookingNo(valueObject.getBookingNo());
    // Child collections -- delegated to their own assemblers, updated in place.
    domainObject.setSupportNetworkFamily(ims.core.vo.domain.SupportNetworkFamilyAssembler.extractSupportNetworkFamilyList(domainFactory, valueObject.getSupportNetworkFamily(), domainObject.getSupportNetworkFamily(), domMap));
    domainObject.setSupportNetworkProfessionals(ims.core.vo.domain.SupportNetworkProfessionalVoAssembler.extractSupportNetworkProfessionalList(domainFactory, valueObject.getSupportNetworkProfessionals(), domainObject.getSupportNetworkProfessionals(), domMap));
    domainObject.setSupportNetworkServices(ims.core.vo.domain.SupportNetworkServicesVoAssembler.extractSupportNetworkServicesList(domainFactory, valueObject.getSupportNetworkServices(), domainObject.getSupportNetworkServices(), domMap));
    // Equipment -- same in-place lookup-list reconciliation as PatientMobility.
    ims.emergency.vo.lookups.DischargequipmentCollection collection12 =
        valueObject.getEquipment();
    java.util.List domainEquipment = domainObject.getEquipment();;
    int collection12Size=0;
    if (collection12 == null)
    {
        domainEquipment = new java.util.ArrayList(0);
    }
    else
    {
        collection12Size = collection12.size();
    }
    for(int i=0; i<collection12Size; i++)
    {
        int instanceId = collection12.get(i).getID();
        ims.domain.lookups.LookupInstanceRef dom =new ims.domain.lookups.LookupInstanceRef(domainFactory.getLookupInstance(instanceId));
        int domIdx = domainEquipment.indexOf(dom);
        if (domIdx == -1)
        {
            domainEquipment.add(i, dom);
        }
        else if (i != domIdx && i < domainEquipment.size())
        {
            Object tmp = domainEquipment.get(i);
            domainEquipment.set(i, domainEquipment.get(domIdx));
            domainEquipment.set(domIdx, tmp);
        }
    }
    //Remove all ones in domList where index > voCollection.size() as these should
    //now represent the ones removed from the VO collection. No longer referenced.
    int j12 = domainEquipment.size();
    while (j12 > collection12Size)
    {
        domainEquipment.remove(j12-1);
        j12 = domainEquipment.size();
    }
    domainObject.setEquipment(domainEquipment);
    return domainObject;
}
}
| agpl-3.0 |
halfspiral/tuxguitar | TuxGuitar-ui-toolkit-jfx/src/org/herac/tuxguitar/ui/jfx/resource/JFXCursor.java | 811 | package org.herac.tuxguitar.ui.jfx.resource;
import java.util.HashMap;
import java.util.Map;
import javafx.scene.Cursor;
import org.herac.tuxguitar.ui.resource.UICursor;
/**
 * Maps toolkit-independent {@link UICursor} constants to their JavaFX
 * {@link Cursor} equivalents.
 */
public class JFXCursor {

    // Immutable lookup table shared by all callers; wrapped unmodifiable so the
    // static constant cannot be mutated after class initialization.
    private static final Map<UICursor, Cursor> CURSOR_MAP = JFXCursor.createCursorMap();

    // Utility class: not instantiable.
    private JFXCursor() {
        super();
    }

    /** Builds the UICursor -> JavaFX Cursor translation table. */
    private static Map<UICursor, Cursor> createCursorMap() {
        Map<UICursor, Cursor> cursorMap = new HashMap<UICursor, Cursor>();
        cursorMap.put(UICursor.NORMAL, Cursor.DEFAULT);
        cursorMap.put(UICursor.WAIT, Cursor.WAIT);
        cursorMap.put(UICursor.HAND, Cursor.HAND);
        cursorMap.put(UICursor.SIZEWE, Cursor.H_RESIZE);
        cursorMap.put(UICursor.SIZENS, Cursor.V_RESIZE);
        return java.util.Collections.unmodifiableMap(cursorMap);
    }

    /**
     * Returns the JavaFX cursor for the given toolkit-independent cursor,
     * or null when no mapping exists (plain map lookup).
     */
    public static Cursor getCursor(UICursor cursor) {
        return CURSOR_MAP.get(cursor);
    }
}
| lgpl-2.1 |
whdc/ieo-beast | src/dr/evomodel/treelikelihood/AbstractLikelihoodCore.java | 18890 | /*
* AbstractLikelihoodCore.java
*
* Copyright (C) 2002-2006 Alexei Drummond and Andrew Rambaut
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.evomodel.treelikelihood;
/**
* AbstractLikelihoodCore - An abstract base class for LikelihoodCores
*
* @author Andrew Rambaut
* @version $Id: AbstractLikelihoodCore.java,v 1.11 2006/08/30 16:02:42 rambaut Exp $
*/
public abstract class AbstractLikelihoodCore implements LikelihoodCore {
    // Number of character states (e.g. 4 for nucleotides, 20 for amino acids).
    protected int stateCount;
    // Number of nodes in the tree.
    protected int nodeCount;
    // Number of site patterns.
    protected int patternCount;
    // Length of a single node's partials array (set in initialize()).
    protected int partialsSize;
    // Length of a single transition matrix: stateCount * stateCount.
    protected int matrixSize;
    // Number of transition matrices per node, i.e. the number of rate categories.
    protected int matrixCount;
    // Whether sites are integrated over all matrices (rate categories).
    protected boolean integrateCategories;
    // Partial likelihoods, double-buffered: [buffer 0/1][node][partials].
    // A partials array is laid out category-major: [category][pattern][state]
    // (see the stride arithmetic in scalePartials()).
    protected double[][][] partials;
    // Observed states at tip nodes: [node][pattern]; null for internal nodes.
    protected int[][] states;
    // Transition probability matrices, double-buffered: [buffer 0/1][node][matrixCount * matrixSize].
    protected double[][][] matrices;
    // Per-node buffer selectors (0 or 1); the "stored" copies back the
    // "current" ones so restoreState() can undo a rejected proposal.
    protected int[] currentMatricesIndices;
    protected int[] storedMatricesIndices;
    protected int[] currentPartialsIndices;
    protected int[] storedPartialsIndices;
    // Whether per-pattern rescaling is enabled (guards against numerical underflow).
    protected boolean useScaling = false;
    // Log scaling factors, double-buffered: [buffer 0/1][node][pattern].
    protected double[][][] scalingFactors;
    // A pattern is rescaled only when its largest partial drops below this value.
    private double scalingThreshold = 1.0E-100;
    /**
     * Constructor
     *
     * @param stateCount number of states
     */
    public AbstractLikelihoodCore(int stateCount) {
        this.stateCount = stateCount;
    }
    /**
     * initializes partial likelihood arrays.
     *
     * @param nodeCount the number of nodes in the tree
     * @param patternCount the number of patterns
     * @param matrixCount the number of matrices (i.e., number of categories)
     * @param integrateCategories whether sites are being integrated over all matrices
     */
    public void initialize(int nodeCount, int patternCount, int matrixCount, boolean integrateCategories) {
        this.nodeCount = nodeCount;
        this.patternCount = patternCount;
        this.matrixCount = matrixCount;
        this.integrateCategories = integrateCategories;
        // When integrating over categories, each node keeps one block of
        // (patternCount * stateCount) partials per category.
        if (integrateCategories) {
            partialsSize = patternCount * stateCount * matrixCount;
        } else {
            partialsSize = patternCount * stateCount;
        }
        // Two buffers per node so a proposal can be computed without destroying
        // the previously accepted state (see store/restoreState()).
        partials = new double[2][nodeCount][];
        currentMatricesIndices = new int[nodeCount];
        storedMatricesIndices = new int[nodeCount];
        currentPartialsIndices = new int[nodeCount];
        storedPartialsIndices = new int[nodeCount];
        states = new int[nodeCount][];
        // Partials/states are allocated lazily per node: tips get states,
        // internal nodes get partials (see createNodePartials/createNodeStates).
        for (int i = 0; i < nodeCount; i++) {
            partials[0][i] = null;
            partials[1][i] = null;
            states[i] = null;
        }
        matrixSize = stateCount * stateCount;
        matrices = new double[2][nodeCount][matrixCount * matrixSize];
    }
    /**
     * cleans up and deallocates arrays.
     * <p>
     * NOTE(review): relies on GC finalization (deprecated in modern Java);
     * callers should not depend on when, or whether, this runs.
     */
    public void finalize() throws java.lang.Throwable {
        super.finalize();
        nodeCount = 0;
        patternCount = 0;
        matrixCount = 0;
        partials = null;
        currentPartialsIndices = null;
        storedPartialsIndices = null;
        states = null;
        matrices = null;
        currentMatricesIndices = null;
        storedMatricesIndices = null;
        scalingFactors = null;
    }
    /**
     * Enables or disables rescaling of partials; allocates the per-pattern
     * scaling-factor storage when enabling.
     */
    public void setUseScaling(boolean useScaling) {
        this.useScaling = useScaling;
        if (useScaling) {
            scalingFactors = new double[2][nodeCount][patternCount];
        }
    }
    /**
     * Allocates partials for a node
     */
    public void createNodePartials(int nodeIndex) {
        this.partials[0][nodeIndex] = new double[partialsSize];
        this.partials[1][nodeIndex] = new double[partialsSize];
    }
    /**
     * Sets partials for a node
     */
    public void setNodePartials(int nodeIndex, double[] partials) {
        if (this.partials[0][nodeIndex] == null) {
            createNodePartials(nodeIndex);
        }
        if (partials.length < partialsSize) {
            // Input covers a single category; replicate it across all matrixCount categories.
            int k = 0;
            for (int i = 0; i < matrixCount; i++) {
                System.arraycopy(partials, 0, this.partials[0][nodeIndex], k, partials.length);
                k += partials.length;
            }
        } else {
            System.arraycopy(partials, 0, this.partials[0][nodeIndex], 0, partials.length);
        }
    }
    /**
     * Allocates states for a node
     */
    public void createNodeStates(int nodeIndex) {
        this.states[nodeIndex] = new int[patternCount];
    }
    /**
     * Sets states for a node
     */
    public void setNodeStates(int nodeIndex, int[] states) {
        if (this.states[nodeIndex] == null) {
            createNodeStates(nodeIndex);
        }
        System.arraycopy(states, 0, this.states[nodeIndex], 0, patternCount);
    }
    /**
     * Gets states for a node
     */
    public void getNodeStates(int nodeIndex, int[] states) {
        System.arraycopy(this.states[nodeIndex], 0, states, 0, patternCount);
    }
    /**
     * Flips the matrix buffer for a node so newly set matrices do not
     * overwrite the stored (restorable) ones.
     */
    public void setNodeMatrixForUpdate(int nodeIndex) {
        currentMatricesIndices[nodeIndex] = 1 - currentMatricesIndices[nodeIndex];
    }
    /**
     * Sets probability matrix for a node
     */
    public void setNodeMatrix(int nodeIndex, int matrixIndex, double[] matrix) {
        System.arraycopy(matrix, 0, matrices[currentMatricesIndices[nodeIndex]][nodeIndex],
                matrixIndex * matrixSize, matrixSize);
    }
    /**
     * Gets probability matrix for a node
     */
    public void getNodeMatrix(int nodeIndex, int matrixIndex, double[] matrix) {
        System.arraycopy(matrices[currentMatricesIndices[nodeIndex]][nodeIndex],
                matrixIndex * matrixSize, matrix, 0, matrixSize);
    }
    /**
     * Flips the partials buffer for a node so newly computed partials do not
     * overwrite the stored (restorable) ones.
     */
    public void setNodePartialsForUpdate(int nodeIndex) {
        currentPartialsIndices[nodeIndex] = 1 - currentPartialsIndices[nodeIndex];
    }
    /**
     * Sets the currently updating node partials for node nodeIndex. This may
     * need to repeatedly copy the partials for the different category partitions
     */
    public void setCurrentNodePartials(int nodeIndex, double[] partials) {
        if (partials.length < partialsSize) {
            // Input covers a single category; replicate it across all matrixCount categories.
            int k = 0;
            for (int i = 0; i < matrixCount; i++) {
                System.arraycopy(partials, 0, this.partials[currentPartialsIndices[nodeIndex]][nodeIndex], k, partials.length);
                k += partials.length;
            }
        } else {
            System.arraycopy(partials, 0, this.partials[currentPartialsIndices[nodeIndex]][nodeIndex], 0, partials.length);
        }
    }
    /**
     * Calculates partial likelihoods at a node.
     *
     * @param nodeIndex1 the 'child 1' node
     * @param nodeIndex2 the 'child 2' node
     * @param nodeIndex3 the 'parent' node
     */
    public void calculatePartials(int nodeIndex1, int nodeIndex2, int nodeIndex3) {
        // Dispatch on whether each child is a tip (has observed states) or an
        // internal node (has partials); the states/states case is cheapest.
        if (states[nodeIndex1] != null) {
            if (states[nodeIndex2] != null) {
                calculateStatesStatesPruning(
                        states[nodeIndex1], matrices[currentMatricesIndices[nodeIndex1]][nodeIndex1],
                        states[nodeIndex2], matrices[currentMatricesIndices[nodeIndex2]][nodeIndex2],
                        partials[currentPartialsIndices[nodeIndex3]][nodeIndex3]);
            } else {
                calculateStatesPartialsPruning(states[nodeIndex1], matrices[currentMatricesIndices[nodeIndex1]][nodeIndex1],
                        partials[currentPartialsIndices[nodeIndex2]][nodeIndex2], matrices[currentMatricesIndices[nodeIndex2]][nodeIndex2],
                        partials[currentPartialsIndices[nodeIndex3]][nodeIndex3]);
            }
        } else {
            if (states[nodeIndex2] != null) {
                calculateStatesPartialsPruning(states[nodeIndex2], matrices[currentMatricesIndices[nodeIndex2]][nodeIndex2],
                        partials[currentPartialsIndices[nodeIndex1]][nodeIndex1], matrices[currentMatricesIndices[nodeIndex1]][nodeIndex1],
                        partials[currentPartialsIndices[nodeIndex3]][nodeIndex3]);
            } else {
                calculatePartialsPartialsPruning(partials[currentPartialsIndices[nodeIndex1]][nodeIndex1], matrices[currentMatricesIndices[nodeIndex1]][nodeIndex1],
                        partials[currentPartialsIndices[nodeIndex2]][nodeIndex2], matrices[currentMatricesIndices[nodeIndex2]][nodeIndex2],
                        partials[currentPartialsIndices[nodeIndex3]][nodeIndex3]);
            }
        }
        if (useScaling) {
            scalePartials(nodeIndex3);
        }
        //
        //        int k =0;
        //        for (int i = 0; i < patternCount; i++) {
        //            double f = 0.0;
        //
        //            for (int j = 0; j < stateCount; j++) {
        //                f += partials[currentPartialsIndices[nodeIndex3]][nodeIndex3][k];
        //                k++;
        //            }
        //            if (f == 0.0) {
        //                Logger.getLogger("error").severe("A partial likelihood (node index = " + nodeIndex3 + ", pattern = "+ i +") is zero for all states.");
        //            }
        //        }
    }
    /**
     * Calculates partial likelihoods at a node when both children have states.
     */
    protected abstract void calculateStatesStatesPruning(int[] states1, double[] matrices1,
                                                         int[] states2, double[] matrices2,
                                                         double[] partials3);
    /**
     * Calculates partial likelihoods at a node when one child has states and one has partials.
     */
    protected abstract void calculateStatesPartialsPruning(int[] states1, double[] matrices1,
                                                           double[] partials2, double[] matrices2,
                                                           double[] partials3);
    /**
     * Calculates partial likelihoods at a node when both children have partials.
     */
    protected abstract void calculatePartialsPartialsPruning(double[] partials1, double[] matrices1,
                                                             double[] partials2, double[] matrices2,
                                                             double[] partials3);
    /**
     * Calculates partial likelihoods at a node.
     *
     * @param nodeIndex1 the 'child 1' node
     * @param nodeIndex2 the 'child 2' node
     * @param nodeIndex3 the 'parent' node
     * @param matrixMap  a map of which matrix to use for each pattern (can be null if integrating over categories)
     */
    public void calculatePartials(int nodeIndex1, int nodeIndex2, int nodeIndex3, int[] matrixMap) {
        // Same tip/internal dispatch as the 3-argument overload, but each
        // pattern uses the matrix selected by matrixMap.
        if (states[nodeIndex1] != null) {
            if (states[nodeIndex2] != null) {
                calculateStatesStatesPruning(
                        states[nodeIndex1], matrices[currentMatricesIndices[nodeIndex1]][nodeIndex1],
                        states[nodeIndex2], matrices[currentMatricesIndices[nodeIndex2]][nodeIndex2],
                        partials[currentPartialsIndices[nodeIndex3]][nodeIndex3], matrixMap);
            } else {
                calculateStatesPartialsPruning(
                        states[nodeIndex1], matrices[currentMatricesIndices[nodeIndex1]][nodeIndex1],
                        partials[currentPartialsIndices[nodeIndex2]][nodeIndex2], matrices[currentMatricesIndices[nodeIndex2]][nodeIndex2],
                        partials[currentPartialsIndices[nodeIndex3]][nodeIndex3], matrixMap);
            }
        } else {
            if (states[nodeIndex2] != null) {
                calculateStatesPartialsPruning(
                        states[nodeIndex2], matrices[currentMatricesIndices[nodeIndex2]][nodeIndex2],
                        partials[currentPartialsIndices[nodeIndex1]][nodeIndex1], matrices[currentMatricesIndices[nodeIndex1]][nodeIndex1],
                        partials[currentPartialsIndices[nodeIndex3]][nodeIndex3], matrixMap);
            } else {
                calculatePartialsPartialsPruning(
                        partials[currentPartialsIndices[nodeIndex1]][nodeIndex1], matrices[currentMatricesIndices[nodeIndex1]][nodeIndex1],
                        partials[currentPartialsIndices[nodeIndex2]][nodeIndex2], matrices[currentMatricesIndices[nodeIndex2]][nodeIndex2],
                        partials[currentPartialsIndices[nodeIndex3]][nodeIndex3], matrixMap);
            }
        }
        if (useScaling) {
            scalePartials(nodeIndex3);
        }
    }
    /**
     * Calculates partial likelihoods at a node when both children have states.
     */
    protected abstract void calculateStatesStatesPruning(int[] states1, double[] matrices1,
                                                         int[] states2, double[] matrices2,
                                                         double[] partials3, int[] matrixMap);
    /**
     * Calculates partial likelihoods at a node when one child has states and one has partials.
     */
    protected abstract void calculateStatesPartialsPruning(int[] states1, double[] matrices1,
                                                           double[] partials2, double[] matrices2,
                                                           double[] partials3, int[] matrixMap);
    /**
     * Calculates partial likelihoods at a node when both children have partials.
     */
    protected abstract void calculatePartialsPartialsPruning(double[] partials1, double[] matrices1,
                                                             double[] partials2, double[] matrices2,
                                                             double[] partials3, int[] matrixMap);
    /**
     * Integrates the given node's current partials over rate categories into outPartials.
     */
    public void integratePartials(int nodeIndex, double[] proportions, double[] outPartials) {
        calculateIntegratePartials(partials[currentPartialsIndices[nodeIndex]][nodeIndex], proportions, outPartials);
    }
    /**
     * Integrates partials across categories.
     *
     * @param inPartials  the partials at the node to be integrated
     * @param proportions the proportions of sites in each category
     * @param outPartials an array into which the integrated partials will go
     */
    protected abstract void calculateIntegratePartials(double[] inPartials, double[] proportions, double[] outPartials);
    /**
     * Scale the partials at a given node. This uses a scaling suggested by Ziheng Yang in
     * Yang (2000) J. Mol. Evol. 51: 423-432
     * <p/>
     * This function looks over the partial likelihoods for each state at each pattern
     * and finds the largest. If this is less than the scalingThreshold (currently
     * 1.0E-100) then it rescales the partials for that pattern by dividing by this number
     * (i.e., normalizing to between 0, 1). It then stores the log of this scaling.
     * This is called for every internal node after the partials are calculated so provides
     * most of the performance hit. Ziheng suggests only doing this on a proportion of nodes
     * but this sounded like a headache to organize (and he doesn't use the threshold idea
     * which improves the performance quite a bit).
     *
     * @param nodeIndex
     */
    protected void scalePartials(int nodeIndex) {
        int u = 0;
        for (int i = 0; i < patternCount; i++) {
            // Pass 1: find the largest partial for pattern i across all states and categories.
            double scaleFactor = 0.0;
            int v = u;
            for (int k = 0; k < matrixCount; k++) {
                for (int j = 0; j < stateCount; j++) {
                    if (partials[currentPartialsIndices[nodeIndex]][nodeIndex][v] > scaleFactor) {
                        scaleFactor = partials[currentPartialsIndices[nodeIndex]][nodeIndex][v];
                    }
                    v++;
                }
                // Jump to the same pattern's block in the next category.
                v += (patternCount - 1) * stateCount;
            }
            if (scaleFactor < scalingThreshold) {
                // Pass 2: normalize this pattern by its maximum and record log(max).
                v = u;
                for (int k = 0; k < matrixCount; k++) {
                    for (int j = 0; j < stateCount; j++) {
                        partials[currentPartialsIndices[nodeIndex]][nodeIndex][v] /= scaleFactor;
                        v++;
                    }
                    v += (patternCount - 1) * stateCount;
                }
                scalingFactors[currentPartialsIndices[nodeIndex]][nodeIndex][i] = Math.log(scaleFactor);
            } else {
                scalingFactors[currentPartialsIndices[nodeIndex]][nodeIndex][i] = 0.0;
            }
            u += stateCount;
        }
    }
    /**
     * This function returns the scaling factor for that pattern by summing over
     * the log scalings used at each node. If scaling is off then this just returns
     * a 0.
     *
     * @return the log scaling factor
     */
    public double getLogScalingFactor(int pattern) {
        double logScalingFactor = 0.0;
        if (useScaling) {
            for (int i = 0; i < nodeCount; i++) {
                logScalingFactor += scalingFactors[currentPartialsIndices[i]][i][pattern];
            }
        }
        return logScalingFactor;
    }
    /**
     * Gets the partials for a particular node.
     *
     * @param nodeIndex   the node
     * @param outPartials an array into which the partials will go
     */
    public void getPartials(int nodeIndex, double[] outPartials) {
        double[] partials1 = partials[currentPartialsIndices[nodeIndex]][nodeIndex];
        System.arraycopy(partials1, 0, outPartials, 0, partialsSize);
    }
    /**
     * Store current state
     */
    public void storeState() {
        System.arraycopy(currentMatricesIndices, 0, storedMatricesIndices, 0, nodeCount);
        System.arraycopy(currentPartialsIndices, 0, storedPartialsIndices, 0, nodeCount);
    }
    /**
     * Restore the stored state
     */
    public void restoreState() {
        // Rather than copying the stored stuff back, just swap the pointers...
        int[] tmp1 = currentMatricesIndices;
        currentMatricesIndices = storedMatricesIndices;
        storedMatricesIndices = tmp1;
        int[] tmp2 = currentPartialsIndices;
        currentPartialsIndices = storedPartialsIndices;
        storedPartialsIndices = tmp2;
    }
}
| lgpl-2.1 |
mkrajcov/testsuite | src/main/java/org/jboss/hal/testsuite/page/config/NetworkInterfacesPage.java | 775 | package org.jboss.hal.testsuite.page.config;
import org.jboss.arquillian.graphene.Graphene;
import org.jboss.arquillian.graphene.page.Location;
import org.jboss.hal.testsuite.fragment.config.interfaces.NetworkInterfaceContentFragment;
import org.jboss.hal.testsuite.page.ConfigPage;
import org.jboss.hal.testsuite.util.PropUtils;
import org.openqa.selenium.By;
/**
* @author mkrajcov <mkrajcov@redhat.com>
*/
@Location("#profile/interfaces")
public class NetworkInterfacesPage extends ConfigPage {

    /** Locator for the page's main content area. */
    private static final By CONTENT_AREA = By.id(PropUtils.get("page.content.area.id"));

    /**
     * Wraps the network-interfaces content area in a page fragment.
     *
     * @return the content fragment of this page
     */
    public NetworkInterfaceContentFragment getContent() {
        return Graphene.createPageFragment(
                NetworkInterfaceContentFragment.class,
                getContentRoot().findElement(CONTENT_AREA));
    }
}
| lgpl-2.1 |
fpuna-cia/karaku | src/main/java/py/una/pol/karaku/services/server/KarakuWSAuthenticationProvider.java | 3356 | /*-
* Copyright (c)
*
* 2012-2014, Facultad Politécnica, Universidad Nacional de Asunción.
* 2012-2014, Facultad de Ciencias Médicas, Universidad Nacional de Asunción.
* 2012-2013, Centro Nacional de Computación, Universidad Nacional de Asunción.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA
*/
package py.una.pol.karaku.services.server;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.authentication.dao.AbstractUserDetailsAuthenticationProvider;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import py.una.pol.karaku.security.KarakuUser;
import py.una.pol.karaku.security.KarakuUserService;
/**
 * Class that provides authentication for users.
 *
 * <p>
 * It is designed to be used with a
 * {@link org.springframework.security.web.authentication.www.BasicAuthenticationEntryPoint}
 * , but, by inheriting from
 * {@link AbstractUserDetailsAuthenticationProvider}, it also provides a
 * mechanism to authenticate any scheme based on user name and password.
 * </p>
 * <p>
 * It relies on {@link KarakuUserService} to authenticate the user and then to
 * obtain its permissions. This happens in two phases: first, in
 * {@link #retrieveUser(String, UsernamePasswordAuthenticationToken)}, the user
 * is authenticated and granted access to expressions such as
 * 'isAuthenticated()'; then, in
 * {@link #additionalAuthenticationChecks(UserDetails, UsernamePasswordAuthenticationToken)},
 * the permissions required for navigation are loaded.
 * </p>
 *
 * @author Arturo Volpe
 * @since 2.2
 * @version 1.0 Aug 6, 2013
 * @see UserDetails
 *
 */
public class KarakuWSAuthenticationProvider extends
		AbstractUserDetailsAuthenticationProvider implements
		AuthenticationProvider {
	@Autowired
	private KarakuUserService userService;
	/**
	 * Loads the authorization (roles/permissions) of an already authenticated
	 * user into its {@link UserDetails}.
	 */
	@Override
	protected void additionalAuthenticationChecks(UserDetails userDetails,
			UsernamePasswordAuthenticationToken authentication) {
		userService.loadAuthorization(userDetails);
	}
	/**
	 * Authenticates the user by UID against the user service.
	 *
	 * @return a minimal {@link KarakuUser} when the credentials are valid
	 * @throws UsernameNotFoundException when authentication fails
	 */
	@Override
	protected UserDetails retrieveUser(String username,
			UsernamePasswordAuthenticationToken authentication) {
		if (userService.checkAuthenthicationByUID(username, authentication
				.getCredentials().toString())) {
			KarakuUser user = new KarakuUser();
			user.setUserName(username);
			return user;
		}
		throw new UsernameNotFoundException(username);
	}
}
| lgpl-2.1 |
ggiudetti/opencms-core | src/org/opencms/ui/dialogs/history/diff/CmsImageDiff.java | 4356 | /*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH & Co. KG (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.ui.dialogs.history.diff;
import org.opencms.file.CmsObject;
import org.opencms.file.CmsResource;
import org.opencms.file.history.CmsHistoryResourceHandler;
import org.opencms.file.types.CmsResourceTypeImage;
import org.opencms.gwt.shared.CmsHistoryResourceBean;
import org.opencms.main.CmsException;
import org.opencms.main.OpenCms;
import org.opencms.util.CmsRequestUtil;
import org.opencms.workplace.comparison.CmsHistoryListUtil;
import com.google.common.base.Optional;
import com.vaadin.server.ExternalResource;
import com.vaadin.ui.Component;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Image;
import com.vaadin.ui.Panel;
/**
 * Renders a side-by-side, thumbnail-scaled comparison of two image versions.<p>
 */
public class CmsImageDiff implements I_CmsDiffProvider {

    /** Width, in pixels, used for both comparison thumbnails. */
    private static final int THUMBNAIL_WIDTH = 400;

    /**
     * @see org.opencms.ui.dialogs.history.diff.I_CmsDiffProvider#diff(org.opencms.file.CmsObject, org.opencms.gwt.shared.CmsHistoryResourceBean, org.opencms.gwt.shared.CmsHistoryResourceBean)
     */
    public Optional<Component> diff(CmsObject cms, CmsHistoryResourceBean v1, CmsHistoryResourceBean v2)
    throws CmsException {

        CmsResource resource = A_CmsAttributeDiff.readResource(cms, v1);
        boolean isImage = OpenCms.getResourceManager().matchResourceType(
            CmsResourceTypeImage.getStaticTypeName(),
            resource.getTypeId());
        if (!isImage) {
            // Not an image: let another diff provider handle the comparison.
            return Optional.absent();
        }
        int height = (2 * THUMBNAIL_WIDTH) / 3;
        // Scale type 1: thumbnail mode, the source image is never enlarged.
        String scaleParams = "w:" + THUMBNAIL_WIDTH + ",h:" + height + ",t:1";
        Image first = createThumbnail(cms, v1, scaleParams);
        first.setCaption("V1");
        Image second = createThumbnail(cms, v2, scaleParams);
        second.setCaption("V2");
        HorizontalLayout layout = new HorizontalLayout();
        layout.setSpacing(true);
        layout.setMargin(true);
        layout.addComponent(first);
        layout.addComponent(second);
        Panel panel = new Panel("Image comparison");
        panel.setContent(layout);
        return Optional.of((Component)panel);
    }

    /**
     * Builds a scaled image component for one version of the compared resource.<p>
     *
     * @param cms the CMS context
     * @param version the resource version to display
     * @param scaleParams the scaler parameters to append to the image link
     *
     * @return the image component
     */
    private Image createThumbnail(CmsObject cms, CmsHistoryResourceBean version, String scaleParams) {

        // Use the explicit version number, or the offline marker for the current version.
        String versionParam = version.getVersion().getVersionNumber() != null
        ? "" + version.getVersion().getVersionNumber()
        : "" + CmsHistoryResourceHandler.PROJECT_OFFLINE_VERSION;
        String link = OpenCms.getLinkManager().substituteLinkForUnknownTarget(
            cms,
            CmsHistoryListUtil.getHistoryLink(cms, version.getStructureId(), versionParam));
        link = CmsRequestUtil.appendParameter(link, "__scale", scaleParams);
        Image image = new Image("", new ExternalResource(link));
        image.setWidth("" + THUMBNAIL_WIDTH + "px");
        return image;
    }
}
| lgpl-2.1 |
tomazzupan/wildfly | clustering/server/src/main/java/org/wildfly/clustering/server/registry/CacheRegistry.java | 13247 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2014, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.wildfly.clustering.server.registry;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.AbstractMap;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.infinispan.Cache;
import org.infinispan.commons.CacheException;
import org.infinispan.context.Flag;
import org.infinispan.distribution.ch.ConsistentHash;
import org.infinispan.filter.KeyFilter;
import org.infinispan.notifications.cachelistener.annotation.CacheEntryCreated;
import org.infinispan.notifications.cachelistener.annotation.CacheEntryModified;
import org.infinispan.notifications.cachelistener.annotation.CacheEntryRemoved;
import org.infinispan.notifications.cachelistener.annotation.TopologyChanged;
import org.infinispan.notifications.cachelistener.event.CacheEntryEvent;
import org.infinispan.notifications.cachelistener.event.CacheEntryRemovedEvent;
import org.infinispan.notifications.cachelistener.event.Event;
import org.infinispan.notifications.cachelistener.event.TopologyChangedEvent;
import org.infinispan.remoting.transport.Address;
import org.jboss.as.clustering.logging.ClusteringLogger;
import org.jboss.threads.JBossThreadFactory;
import org.wildfly.clustering.ee.Batch;
import org.wildfly.clustering.ee.Batcher;
import org.wildfly.clustering.group.Group;
import org.wildfly.clustering.group.Node;
import org.wildfly.clustering.group.NodeFactory;
import org.wildfly.clustering.registry.Registry;
import org.wildfly.clustering.server.logging.ClusteringServerLogger;
import org.wildfly.clustering.service.concurrent.ClassLoaderThreadFactory;
import org.wildfly.security.manager.WildFlySecurityManager;
/**
* Clustered {@link Registry} backed by an Infinispan cache.
* @author Paul Ferraro
* @param <K> key type
* @param <V> value type
*/
@org.infinispan.notifications.Listener
public class CacheRegistry<K, V> implements Registry<K, V>, KeyFilter<Object> {
    /**
     * Creates a thread factory whose threads use the class loader of the given class.
     * Both the factory and the class loader lookup run in privileged blocks so this
     * works when a security manager is installed.
     */
    private static ThreadFactory createThreadFactory(Class<?> targetClass) {
        PrivilegedAction<ThreadFactory> action = () -> new JBossThreadFactory(new ThreadGroup(targetClass.getSimpleName()), Boolean.FALSE, null, "%G - %t", null, null);
        return new ClassLoaderThreadFactory(WildFlySecurityManager.doUnchecked(action),
                AccessController.doPrivileged((PrivilegedAction<ClassLoader>) () -> targetClass.getClassLoader()));
    }
    // Serializes reactions to topology changes; single thread preserves event order.
    private final ExecutorService topologyChangeExecutor = Executors.newSingleThreadExecutor(createThreadFactory(this.getClass()));
    // One single-threaded executor per listener so each listener sees notifications in order.
    private final Map<Registry.Listener<K, V>, ExecutorService> listeners = new ConcurrentHashMap<>();
    // Backing cache: one registry entry per group member, keyed by node.
    private final Cache<Node, Map.Entry<K, V>> cache;
    private final Batcher<? extends Batch> batcher;
    private final Group group;
    // Translates Infinispan addresses into cluster Node objects.
    private final NodeFactory<Address> factory;
    // Invoked from close() so the owner can release associated resources.
    private final Runnable closeTask;
    // This node's immutable registry entry.
    private final Map.Entry<K, V> entry;
    /**
     * Creates a registry backed by the configured cache, publishes this node's
     * entry, and starts listening for remote cache events.
     *
     * @param config supplies the cache, batcher, group and node factory
     * @param entry this node's registry entry (defensively copied)
     * @param closeTask task to run when this registry is closed
     */
    public CacheRegistry(CacheRegistryConfiguration<K, V> config, Map.Entry<K, V> entry, Runnable closeTask) {
        this.cache = config.getCache();
        this.batcher = config.getBatcher();
        this.group = config.getGroup();
        this.factory = config.getNodeFactory();
        this.closeTask = closeTask;
        // Copy so later mutation of the caller's entry cannot affect the registry.
        this.entry = new AbstractMap.SimpleImmutableEntry<>(entry);
        this.populateRegistry();
        // Filtered listener: only Node-keyed entries generate events (see accept()).
        this.cache.addListener(this, new CacheRegistryFilter());
    }
    /**
     * Writes this node's registry entry into the cache within a single batch.
     */
    private void populateRegistry() {
        try (Batch batch = this.batcher.createBatch()) {
            // IGNORE_RETURN_VALUES: any previous mapping is irrelevant here.
            this.cache.getAdvancedCache().withFlags(Flag.IGNORE_RETURN_VALUES).put(this.group.getLocalNode(), this.entry);
        }
    }
    /**
     * {@link KeyFilter} implementation: restricts cache listener events to
     * {@link Node}-keyed entries, i.e. registry entries.
     */
    @Override
    public boolean accept(Object key) {
        return key instanceof Node;
    }
    /**
     * Unregisters the cache listener, removes this node's entry (best effort),
     * shuts down all listener executors, and finally runs the configured close task.
     */
    @Override
    public void close() {
        this.cache.removeListener(this);
        this.shutdown(this.topologyChangeExecutor);
        Node node = this.getGroup().getLocalNode();
        try (Batch batch = this.batcher.createBatch()) {
            // If this remove fails, the entry will be auto-removed on topology change by the new primary owner
            this.cache.getAdvancedCache().withFlags(Flag.IGNORE_RETURN_VALUES, Flag.FAIL_SILENTLY).remove(node);
        } catch (CacheException e) {
            ClusteringLogger.ROOT_LOGGER.warn(e.getLocalizedMessage(), e);
        } finally {
            // Cleanup any unregistered listeners
            this.listeners.values().forEach(executor -> this.shutdown(executor));
            this.listeners.clear();
            this.closeTask.run();
        }
    }
    /**
     * Registers a listener; its notifications are delivered sequentially on a
     * dedicated single-threaded executor. Re-registering is a no-op.
     */
    @Override
    public void addListener(Registry.Listener<K, V> listener) {
        this.listeners.computeIfAbsent(listener, key -> Executors.newSingleThreadExecutor(createThreadFactory(listener.getClass())));
    }
@Override
public void removeListener(Registry.Listener<K, V> listener) {
ExecutorService executor = this.listeners.remove(listener);
if (executor != null) {
this.shutdown(executor);
}
}
    /**
     * @return the group of nodes backing this registry
     */
    @Override
    public Group getGroup() {
        return this.group;
    }
@Override
public Map<K, V> getEntries() {
Set<Node> nodes = this.group.getNodes().stream().collect(Collectors.toSet());
try (Batch batch = this.batcher.createBatch()) {
return this.cache.getAdvancedCache().getAll(nodes).values().stream().collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue()));
}
}
    /**
     * Returns the registry entry of the given node, or null if none is registered.
     */
    @Override
    public Map.Entry<K, V> getEntry(Node node) {
        try (Batch batch = this.batcher.createBatch()) {
            return this.cache.get(node);
        }
    }
    /**
     * Handles cache topology changes. When members leave the view, the new primary
     * owner of each departed key purges that member's entry and notifies listeners.
     * When no member left (a merge after a cluster split), this node restores its
     * own entry if it was lost during the split.
     */
    @TopologyChanged
    public void topologyChanged(TopologyChangedEvent<Node, Map.Entry<K, V>> event) {
        // Only react to the post-change notification.
        if (event.isPre()) return;
        ConsistentHash previousHash = event.getConsistentHashAtStart();
        List<Address> previousMembers = previousHash.getMembers();
        ConsistentHash hash = event.getConsistentHashAtEnd();
        List<Address> members = hash.getMembers();
        Address localAddress = event.getCache().getCacheManager().getAddress();
        // Determine which nodes have left the cache view
        Set<Address> addresses = new HashSet<>(previousMembers);
        addresses.removeAll(members);
        try {
            // Perform purge/restore off the notification thread, serially.
            this.topologyChangeExecutor.submit(() -> {
                if (!addresses.isEmpty()) {
                    // We're only interested in the entries for which we are the primary owner
                    List<Node> nodes = addresses.stream().filter(address -> hash.locatePrimaryOwner(address).equals(localAddress)).map(address -> this.factory.createNode(address)).collect(Collectors.toList());
                    if (!nodes.isEmpty()) {
                        Cache<Node, Map.Entry<K, V>> cache = this.cache.getAdvancedCache().withFlags(Flag.FORCE_SYNCHRONOUS);
                        Map<K, V> removed = new HashMap<>();
                        try (Batch batch = this.batcher.createBatch()) {
                            for (Node node: nodes) {
                                Map.Entry<K, V> old = cache.remove(node);
                                if (old != null) {
                                    removed.put(old.getKey(), old.getValue());
                                }
                            }
                        } catch (CacheException e) {
                            ClusteringServerLogger.ROOT_LOGGER.registryPurgeFailed(e, this.cache.getCacheManager().toString(), this.cache.getName(), nodes);
                        }
                        // Invoke listeners outside above tx context
                        if (!removed.isEmpty()) {
                            this.notifyListeners(Event.Type.CACHE_ENTRY_REMOVED, removed);
                        }
                    }
                } else {
                    // This is a merge after cluster split: re-populate the cache registry with lost registry entries
                    if (!previousMembers.contains(localAddress)) {
                        // If this node is not a member at merge start, its mapping is lost and needs to be recreated and listeners notified
                        try {
                            this.populateRegistry();
                            // Local cache events do not trigger notifications
                            this.notifyListeners(Event.Type.CACHE_ENTRY_CREATED, this.entry);
                        } catch (CacheException e) {
                            ClusteringServerLogger.ROOT_LOGGER.failedToRestoreLocalRegistryEntry(e, this.cache.getCacheManager().toString(), this.cache.getName());
                        }
                    }
                }
            });
        } catch (RejectedExecutionException e) {
            // Executor was shutdown
        }
    }
@CacheEntryCreated
@CacheEntryModified
public void event(CacheEntryEvent<Node, Map.Entry<K, V>> event) {
    // Only remote, post-commit events are of interest; locally originated
    // changes and pre-notifications are ignored.
    if (event.isOriginLocal() || event.isPre()) {
        return;
    }
    // Skip the lookup entirely when nobody is listening.
    if (this.listeners.isEmpty()) {
        return;
    }
    Map.Entry<K, V> entry = event.getValue();
    if (entry == null) {
        return;
    }
    this.notifyListeners(event.getType(), entry);
}
@CacheEntryRemoved
public void removed(CacheEntryRemovedEvent<Node, Map.Entry<K, V>> event) {
    // Only remote, post-commit removals are of interest.
    if (event.isOriginLocal() || event.isPre()) {
        return;
    }
    if (this.listeners.isEmpty()) {
        return;
    }
    Map.Entry<K, V> entry = event.getOldValue();
    // WFLY-4938 For some reason, the old value can be null
    if (entry == null) {
        return;
    }
    this.notifyListeners(event.getType(), entry);
}
/**
 * Convenience overload: notifies all registered listeners about a single
 * entry by wrapping it in a singleton map and delegating to
 * {@code notifyListeners(Event.Type, Map)}.
 */
private void notifyListeners(Event.Type type, Map.Entry<K, V> entry) {
    this.notifyListeners(type, Collections.singletonMap(entry.getKey(), entry.getValue()));
}
/**
 * Asynchronously notifies every registered listener about a batch of
 * created/modified/removed entries.  Each listener is invoked on its own
 * {@link ExecutorService} (the map value paired with the listener), so a
 * slow listener cannot block the cache notification thread.  Listener
 * failures are logged, never propagated.
 */
private void notifyListeners(Event.Type type, Map<K, V> entries) {
    for (Map.Entry<Listener<K, V>, ExecutorService> entry: this.listeners.entrySet()) {
        Listener<K, V> listener = entry.getKey();
        ExecutorService executor = entry.getValue();
        try {
            executor.submit(() -> {
                try {
                    // Dispatch to the listener callback matching the event type.
                    switch (type) {
                        case CACHE_ENTRY_CREATED: {
                            listener.addedEntries(entries);
                            break;
                        }
                        case CACHE_ENTRY_MODIFIED: {
                            listener.updatedEntries(entries);
                            break;
                        }
                        case CACHE_ENTRY_REMOVED: {
                            listener.removedEntries(entries);
                            break;
                        }
                        default: {
                            // Only the three entry event types above are expected here.
                            throw new IllegalStateException(type.name());
                        }
                    }
                } catch (Throwable e) {
                    // Catch Throwable so a misbehaving listener cannot kill the executor thread.
                    ClusteringServerLogger.ROOT_LOGGER.registryListenerFailed(e, this.cache.getCacheManager().getCacheManagerConfiguration().globalJmxStatistics().cacheManagerName(), this.cache.getName(), type, entries);
                }
            });
        } catch (RejectedExecutionException e) {
            // Executor was shutdown
        }
    }
}
/**
 * Shuts down the given executor immediately (inside a privileged action so
 * this works under a security manager) and waits for its tasks to finish,
 * bounded by the cache's configured stop timeout.  If interrupted while
 * waiting, the interrupt status is restored for the caller.
 */
private void shutdown(ExecutorService executor) {
    PrivilegedAction<List<Runnable>> action = () -> executor.shutdownNow();
    WildFlySecurityManager.doUnchecked(action);
    try {
        executor.awaitTermination(this.cache.getCacheConfiguration().transaction().cacheStopTimeout(), TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        // Restore the interrupt flag rather than swallowing it.
        Thread.currentThread().interrupt();
    }
}
}
| lgpl-2.1 |
gwoptics/gwoptics_plib | src/org/gwoptics/graphics/graph2D/traces/IGraph2DTrace.java | 3265 | /**
* Copyright notice
*
* This file is part of the Processing library `gwoptics'
* http://www.gwoptics.org/processing/gwoptics_p5lib/
*
* Copyright (C) 2009 onwards Daniel Brown and Andreas Freise
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License version 2.1 as published
* by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.gwoptics.graphics.graph2D.traces;
import org.gwoptics.graphics.IRenderable;
import org.gwoptics.graphics.graph2D.IGraph2D;
import org.gwoptics.graphics.graph2D.effects.ITraceColourEffect;
/**
 * <p> This interface provides the functionality that is required of a trace
 * object, to be used by the Graph2D control. </p> <p> Classes implementing this
 * interface should be programmed for efficiency, as they are called many times
 * per draw loop. The intended way to work is to store the points of the line in
 * an internal array. When the draw() method is called from the IRenderable
 * interface the method should use the array for plotting the trace. Usually the
 * trace will not change so it is unnecessary to recalculate the equations
 * every time. </p> <p> Traces also have the option of having an
 * ITraceColourEffect applied to it. The draw() method should use this effect
 * object to determine the colour of the trace at given points. </p>
 *
 * @author Daniel Brown 13/7/09
 * @since 0.4.0
 * @see ITraceColourEffect
 * @see ILine2DEquation
 */
public interface IGraph2DTrace extends IRenderable {

  /**
   * Sets an internal variable to store a reference to the graph object the
   * trace is being plotted on
   */
  void setGraph(IGraph2D grp);

  /**
   * This is called every time the equation callback object is changed.
   */
  void generate();

  /**
   * alters the initial position of the trace on the graph
   */
  void setPosition(int x, int y);

  /**
   * <p> Before the trace is added to the graph control this method is called.
   * It allows a trace to check the settings of other traces that have
   * previously been added for incompatibilities. Leave method empty in
   * implementation if no checks are necessary. </p> <p> onAddTrace is called
   * from with a synchronised lock so the traces object won't be modified whilst
   * reading it. Therefore it is not necessary to provide custom thread locks.
   * </p>
   */
  void onAddTrace(Object traces[]);

  /**
   * <p> Before the trace is officially removed from the trace list of a Graph2D
   * object, the onRemove method is called. This allows the trace object to
   * provide any cleanup needed, if at all needed. Leave blank if nothing is
   * needed.
   * </p>
   */
  void onRemoveTrace();
}
| lgpl-2.1 |
hy2708/hy2708-repository | java/commons/commons-lang/src/main/java/org/hy/commons/lang/timer/TimerTaskProxy.java | 1420 | package org.hy.commons.lang.timer;
import java.util.Timer;
import java.util.TimerTask;
import org.apache.commons.logging.LogFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* *
*
* @author uwaysoft 魏韶颖 时间:2009年4月20日15:02:58
*/
/**
 * A {@link TimerTask} proxy that instantiates a user-supplied TimerTask
 * subclass by reflection, delegates each {@link #run()} to it, and cancels
 * the owning {@link Timer} after a fixed number of executions.
 *
 * @author uwaysoft 魏韶颖 2009-04-20
 */
public class TimerTaskProxy extends TimerTask {

    Logger logger = LoggerFactory.getLogger(TimerTaskProxy.class);

    /** Total number of executions after which the timer is cancelled. */
    private final int count;

    /** The timer driving this task; cancelled once {@link #count} runs complete. */
    private final Timer timer;

    /** Number of completed executions so far. */
    private int hasExeNum = 0;

    /** Delegate task created by reflection; {@code null} if instantiation failed. */
    private TimerTask timerTask;

    /**
     * Creates a proxy that delegates to the given TimerTask subclass.
     *
     * @param subClassName fully qualified name of a {@link TimerTask} subclass
     *                     with an accessible no-argument constructor
     * @param count        how many times the task should execute before the
     *                     timer is cancelled
     * @param timer        the timer scheduling this proxy
     */
    public TimerTaskProxy(String subClassName, int count, Timer timer) {
        super();
        this.count = count;
        this.timer = timer;
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance() and wraps constructor exceptions properly.
            this.timerTask = (TimerTask) Class.forName(subClassName)
                    .getDeclaredConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
            // Original code swallowed these via printStackTrace(), which left
            // timerTask null and caused an NPE on the timer thread later.
            logger.error("Failed to instantiate timer task {}", subClassName, e);
        }
    }

    @Override
    public void run() {
        if (timerTask == null) {
            // Instantiation failed in the constructor; cancel the timer so this
            // broken task is not rescheduled forever.
            logger.error("Timer task was never instantiated; cancelling timer");
            timer.cancel();
            return;
        }
        // Polymorphic dispatch: invokes the run() of the reflectively created subclass.
        timerTask.run();
        hasExeNum++;
        if (count == hasExeNum) {
            timer.cancel();
            logger.info("定时任务执行完毕,线程成功终止!");
        }
    }
}
| lgpl-3.0 |
imyousuf/smart-dao | smart-dao-queryparam/src/main/java/com/smartitengineering/dao/common/queryparam/CompoundQueryParameter.java | 1217 | /*
* This is a common dao with basic CRUD operations and is not limited to any
* persistent layer implementation
*
* Copyright (C) 2008 Imran M Yousuf (imyousuf@smartitengineering.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package com.smartitengineering.dao.common.queryparam;
import java.util.Collection;
/**
 * A query parameter that aggregates other query parameters, allowing
 * compound criteria to be expressed as a single parameter.
 *
 * @author imyousuf
 */
public interface CompoundQueryParameter<Template extends Object>
    extends QueryParameter<Template> {

    /**
     * Returns the query parameters nested inside this compound parameter.
     * Note the elements are raw-typed {@link QueryParameter}s, so callers
     * must handle them without generic type information.
     */
    public Collection<QueryParameter> getNestedParameters();
}
| lgpl-3.0 |
loftuxab/community-edition-old | modules/sharepoint/amp/source/java/org/alfresco/module/vti/web/actions/VtiIfHeaderAction.java | 3337 | /*
* Copyright (C) 2005-2013 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.module.vti.web.actions;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.alfresco.module.vti.handler.MethodHandler;
import org.alfresco.module.vti.web.VtiAction;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * <p>VtiIfHeaderAction reconciles the client-side and server-side versions of
 * a document: GET serves the server copy for merging, PUT stores the client
 * copy, and {@link #execute} routes a request through the servlet machinery
 * while logging any failure at debug level.</p>
 *
 * @author PavelYur
 */
public class VtiIfHeaderAction extends HttpServlet implements VtiAction
{
    private static final long serialVersionUID = 3119971805600532320L;

    private final static Log logger = LogFactory.getLog(VtiIfHeaderAction.class);

    private MethodHandler handler;

    /**
     * <p>Injects the {@link org.alfresco.module.vti.handler.MethodHandler}
     * that performs the actual resource operations.</p>
     *
     * @param handler the handler to delegate resource access to
     */
    public void setHandler(MethodHandler handler)
    {
        this.handler = handler;
    }

    /**
     * <p>Serves the server-side version of the document for merging.</p>
     *
     * @param request HTTP request
     * @param response HTTP response
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
    {
        handler.existResource(request, response);
    }

    /**
     * <p>Stores the client-side version of the document while merging.</p>
     *
     * @param request HTTP request
     * @param response HTTP response
     */
    @Override
    protected void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
    {
        handler.putResource(request, response);
    }

    /**
     * <p>Merge between client document version and server document version.</p>
     *
     * @param request HTTP request
     * @param response HTTP response
     */
    public void execute(HttpServletRequest request, HttpServletResponse response)
    {
        try
        {
            service(request, response);
        }
        catch (IOException e)
        {
            debug("Action IO exception", e);
        }
        catch (ServletException e)
        {
            debug("Action execution exception", e);
        }
    }

    /**
     * Logs an exception at debug level, guarding against the cost of the
     * call when debug logging is disabled.
     */
    private static void debug(String message, Exception e)
    {
        if (logger.isDebugEnabled())
        {
            logger.debug(message, e);
        }
    }
}
| lgpl-3.0 |
simeshev/parabuild-ci | test/src/org/parabuild/ci/build/log/SSTestBoostTestLogHandler.java | 5198 | /*
* Parabuild CI licenses this file to You under the LGPL 2.1
* (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.gnu.org/licenses/lgpl-3.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.parabuild.ci.build.log;
import com.gargoylesoftware.base.testing.OrderedTestSuite;
import junit.framework.TestSuite;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.parabuild.ci.TestHelper;
import org.parabuild.ci.build.AbstractCustomLogTest;
import org.parabuild.ci.common.IoUtils;
import org.parabuild.ci.object.LogConfig;
import org.parabuild.ci.object.StepRunAttribute;
import java.io.File;
import java.io.FileFilter;
import java.util.Map;
/**
* Tests NUnitLogHandler
*
* @see NUnitLogHandler
* @see org.parabuild.ci.build.AbstractCustomLogTest
*/
public class SSTestBoostTestLogHandler extends AbstractCustomLogTest {
/**
* @noinspection UNUSED_SYMBOL
*/
private static final Log log = LogFactory.getLog(SSTestBoostTestLogHandler.class);
private BoostTestLogHandler logHandler = null;
/**
* @see org.parabuild.ci.build.AbstractCustomLogTest#processLogs
*/
protected void processLogs() {
this.logHandler.process();
}
/**
* @return log type handler being tested
*/
protected byte logTypeBeingTested() {
return LogConfig.LOG_TYPE_BOOST_TEST_XML_DIR;
}
/**
* Should return ID of log config to be used to configure log
* handler.
*/
protected int logConfigID() {
return 31;
}
/**
* Return a string to be found in search after calling
* processLogs.
*
* @return
* @see org.parabuild.ci.build.AbstractCustomLogTest - parent class that will call
* this method after calling processLogs().
* @see #processLogs
*/
protected String stringToBeFoundBySearch() {
return ""; // REVIEWME: simeshev@parabuilci.org -> change to non-blank when Boost Test handler is capable of indexing.
}
public void test_savesStatistics() {
// get attr number before
final Map before = cm.getStepRunAttributesAsMap(TEST_STEP_RUN_ID);
final int countBefore = before.size();
assertAttrNotExist(before, StepRunAttribute.ATTR_BOOST_TEST_ERRORS);
assertAttrNotExist(before, StepRunAttribute.ATTR_BOOST_TEST_SUCCESSES);
assertAttrNotExist(before, StepRunAttribute.ATTR_BOOST_TEST_TESTS);
// handle
this.logHandler.process();
// check if PMD stat attr found an has appropriate value
final Map after = cm.getStepRunAttributesAsMap(TEST_STEP_RUN_ID);
final int countAfter = after.size();
assertTrue("Number of attributes should increase", countAfter > countBefore);
assertAttrEquals(after, StepRunAttribute.ATTR_BOOST_TEST_ERRORS, 4);
assertAttrEquals(after, StepRunAttribute.ATTR_BOOST_TEST_SUCCESSES, 8);
assertAttrEquals(after, StepRunAttribute.ATTR_BOOST_TEST_TESTS, 12);
}
public static void assertAttrEquals(final Map attrMap, final String attrName, final int value) {
final StepRunAttribute found = (StepRunAttribute) attrMap.get(attrName);
assertNotNull("Attribute " + attrName + " should be present", found);
assertEquals("\" + attrName + \"", value, found.getValueAsInt());
}
public static void assertAttrNotExist(final Map attrMap, final String attrName) {
assertNull("Attrbute " + attrName + " should not exist", attrMap.get(attrName));
}
protected void setUp() throws Exception {
super.setUp();
// create handler
this.logHandler = new BoostTestLogHandler(super.agent, super.buildRunConfig,
super.remoteCheckoutDir + '/' + super.relativeBuildDir,
super.logConfig, TEST_STEP_RUN_ID);
// create test log files to simulate presence of the log
// create dir
final String remoteFileSeparator = super.agent.getSystemProperty("file.separator");
final String testBuildLogDirName = super.remoteBuildDirName
+ remoteFileSeparator + logConfig.getPath().trim();
super.agent.mkdirs(testBuildLogDirName);
// list test Boost test log files
final File[] list = new File(TestHelper.getTestDataDir(), "boost").listFiles(new FileFilter() {
public boolean accept(final File pathname) {
return (!pathname.isDirectory());
}
});
// create files in the dir
for (int i = 0; i < list.length; i++) {
final String testLogFileToCreate = testBuildLogDirName + remoteFileSeparator + list[i].getName();
agent.createFile(testLogFileToCreate, IoUtils.fileToString(list[i]));
}
}
/**
* Required by JUnit
*/
public static TestSuite suite() {
return new OrderedTestSuite(SSTestBoostTestLogHandler.class,
new String[]{
"test_process"
});
}
public SSTestBoostTestLogHandler(final String s) {
super(s);
}
}
| lgpl-3.0 |
saschaiseli/opentrainingcenter_e4 | com.garmin.fit/src/com/garmin/fit/SdmProfileMesg.java | 5938 | ////////////////////////////////////////////////////////////////////////////////
// The following FIT Protocol software provided may be used with FIT protocol
// devices only and remains the copyrighted property of Dynastream Innovations Inc.
// The software is being provided on an "as-is" basis and as an accommodation,
// and therefore all warranties, representations, or guarantees of any kind
// (whether express, implied or statutory) including, without limitation,
// warranties of merchantability, non-infringement, or fitness for a particular
// purpose, are specifically disclaimed.
//
// Copyright 2014 Dynastream Innovations Inc.
////////////////////////////////////////////////////////////////////////////////
// ****WARNING**** This file is auto-generated! Do NOT edit this file.
// Profile Version = 12.10Release
// Tag = $Name$
////////////////////////////////////////////////////////////////////////////////
package com.garmin.fit;
/**
 * FIT "sdm_profile" message: settings for a stride-based speed and distance
 * monitor (SDM/footpod).  NOTE: this class is auto-generated from the FIT
 * profile (see the file header) — do not hand-edit field numbers or scales.
 */
public class SdmProfileMesg extends Mesg {

    // Prototype message describing the sdm_profile field layout
    // (field name, field number, base type, scale, offset, units, accumulated).
    protected static final Mesg sdmProfileMesg;
    static {
        // sdm_profile
        sdmProfileMesg = new Mesg("sdm_profile", MesgNum.SDM_PROFILE);
        sdmProfileMesg.addField(new Field("message_index", 254, 132, 1, 0, "", false));
        sdmProfileMesg.addField(new Field("enabled", 0, 0, 1, 0, "", false));
        sdmProfileMesg.addField(new Field("sdm_ant_id", 1, 139, 1, 0, "", false));
        sdmProfileMesg.addField(new Field("sdm_cal_factor", 2, 132, 10, 0, "%", false));
        sdmProfileMesg.addField(new Field("odometer", 3, 134, 100, 0, "m", false));
        sdmProfileMesg.addField(new Field("speed_source", 4, 0, 1, 0, "", false));
        sdmProfileMesg.addField(new Field("sdm_ant_id_trans_type", 5, 10, 1, 0, "", false));
        sdmProfileMesg.addField(new Field("odometer_rollover", 7, 2, 1, 0, "", false));
    }

    public SdmProfileMesg() {
        super(Factory.createMesg(MesgNum.SDM_PROFILE));
    }

    public SdmProfileMesg(final Mesg mesg) {
        super(mesg);
    }

    /**
     * Get message_index field
     *
     * @return message_index
     */
    public Integer getMessageIndex() {
        return getFieldIntegerValue(254, 0, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Set message_index field
     *
     * @param messageIndex
     */
    public void setMessageIndex(Integer messageIndex) {
        setFieldValue(254, 0, messageIndex, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Get enabled field
     *
     * @return enabled, or null if the field is not set
     */
    public Bool getEnabled() {
        Short value = getFieldShortValue(0, 0, Fit.SUBFIELD_INDEX_MAIN_FIELD);
        if (value == null)
            return null;
        return Bool.getByValue(value);
    }

    /**
     * Set enabled field
     *
     * @param enabled
     */
    public void setEnabled(Bool enabled) {
        setFieldValue(0, 0, enabled.value, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Get sdm_ant_id field
     *
     * @return sdm_ant_id
     */
    public Integer getSdmAntId() {
        return getFieldIntegerValue(1, 0, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Set sdm_ant_id field
     *
     * @param sdmAntId
     */
    public void setSdmAntId(Integer sdmAntId) {
        setFieldValue(1, 0, sdmAntId, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Get sdm_cal_factor field
     * Units: %
     *
     * @return sdm_cal_factor
     */
    public Float getSdmCalFactor() {
        return getFieldFloatValue(2, 0, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Set sdm_cal_factor field
     * Units: %
     *
     * @param sdmCalFactor
     */
    public void setSdmCalFactor(Float sdmCalFactor) {
        setFieldValue(2, 0, sdmCalFactor, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Get odometer field
     * Units: m
     *
     * @return odometer
     */
    public Float getOdometer() {
        return getFieldFloatValue(3, 0, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Set odometer field
     * Units: m
     *
     * @param odometer
     */
    public void setOdometer(Float odometer) {
        setFieldValue(3, 0, odometer, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Get speed_source field
     * Comment: Use footpod for speed source instead of GPS
     *
     * @return speed_source, or null if the field is not set
     */
    public Bool getSpeedSource() {
        Short value = getFieldShortValue(4, 0, Fit.SUBFIELD_INDEX_MAIN_FIELD);
        if (value == null)
            return null;
        return Bool.getByValue(value);
    }

    /**
     * Set speed_source field
     * Comment: Use footpod for speed source instead of GPS
     *
     * @param speedSource
     */
    public void setSpeedSource(Bool speedSource) {
        setFieldValue(4, 0, speedSource.value, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Get sdm_ant_id_trans_type field
     *
     * @return sdm_ant_id_trans_type
     */
    public Short getSdmAntIdTransType() {
        return getFieldShortValue(5, 0, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Set sdm_ant_id_trans_type field
     *
     * @param sdmAntIdTransType
     */
    public void setSdmAntIdTransType(Short sdmAntIdTransType) {
        setFieldValue(5, 0, sdmAntIdTransType, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Get odometer_rollover field
     * Comment: Rollover counter that can be used to extend the odometer
     *
     * @return odometer_rollover
     */
    public Short getOdometerRollover() {
        return getFieldShortValue(7, 0, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }

    /**
     * Set odometer_rollover field
     * Comment: Rollover counter that can be used to extend the odometer
     *
     * @param odometerRollover
     */
    public void setOdometerRollover(Short odometerRollover) {
        setFieldValue(7, 0, odometerRollover, Fit.SUBFIELD_INDEX_MAIN_FIELD);
    }
}
| lgpl-3.0 |
suegy/JGAP | tests/org/jgap/impl/IntegerGeneTest.java | 27531 | /*
* This file is part of JGAP.
*
* JGAP offers a dual license model containing the LGPL as well as the MPL.
*
* For licensing information please see the file license.txt included with JGAP
* or have a look at the top of class org.jgap.Chromosome which representatively
* includes the JGAP license policy applicable for any file delivered with JGAP.
*/
package org.jgap.impl;
import java.util.*;
import org.jgap.*;
import junit.framework.*;
/**
* Tests the IntegerGene class.
*
* @author Klaus Meffert
* @since 1.1
*/
public class IntegerGeneTest
extends JGAPTestCase {
/** String containing the CVS revision. Read out via reflection!*/
private final static String CVS_REVISION = "$Revision: 1.47 $";
/** Builds the JUnit suite for this test class. */
public static Test suite() {
    TestSuite suite = new TestSuite(IntegerGeneTest.class);
    return suite;
}

/** Setting an allele above the upper bound must not raise an exception. */
public void testConstruct_0()
    throws Exception {
    Gene gene = new IntegerGene(conf, 1, 100);
    //following should be possible without exception
    gene.setAllele(new Integer(101));
}

/**
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 3.1
 */
public void testConstruct_1()
    throws Exception {
    Genotype.setStaticConfiguration(conf);
    Gene gene = new IntegerGene();
    assertSame(conf, gene.getConfiguration());
}

/** toString must render bounds and allele value. */
public void testToString_0()
    throws Exception {
    Gene gene = new IntegerGene(conf, 1, 100);
    gene.setAllele(new Integer(47));
    assertEquals("IntegerGene(1,100)=47", gene.toString());
}

/** toString with a negative lower bound. */
public void testToString_1()
    throws Exception {
    Gene gene = new IntegerGene(conf, -2, 100);
    gene.setAllele(new Integer(99));
    assertEquals("IntegerGene(-2,100)=99", gene.toString());
}

/** getAllele returns the value previously set (in-range). */
public void testGetAllele_0()
    throws Exception {
    Gene gene = new IntegerGene(conf, 1, 100);
    gene.setAllele(new Integer(33));
    assertEquals(new Integer(33), gene.getAllele());
}

/** getAllele at the lower bound. */
public void testGetAllele_1()
    throws Exception {
    Gene gene = new IntegerGene(conf, 1, 100);
    gene.setAllele(new Integer(1));
    assertEquals(new Integer(1), gene.getAllele());
}

/** getAllele at the upper bound. */
public void testGetAllele_2()
    throws Exception {
    Gene gene = new IntegerGene(conf, 1, 100);
    gene.setAllele(new Integer(100));
    assertEquals(new Integer(100), gene.getAllele());
}
/** Two fresh genes with identical bounds are equal (symmetric). */
public void testEquals_0()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 1, 100);
    Gene gene2 = new IntegerGene(conf, 1, 100);
    assertTrue(gene1.equals(gene2));
    assertTrue(gene2.equals(gene1));
}

/** equals(null) must be false. */
public void testEquals_1()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 1, 100);
    assertFalse(gene1.equals(null));
}

/** Not equal to a gene of a different type (BooleanGene). */
public void testEquals_2()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 1, 100);
    assertFalse(gene1.equals(new BooleanGene(conf)));
}

/** Not equal to an arbitrary non-gene object. */
public void testEquals_3()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 1, 100);
    assertFalse(gene1.equals(new Vector()));
}

/** Differing bounds alone do not make genes unequal. */
public void testEquals_4()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 1, 100);
    Gene gene2 = new IntegerGene(conf, 1, 99);
    assertTrue(gene1.equals(gene2));
    assertTrue(gene2.equals(gene1));
}

/** IntegerGene is never equal to a DoubleGene. */
public void testEquals_5()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 1, 100);
    Gene gene2 = new DoubleGene(conf, 1, 99);
    assertFalse(gene1.equals(gene2));
    assertFalse(gene2.equals(gene1));
}

/**
 * Genes with different allele values are unequal.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.4
 */
public void testEquals_6()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 1, 100);
    Gene gene2 = new IntegerGene(conf, 1, 100);
    gene1.setAllele(new Integer(45));
    gene2.setAllele(new Integer(46));
    assertFalse(gene1.equals(gene2));
    assertFalse(gene2.equals(gene1));
}

/**
 * Uses subclass of IntegerGene: class identity matters for equality.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.4
 */
public void testEquals_7()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 1, 100);
    gene1.setAllele(new Integer(7));
    Gene gene2 = new IntegerGene2(conf, 1, 100);
    gene2.setAllele(new Integer(7));
    assertFalse(gene1.equals(gene2));
    assertFalse(gene2.equals(gene1));
}

/**
 * Compare with application data set and option for comparison activated.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.4
 */
public void testEquals_8()
    throws Exception {
    BaseGene gene1 = new IntegerGene(conf, 1, 100);
    gene1.setAllele(new Integer(7));
    gene1.setApplicationData(new Integer(7));
    gene1.setCompareApplicationData(true);
    Gene gene2 = new IntegerGene(conf, 1, 100);
    gene2.setApplicationData(new Integer(7));
    gene2.setAllele(new Integer(7));
    gene2.setCompareApplicationData(true);
    assertTrue(gene1.equals(gene2));
    assertTrue(gene2.equals(gene1));
}

/**
 * Compare with application data set and option for comparison activated.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.4
 */
public void testEquals_9()
    throws Exception {
    BaseGene gene1 = new IntegerGene(conf, 1, 100);
    gene1.setAllele(new Integer(7));
    gene1.setApplicationData(new Integer(7));
    gene1.setCompareApplicationData(true);
    Gene gene2 = new IntegerGene(conf, 1, 100);
    gene2.setCompareApplicationData(true);
    gene2.setApplicationData(new Integer(7));
    gene2.setAllele(new Integer(7));
    assertTrue(gene1.equals(gene2));
    assertTrue(gene2.equals(gene1));
}

/**
 * Differing application data makes genes unequal when comparison is on.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.4
 */
public void testEquals_9_2()
    throws Exception {
    BaseGene gene1 = new IntegerGene(conf, 1, 100);
    gene1.setAllele(new Integer(8));
    gene1.setApplicationData(new Integer(5));
    gene1.setCompareApplicationData(true);
    Gene gene2 = new IntegerGene(conf, 1, 100);
    gene2.setCompareApplicationData(true);
    gene2.setApplicationData(new Integer(7));
    gene2.setAllele(new Integer(8));
    assertFalse(gene1.equals(gene2));
    assertFalse(gene2.equals(gene1));
}

/**
 * Different allele values (one negative) are unequal.
 * @throws Exception
 * @author Klaus Meffert
 * @since 2.4
 */
public void testEquals_10()
    throws Exception {
    Configuration conf = new ConfigurationForTesting();
    conf.setRandomGenerator(new RandomGeneratorForTesting(5));
    Gene gene1 = new IntegerGene(conf, 1, 100);
    gene1.setAllele(new Integer(8));
    Gene gene2 = new IntegerGene(conf, 1, 99);
    gene2.setAllele(new Integer( -8));
    assertFalse(gene1.equals(gene2));
    assertFalse(gene2.equals(gene1));
}

/**
 * Compare with application data set but option for comparison deactivated.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.4
 */
public void testEquals_9_3()
    throws Exception {
    BaseGene gene1 = new IntegerGene(conf, 1, 100);
    gene1.setAllele(new Integer(8));
    gene1.setApplicationData(new Integer(5));
    gene1.setCompareApplicationData(false);
    Gene gene2 = new IntegerGene(conf, 1, 100);
    gene2.setCompareApplicationData(false);
    gene2.setApplicationData(new Integer(7));
    gene2.setAllele(new Integer(8));
    assertTrue(gene1.equals(gene2));
    assertTrue(gene2.equals(gene1));
}
/** intValue returns the primitive value of the allele. */
public void testIntValue_0()
    throws Exception {
    IntegerGene gene1 = new IntegerGene(conf, 1, 10000);
    gene1.setAllele(new Integer(4711));
    assertEquals(4711, gene1.intValue());
}

/** intValue with a null allele must throw NullPointerException. */
public void testIntValue_1()
    throws Exception {
    IntegerGene gene1 = new IntegerGene(conf, 1, 10000);
    gene1.setAllele(null);
    try {
        assertEquals(0, gene1.intValue());
        fail();
    } catch (NullPointerException nullex) {
        ; //this is OK
    }
}

/**
 * Set Allele to null, no exception should occur.
 *
 * @throws Exception
 */
public void testSetAllele_0()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 1, 10000);
    gene1.setAllele(null);
}

/** Setting a non-Integer allele must throw ClassCastException. */
public void testSetAllele_1()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 1, 10000);
    try {
        gene1.setAllele("22");
        fail();
    } catch (ClassCastException classex) {
        ; //this is OK
    }
}

/**
 * Set Allele to value out of bounds with bounds' dimension of 1.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 3.2
 */
public void testSetAllele_2()
    throws Exception {
    IntegerGene gene1 = new IntegerGene(conf, 0,0);
    gene1.setAllele(new Integer(1));
    assertEquals(0, gene1.intValue());
}

/**
 * Set Allele to value out of bounds with bounds' dimension of 1.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 3.2
 */
public void testSetAllele_3()
    throws Exception {
    IntegerGene gene1 = new IntegerGene(conf, 1,1);
    gene1.setAllele(new Integer(2));
    assertEquals(1, gene1.intValue());
}
/** newGene copies bounds and the constraint checker, but not the allele. */
public void testNewGene_0()
    throws Exception {
    IntegerGene gene1 = new IntegerGene(conf, 1, 10000);
    IGeneConstraintChecker checker = new GeneConstraintChecker();
    gene1.setConstraintChecker(checker);
    gene1.setAllele(new Integer(4711));
    int lower1 = gene1.getLowerBounds();
    int upper1 = gene1.getUpperBounds();
    IntegerGene gene2 = (IntegerGene) gene1.newGene();
    int lower2 = gene2.getLowerBounds();
    int upper2 = gene2.getUpperBounds();
    assertEquals(lower1, lower2);
    assertEquals(upper1, upper2);
    assertEquals(checker, gene2.getConstraintChecker());
}

/** cleanup must leave the gene unchanged. */
public void testCleanup()
    throws Exception {
    //cleanup should do nothing!
    Gene gene = new IntegerGene(conf, 1, 6);
    Gene copy = gene.newGene();
    gene.cleanup();
    assertEquals(copy, gene);
}
/** Round-trip: persistent representation survives write-then-read. */
public void testPersistentRepresentation_0()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 2, 753);
    gene1.setAllele(new Integer(45));
    String pres1 = gene1.getPersistentRepresentation();
    Gene gene2 = new IntegerGene(conf);
    gene2.setValueFromPersistentRepresentation(pres1);
    String pres2 = gene2.getPersistentRepresentation();
    assertEquals(pres1, pres2);
}

/**
 * Should be possible without exception.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testPersistentRepresentation_1()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 2, 753);
    gene1.setAllele(new Integer(45));
    gene1.setValueFromPersistentRepresentation(null);
}

/**
 * Parses value, lower and upper bound from the delimited representation.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testPersistentRepresentation_2()
    throws Exception {
    IntegerGene gene1 = new IntegerGene(conf, 2, 753);
    gene1.setAllele(new Integer(45));
    gene1.setValueFromPersistentRepresentation("2"
        + IntegerGene.PERSISTENT_FIELD_DELIMITER
        + "3"
        + IntegerGene.PERSISTENT_FIELD_DELIMITER
        + "4");
    assertEquals(2, ( (Integer) gene1.getAllele()).intValue());
    assertEquals(3, gene1.getLowerBounds());
    assertEquals(4, gene1.getUpperBounds());
}

/**
 * A "null" allele token yields a null allele.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testPersistentRepresentation_3()
    throws Exception {
    IntegerGene gene1 = new IntegerGene(conf, 2, 753);
    gene1.setAllele(new Integer(45));
    gene1.setValueFromPersistentRepresentation("null"
        + IntegerGene.PERSISTENT_FIELD_DELIMITER
        + "-3"
        + IntegerGene.PERSISTENT_FIELD_DELIMITER
        + "4");
    assertNull(gene1.getAllele());
    assertEquals( -3, gene1.getLowerBounds());
    assertEquals(4, gene1.getUpperBounds());
}

/**
 * A non-integer lower bound must raise UnsupportedRepresentationException.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testPersistentRepresentation_4()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 2, 753);
    gene1.setAllele(new Integer(45));
    try {
        gene1.setValueFromPersistentRepresentation("null"
            + IntegerGene.PERSISTENT_FIELD_DELIMITER
            + "3.5"
            + IntegerGene.PERSISTENT_FIELD_DELIMITER
            + "4");
        fail();
    } catch (UnsupportedRepresentationException uex) {
        ; //this is OK
    }
}

/**
 * A non-integer upper bound must raise UnsupportedRepresentationException.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testPersistentRepresentation_5()
    throws Exception {
    Gene gene1 = new IntegerGene(conf, 2, 753);
    gene1.setAllele(new Integer(45));
    try {
        gene1.setValueFromPersistentRepresentation("null"
            + IntegerGene.PERSISTENT_FIELD_DELIMITER
            + "3"
            + IntegerGene.PERSISTENT_FIELD_DELIMITER
            + "a");
        fail();
    } catch (UnsupportedRepresentationException uex) {
        ; //this is OK
    }
}

/**
 * Constructed IntegerGene without user initialization.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.6
 */
public void testPersistentRepresentation_6()
    throws Exception {
    IntegerGene gene1 = new IntegerGene(conf);
    assertEquals("null" + IntegerGene.PERSISTENT_FIELD_DELIMITER
        + Integer.MIN_VALUE + IntegerGene.PERSISTENT_FIELD_DELIMITER
        + Integer.MAX_VALUE,
        gene1.getPersistentRepresentation());
}
/**
 * compareTo must agree with the natural Integer ordering: first allele (58)
 * smaller than second (59).
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testCompareToNative_0()
throws Exception {
Gene gene1 = new IntegerGene(conf, 13, 65);
gene1.setAllele(new Integer(58));
Gene gene2 = new IntegerGene(conf, 53, 67);
gene2.setAllele(new Integer(59));
assertEquals( ( (Integer) gene1.getAllele()).compareTo( (Integer) gene2.
getAllele()), gene1.compareTo(gene2));
}
/**
 * compareTo must agree with the natural Integer ordering: equal alleles.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testCompareToNative_1()
throws Exception {
Gene gene1 = new IntegerGene(conf, 13, 65);
gene1.setAllele(new Integer(58));
Gene gene2 = new IntegerGene(conf, 53, 67);
gene2.setAllele(new Integer(58));
assertEquals( ( (Integer) gene1.getAllele()).compareTo( (Integer) gene2.
getAllele()), gene1.compareTo(gene2));
}
/**
 * compareTo must agree with the natural Integer ordering: first allele (59)
 * greater than second (58).
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testCompareToNative_2()
throws Exception {
Gene gene1 = new IntegerGene(conf, 13, 65);
gene1.setAllele(new Integer(59));
Gene gene2 = new IntegerGene(conf, 53, 67);
gene2.setAllele(new Integer(58));
assertEquals( ( (Integer) gene1.getAllele()).compareTo( (Integer) gene2.
getAllele()), gene1.compareTo(gene2));
}
/**
 * compareTo must agree with the natural Integer ordering for a negative
 * second allele.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testCompareToNative_3()
throws Exception {
Configuration conf = new ConfigurationForTesting();
Gene gene1 = new IntegerGene(conf, 13, 65);
gene1.setAllele(new Integer(59));
Gene gene2 = new IntegerGene(conf, 53, 67);
gene2.setAllele(new Integer( -59));
assertEquals( ( (Integer) gene1.getAllele()).compareTo( (Integer) gene2.
getAllele()), gene1.compareTo(gene2));
}
/**
 * compareTo must agree with the natural Integer ordering: 0 and -0 are the
 * same int value, so the genes compare as equal.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testCompareToNative_4()
throws Exception {
Configuration conf = new ConfigurationForTesting();
Gene gene1 = new IntegerGene(conf, 13, 65);
gene1.setAllele(new Integer(0));
Gene gene2 = new IntegerGene(conf, 53, 67);
gene2.setAllele(new Integer( -0));
assertEquals( ( (Integer) gene1.getAllele()).compareTo( (Integer) gene2.
getAllele()), gene1.compareTo(gene2));
}
/**
 * Comparing against an instance of a different Gene class (IntegerGene2)
 * must raise a ClassCastException.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.4
 */
public void testCompareTo_0()
throws Exception {
Configuration conf = new ConfigurationForTesting();
Gene gene1 = new IntegerGene(conf, 13, 65);
gene1.setAllele(new Integer(0));
Gene gene2 = new IntegerGene2(conf, 53, 67);
gene2.setAllele(new Integer( -0));
try {
gene1.compareTo(gene2);
fail();
} catch (ClassCastException cex) {
; //this is OK - different gene classes must not be comparable
}
}
/**
 * Smaller allele compares as -1 against a greater one.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testCompareTo_1()
throws Exception {
Gene gene1 = new IntegerGene(conf, 13, 65);
gene1.setAllele(new Integer(58));
Gene gene2 = new IntegerGene(conf, 53, 67);
gene2.setAllele(new Integer(59));
assertEquals( -1, gene1.compareTo(gene2));
}
/**
 * Equal alleles compare as 0 (bounds are irrelevant for comparison).
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testCompareTo_2()
throws Exception {
Gene gene1 = new IntegerGene(conf, 13, 65);
gene1.setAllele(new Integer(58));
Gene gene2 = new IntegerGene(conf, 53, 67);
gene2.setAllele(new Integer(58));
assertEquals(0, gene1.compareTo(gene2));
}
/**
 * Greater allele compares as 1, and the comparison is antisymmetric.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testCompareTo_3()
throws Exception {
Gene gene1 = new IntegerGene(conf, 13, 65);
gene1.setAllele(new Integer(59));
Gene gene2 = new IntegerGene(conf, 53, 67);
gene2.setAllele(new Integer(58));
assertEquals(1, gene1.compareTo(gene2));
assertEquals( -1, gene2.compareTo(gene1));
}
/**
 * Two genes without alleles set compare as equal in both directions.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testCompareTo_4()
throws Exception {
Gene gene1 = new IntegerGene(conf, 13, 65);
Gene gene2 = new IntegerGene(conf, 53, 67);
assertEquals(0, gene1.compareTo(gene2));
assertEquals(0, gene2.compareTo(gene1));
}
/**
 * A mutation percentage of 0 must leave the allele untouched.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testApplyMutation_0()
throws Exception {
IntegerGene gene = new IntegerGene(conf, 0, 100);
gene.setAllele(new Integer(50));
gene.applyMutation(0, 0.0d);
assertEquals(50, gene.intValue());
}
/**
 * Positive mutation adds percentage * (upper - lower) to the allele.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testApplyMutation_1()
throws Exception {
conf.setRandomGenerator(new RandomGeneratorForTesting(15));
IntegerGene gene = new IntegerGene(conf, 0, 100);
gene.setAllele(new Integer(50));
gene.applyMutation(0, 0.5d);
// expected: allele + range * percentage
assertEquals(Math.round(50 + (100 - 0) * 0.5d), gene.intValue());
}
/**
 * Same as above with a non-zero lower bound (range = 100 - 44).
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testApplyMutation_2()
throws Exception {
conf.setRandomGenerator(new RandomGeneratorForTesting(15));
IntegerGene gene = new IntegerGene(conf, 44, 100);
gene.setAllele(new Integer(50));
gene.applyMutation(0, 0.3d);
assertEquals(Math.round(50 + (100 - 44) * 0.3d), gene.intValue());
}
/**
 * A mutation pushing the value above the upper bound must be mapped back
 * into bounds using the configured random generator's next double.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testApplyMutation_3()
throws Exception {
RandomGeneratorForTesting rn = new RandomGeneratorForTesting(15);
rn.setNextDouble(0.5d);
conf.setRandomGenerator(rn);
IntegerGene gene = new IntegerGene(conf, 33, 100);
gene.setAllele(new Integer(50));
gene.applyMutation(0, 1.9d);
// expected: value re-randomized within [33, 100] using nextDouble = 0.5
assertEquals(Math.round((100-33)*0.5d + 33), gene.intValue());
}
/**
 * Out-of-bounds mutation with a different lower bound and nextDouble = 0.3.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testApplyMutation_4()
throws Exception {
RandomGeneratorForTesting rn = new RandomGeneratorForTesting(15);
rn.setNextDouble(0.3d);
conf.setRandomGenerator(rn);
IntegerGene gene = new IntegerGene(conf, 2, 100);
gene.setAllele(new Integer(60));
gene.applyMutation(0, 1.9d);
assertEquals(Math.round( (100 - 2) * 0.3d + 2), gene.intValue());
}
/**
 * Mutation pushing the value below the lower bound is likewise re-mapped
 * into bounds via the random generator.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testApplyMutation_5()
throws Exception {
RandomGeneratorForTesting rn = new RandomGeneratorForTesting(15);
rn.setNextDouble(0.25d);
conf.setRandomGenerator(rn);
IntegerGene gene = new IntegerGene(conf, 0, 100);
gene.setAllele(new Integer(60));
gene.applyMutation(0, -1.0d);
assertEquals(Math.round( (100 - 0) * 0.25d + 0), gene.intValue());
}
/**
 * Negative in-bounds mutation subtracts percentage * range from the allele.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testApplyMutation_6()
throws Exception {
IntegerGene gene = new IntegerGene(conf, 0, 100);
gene.setAllele(new Integer(60));
gene.applyMutation(0, -0.4d);
assertEquals(Math.round(60 + (100 * ( -0.4d))), gene.intValue());
}
/**
 * Mutating a gene whose allele is null treats the value as zero.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.6
 */
public void testApplyMutation_7()
throws Exception {
IntegerGene gene = new IntegerGene(conf, 0, 100);
gene.setAllele(null);
gene.applyMutation(0, 0.4d);
assertEquals(Math.round( (100 * (0.4d))), gene.intValue());
}
/**
 * Null allele plus an out-of-bounds mutation: value is re-randomized within
 * [5, 100] using nextDouble = 0.8.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.6
 */
public void testApplyMutation_8()
throws Exception {
IntegerGene gene = new IntegerGene(conf, 5, 100);
gene.setAllele(null);
RandomGeneratorForTesting rn = new RandomGeneratorForTesting(10);
rn.setNextDouble(0.8d);
conf.setRandomGenerator(rn);
gene.applyMutation(0, -0.4d);
assertEquals(Math.round( (100 - 5) * 0.8d + 5), gene.intValue());
}
/**
 * setToRandomValue maps nextDouble linearly onto [lower, upper]:
 * expected = random * (upper - lower) + lower.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testSetToRandomValue_0()
throws Exception {
Gene gene = new IntegerGene(conf, 1, 6);
gene.setAllele(new Integer(5));
gene.setToRandomValue(new RandomGeneratorForTesting(0.2d));
assertEquals(new Integer( (int) (0.2d * (6 - 1) + 1)), gene.getAllele());
}
/**
 * Random mapping works with a negative lower bound (-1).
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testSetToRandomValue_1()
throws Exception {
Gene gene = new IntegerGene(conf, -1, 7);
gene.setAllele(new Integer(4));
gene.setToRandomValue(new RandomGeneratorForTesting(0.3d));
assertEquals(new Integer( (int) (0.3d * (7 + 1) - 1)), gene.getAllele());
}
/**
 * Random mapping works with an entirely negative range [-2, -1].
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testSetToRandomValue_2()
throws Exception {
Configuration conf = new ConfigurationForTesting();
Gene gene = new IntegerGene(conf, -2, -1);
gene.setAllele(new Integer(4));
gene.setToRandomValue(new RandomGeneratorForTesting(0.8d));
assertEquals(new Integer( (int) (0.8d * ( -1 + 2) - 2)), gene.getAllele());
}
/**
 * With an integer-seeded random generator the result must still fall
 * within the gene's bounds [0, 8].
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testSetToRandomValue_3()
throws Exception {
IntegerGene gene = new IntegerGene(conf, 0, 8);
gene.setAllele(new Integer(5));
gene.setToRandomValue(new RandomGeneratorForTesting(4));
if (gene.intValue() < 0
|| gene.intValue() > 8) {
fail();
}
}
/**
 * Same bounds check with a different seed and range [1, 6].
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.2
 */
public void testSetToRandomValue_4()
throws Exception {
IntegerGene gene = new IntegerGene(conf, 1, 6);
gene.setAllele(new Integer(2));
gene.setToRandomValue(new RandomGeneratorForTesting(3));
if (gene.intValue() < 1
|| gene.intValue() > 6) {
fail();
}
}
/**
 * Exposing bug #1357474: extreme random values (near 1.0 and near 0.0)
 * must map to the upper and lower bound respectively, never outside.
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.5
 */
public void testSetToRandomValue_5()
throws Exception {
Configuration conf = new ConfigurationForTesting();
Gene gene = new IntegerGene(conf, 1, 3);
gene.setAllele(new Integer(4));
gene.setToRandomValue(new RandomGeneratorForTesting(0.95d));
assertEquals(new Integer(3), gene.getAllele());
gene.setToRandomValue(new RandomGeneratorForTesting(0.05d));
assertEquals(new Integer(1), gene.getAllele());
}
/**
 * A freshly constructed gene reports zero energy.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.4
 */
public void testSetEnergy_0()
throws Exception {
BaseGene gene = new IntegerGene(conf);
assertEquals(0.0, gene.getEnergy(), DELTA);
}
/**
 * Energy is a plain read/write property: each set (including negative
 * values) is reported back, and the last value set wins.
 *
 * @throws Exception
 *
 * @author Klaus Meffert
 * @since 2.4
 */
public void testSetEnergy_1()
throws Exception {
BaseGene gene = new IntegerGene(conf);
gene.setEnergy(2.3);
assertEquals(2.3, gene.getEnergy(), DELTA);
gene.setEnergy( -55.8);
assertEquals( -55.8, gene.getEnergy(), DELTA);
gene.setEnergy(0.5);
gene.setEnergy(0.8);
assertEquals(0.8, gene.getEnergy(), DELTA);
}
/**
 * Descendent of IntegerGene being virtually the same but of a different
 * class so that equals and compareTo should signal a difference.
 *
 * @author Klaus Meffert
 * @since 2.4
 */
class IntegerGene2
extends IntegerGene {
// No-args constructor required for tests relying on the static configuration.
public IntegerGene2()
throws InvalidConfigurationException {
super(Genotype.getStaticConfiguration());
}
public IntegerGene2(final Configuration a_conf, int a_lowerBounds,
int a_upperBounds)
throws InvalidConfigurationException {
super(a_conf, a_lowerBounds, a_upperBounds);
}
}
/**
 * Trivial constraint checker that accepts every allele value; used to test
 * that a configured checker does not interfere with normal operation.
 */
class GeneConstraintChecker
implements IGeneConstraintChecker {
public boolean verify(Gene a_gene, Object a_alleleValue,
IChromosome a_chrom, int a_index) {
return true;
}
}
/**
 * Bounds handed to the constructor are reported back unchanged by
 * getLowerBounds()/getUpperBounds().
 *
 * @throws Exception on unexpected test failure
 *
 * @author Klaus Meffert
 * @since 2.6
 */
public void testGetBounds_0()
    throws Exception {
  final int lower = 2;
  final int upper = 5;
  IntegerGene gene = new IntegerGene(conf, lower, upper);
  assertEquals(lower, gene.getLowerBounds());
  assertEquals(upper, gene.getUpperBounds());
}
/**
 * setToRandomValue must work over the full integer range; the expected
 * value is computed in long arithmetic to avoid int overflow of
 * MAX_VALUE - MIN_VALUE.
 *
 * @throws Exception
 *
 * @author David Kemp
 * @since 3.4.4
 */
public void testIntegerGeneSupportsFullIntegerRange()
throws Exception {
Gene gene = new IntegerGene(conf, Integer.MIN_VALUE, Integer.MAX_VALUE);
gene.setAllele(new Integer(5));
gene.setToRandomValue(new RandomGeneratorForTesting(0.2d));
int expectedValue = (int) (Integer.MIN_VALUE +
Math.round( (0.2d *
( (long) Integer.MAX_VALUE - (long) Integer.MIN_VALUE))));
assertEquals(new Integer(expectedValue), gene.getAllele());
}
/**
 * Mapping an out-of-bounds allele back into bounds must also use long
 * arithmetic so that a near-full integer range does not overflow.
 *
 * @throws Exception
 *
 * @author David Kemp
 * @since 3.4.4
 */
public void testMapValueToWithinBoundsSupportsFullIntegerRange()
throws Exception {
conf.setRandomGenerator(new RandomGeneratorForTesting(0.2d));
int lower = Integer.MIN_VALUE + 1;
int upper = Integer.MAX_VALUE;
IntegerGene gene = new IntegerGene(conf, lower, upper);
gene.setAllele(Integer.MIN_VALUE);
int expectedValue = (int) (lower +
Math.round( (0.2d *
( (long) upper - (long) lower))));
assertEquals(new Integer(expectedValue), gene.getAllele());
}
/**
 * applyMutation over the full integer range: the mutation delta is computed
 * from the long-sized range and added to the implicit zero value.
 *
 * @throws Exception
 *
 * @author David Kemp
 * @since 3.4.4
 */
public void testApplyMutationSupportsFullIntegerRange()
throws Exception {
IntegerGene gene = new IntegerGene(conf, Integer.MIN_VALUE,
Integer.MAX_VALUE);
gene.setAllele(null);
gene.applyMutation(0, 0.4d);
double range = ( (long) Integer.MAX_VALUE - (long) Integer.MIN_VALUE) *
0.4d;
int expectedValue = (int) (range + Integer.MIN_VALUE);
assertEquals(expectedValue, gene.intValue());
}
}
| lgpl-3.0 |
simeshev/parabuild-ci | src/org/parabuild/ci/versioncontrol/VersionControlRemoteCommand.java | 10806 | /*
* Parabuild CI licenses this file to You under the LGPL 2.1
* (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.gnu.org/licenses/lgpl-3.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.parabuild.ci.versioncontrol;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.parabuild.ci.build.AgentFailureException;
import org.parabuild.ci.common.IoUtils;
import org.parabuild.ci.common.StringUtils;
import org.parabuild.ci.configuration.SystemConfigurationManagerFactory;
import org.parabuild.ci.error.Error;
import org.parabuild.ci.error.ErrorManagerFactory;
import org.parabuild.ci.object.SystemProperty;
import org.parabuild.ci.process.RemoteCommand;
import org.parabuild.ci.process.TimeoutCallback;
import org.parabuild.ci.remote.Agent;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * This class incorporates behaviour common for all version
 * control commands. All version control commands should inherit
 * this class.
 * <p/>
 * Responsibilities: applying the system-wide VCS timeout, installing a
 * timeout callback that reports hung/timed-out commands, and scanning
 * stderr after execution to turn accumulated error lines into an
 * IOException.
 */
public class VersionControlRemoteCommand extends RemoteCommand {

  private static final Log log = LogFactory.getLog(VersionControlRemoteCommand.class);

  /**
   * Maximum number of lines to be accumulated from stderr.
   */
  private static final int MAX_ACCUMULATED_ERROR_LINES = 10;

  /**
   * A StderrLineProcessor used to classify each stderr line as an error
   * or as ignorable.
   */
  private StderrLineProcessor stderrLineProcessor = new DefaultStderrLineProcessor();

  /**
   * If true, a non-zero result code returned by the command is treated
   * as an error even when stderr is empty.
   */
  private boolean respectErrorCode = false;

  /**
   * If true, error messages include the remote current directory.
   */
  private boolean showCurrentDirectoryInError = false;


  /**
   * Creates VersionControlRemoteCommand that uses system-wide
   * timeout for version control commands.
   *
   * @param agent agent the command will execute on
   * @param addAutomaticSignatureToEnvironment if true, PARABUILD_CHECKOUT_DIR
   *        is added to the command environment
   * @throws AgentFailureException if the agent cannot be contacted
   */
  protected VersionControlRemoteCommand(final Agent agent, final boolean addAutomaticSignatureToEnvironment) throws AgentFailureException {
    super(agent, addAutomaticSignatureToEnvironment, getRetryTimes(), getRetryIntervalSecs(), getIgnoreErrorPatterns());
    // the system-wide VCS timeout is configured in minutes
    setTimeoutSecs(SystemConfigurationManagerFactory.getManager().getSystemVCSTimeout() * 60);
    setTimeoutCallback(new VersionControlTimeoutCallback(this));
    if (addAutomaticSignatureToEnvironment) {
      try {
        addEnvironment("PARABUILD_CHECKOUT_DIR", agent.getCheckoutDirName());
      } catch (IOException e) {
        IoUtils.ignoreExpectedException(e);
      }
    }
    showCurrentDirectoryInError = true;
  }


  /**
   * Helper method to remove unencrypted password from a String
   * containing command.
   *
   * @param string command line that may contain a password switch
   * @return the command line with password-style switches removed
   */
  public static String removePasswordFromDebugString(final String string) {
    if (StringUtils.isBlank(string)) return string;
    // matches password switches such as -p<password> / -y<password>
    final Pattern pattern = Pattern.compile("[-]+[pPyY][\\w]*[\\W]*[\\w]*[\\W]?");
    final Matcher matcher = pattern.matcher(string);
    return matcher.replaceAll("");
  }


  /**
   * If set to true will fail if the error code returned by
   * command execution is not zero.
   *
   * @param respectErrorCode true to treat non-zero exit codes as errors
   */
  protected void setRespectErrorCode(final boolean respectErrorCode) {
    this.respectErrorCode = respectErrorCode;
  }


  protected static void appendCommand(final StringBuffer cmd, final String name, final String value) {
    cmd.append(' ').append(name);
    cmd.append(' ').append(value);
  }


  // NOTE(review): this overload is an instance method while its siblings are
  // static; kept non-static because subclasses may already override it.
  protected void appendCommand(final StringBuffer cmd, final String name, final int value) {
    appendCommand(cmd, name, Integer.toString(value));
  }


  protected static void appendCommand(final StringBuffer cmd, final String name) {
    cmd.append(' ').append(name);
  }


  protected static void appendCommandIfNotBlank(final StringBuffer cmd, final String name, final String value) {
    if (!StringUtils.isBlank(value)) {
      cmd.append(' ').append(name);
      cmd.append(' ').append(value);
    }
  }


  protected static void appendCommandIfNotBlankQuoted(final StringBuffer cmd, final String name, final String value) {
    if (!StringUtils.isBlank(value)) {
      cmd.append(' ').append(name);
      cmd.append(' ').append(StringUtils.putIntoDoubleQuotes(value));
    }
  }


  /**
   * Callback method - this method is called right after call to
   * execute.
   * <p/>
   * This method can be overridden by children to accommodate
   * post-execute processing such as command log analysis etc.
   * <p/>
   * Scans up to MAX_ACCUMULATED_ERROR_LINES of stderr through the
   * configured StderrLineProcessor and throws an IOException if any
   * error lines were accumulated, or if respectErrorCode is set and
   * the result code is non-zero.
   *
   * @param resultCode - execute command result code.
   * @throws IOException if errors were detected in stderr or in the result code
   */
  protected void postExecute(final int resultCode) throws IOException, AgentFailureException {
    super.postExecute(resultCode);
    BufferedReader reader = null;
    try {
      if (getStderrFile().exists() && getStderrFile().length() > 0) {
        final StringBuffer message = new StringBuffer(500);
        reader = new BufferedReader(new FileReader(getStderrFile()));
        String line = reader.readLine();
        int index = 0;
        while (line != null && index < MAX_ACCUMULATED_ERROR_LINES) {
          final int code = stderrLineProcessor.processLine(index, line);
          switch (code) {
            case StderrLineProcessor.RESULT_ADD_TO_ERRORS:
              message.append('\n').append(" ").append(line);
              break;
            case StderrLineProcessor.RESULT_IGNORE: // NOPMD
              // do nothing - line classified as harmless
              break;
            default:
              log.warn("Unexpected code: " + code);
              break;
          }
          line = reader.readLine();
          index++;
        }
        // throw exception if there are any accumulated messages.
        // FIX: closed the quote after the command and un-inverted the
        // showCurrentDirectoryInError condition - previously the current
        // directory was appended only when the flag was FALSE.
        if (message.length() > 0) {
          throw new IOException("Errors while executing command \"" + removePasswordFromDebugString(getCommand()) + "\". \nMessage: " + message + '.' + (showCurrentDirectoryInError ? " \nCurrent directory: " + remoteCurrentDir : ""));
        }
      }
      // if we are here it means nothing has happened so far
      if (respectErrorCode && resultCode != 0) {
        throw new IOException("Error while executing command \"" + removePasswordFromDebugString(getCommand()) + "\". The command returned non-zero error code: " + resultCode);
      }
    } finally {
      IoUtils.closeHard(reader);
    }
  }


  /**
   * Sets error output processor. If not set, {@link DefaultStderrLineProcessor} is used.
   *
   * @param stderrLineProcessor to set.
   */
  public final void setStderrLineProcessor(final StderrLineProcessor stderrLineProcessor) {
    this.stderrLineProcessor = stderrLineProcessor;
  }


  /**
   * Returns system level retry times on error.
   *
   * @return system level retry times on error.
   */
  private static int getRetryTimes() {
    return SystemConfigurationManagerFactory.getManager().getSystemPropertyValue(SystemProperty.RETRY_VCS_COMMAND_TIMES, 1);
  }


  /**
   * Returns system level retry interval on error.
   *
   * @return system level retry interval on error, in seconds.
   */
  private static int getRetryIntervalSecs() {
    return SystemConfigurationManagerFactory.getManager().getSystemPropertyValue(SystemProperty.RETRY_VCS_COMMAND_INTERVAL, 10);
  }


  /**
   * Returns system level retry patterns on error.
   *
   * @return system level retry patterns on error, one pattern per list element.
   */
  private static List getIgnoreErrorPatterns() {
    return StringUtils.multilineStringToList(SystemConfigurationManagerFactory.getManager().getSystemPropertyValue(SystemProperty.RETRY_VCS_COMMAND_PATTERNS, SystemProperty.DEFAULT_RETRY_VCS_COMMAND_PATTERNS));
  }


  /**
   * Version control's command timeout callback. Responsible for
   * reporting timeouts and hangs when a VCS command is
   * executed.
   */
  private static final class VersionControlTimeoutCallback implements TimeoutCallback {

    private final VersionControlRemoteCommand commandToReportOn;


    /**
     * Constructor.
     *
     * @param commandToReportOn VersionControlRemoteCommand that
     * this timeout callback will report on in case of time out.
     */
    public VersionControlTimeoutCallback(final VersionControlRemoteCommand commandToReportOn) {
      this.commandToReportOn = commandToReportOn;
    }


    /**
     * This callback method is called when watched command is
     * timed out but before watchdog tries to kill command.
     */
    public void commandTimedOut() {
      final Error error = new Error();
      error.setErrorLevel(Error.ERROR_LEVEL_ERROR);
      error.setBuildID(commandToReportOn.agent.getActiveBuildID());
      error.setHostName(commandToReportOn.getAgentHost().getHost());
      error.setDescription("Version control command timed out");
      error.setDetails("Version control command \"" + removePasswordFromDebugString(commandToReportOn.getCommand()) + "\" has not exited after " + commandToReportOn.getTimeoutSecs() + " seconds. Parabuild will try to stop the command. System may require immediate attention of a build administrator.");
      error.setPossibleCause("Version control system has become unavailable or the timeout value is set too low.");
      ErrorManagerFactory.getErrorManager().reportSystemError(error);
    }


    /**
     * This callback method is called when watched command is
     * identified as hung.
     */
    public void commandHung() {
      final Error error = new Error();
      error.setErrorLevel(Error.ERROR_LEVEL_FATAL);
      error.setBuildID(commandToReportOn.agent.getActiveBuildID());
      error.setHostName(commandToReportOn.getAgentHost().getHost());
      error.setDescription("Version control command hung");
      error.setDetails("Version control command \"" + removePasswordFromDebugString(commandToReportOn.getCommand()) + "\" hung after " + commandToReportOn.getTimeoutSecs() + " seconds timeout. System attempted and failed to stop the command. System requires immediate attention of a build administrator. The command should be stopped manually.");
      error.setPossibleCause("Version control system has become unavailable or time out value is set too low.");
      ErrorManagerFactory.getErrorManager().reportSystemError(error);
    }
  }


  /**
   * If true a error message will show the current directory.
   *
   * @param showCurrentDirectoryInError true to append the remote current
   *        directory to stderr-derived error messages
   */
  protected final void setShowCurrentDirectoryInError(final boolean showCurrentDirectoryInError) {
    this.showCurrentDirectoryInError = showCurrentDirectoryInError;
  }
}
| lgpl-3.0 |
SergiyKolesnikov/fuji | examples/Chat_casestudies/chat-carsten-schulze/features/GUI/client/ChatPlugin.java | 684 | package client;
import common.Message;
/**
 * Listener that gets informed every time the chat client receives a new
 * message. Implementations may authenticate, encrypt, filter or display
 * the message, as advertised by {@link #getType()}.
 */
public interface ChatPlugin {
/**
 * Get type of plugin, bit-coded:
 * Bit -
 * 0 - 0x01 Dummy Plugin
 * 1 - 0x02 Authentication
 * 2 - 0x04 Encryption
 * 3 - 0x08 Filter
 * 4 - 0x10 UserInterface
 * 5 - 0x20 (unused)
 * 6 - 0x40 (unused)
 * 7 - 0x80 Error
 *
 * @return byte containing the type flags
 */
public byte getType();
/**
 * The given message is processed and then returned by the plugin.
 *
 * @param msg message for the plugin to process
 * @return processed message from the plugin
 */
public Message process(Message msg);
}
| lgpl-3.0 |
dresden-ocl/dresdenocl | plugins/org.dresdenocl.tools.CWM/src/orgomg/cwm/analysis/transformation/impl/TransformationUseImpl.java | 3832 | /**
* <copyright>
* </copyright>
*
* $Id$
*/
package orgomg.cwm.analysis.transformation.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import orgomg.cwm.analysis.transformation.TransformationPackage;
import orgomg.cwm.analysis.transformation.TransformationUse;
import orgomg.cwm.objectmodel.core.impl.DependencyImpl;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Use</b></em>'.
 * NOTE: EMF-generated code (see the @generated tags) - regenerating the
 * model will overwrite manual changes.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 * <li>{@link orgomg.cwm.analysis.transformation.impl.TransformationUseImpl#getType <em>Type</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class TransformationUseImpl extends DependencyImpl implements TransformationUse {
/**
 * The default value of the '{@link #getType() <em>Type</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getType()
 * @generated
 * @ordered
 */
protected static final String TYPE_EDEFAULT = null;
/**
 * The cached value of the '{@link #getType() <em>Type</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getType()
 * @generated
 * @ordered
 */
protected String type = TYPE_EDEFAULT;
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected TransformationUseImpl() {
super();
}
/**
 * <!-- begin-user-doc -->
 * Identifies this class's EMF metaclass for the reflective API.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected EClass eStaticClass() {
return TransformationPackage.Literals.TRANSFORMATION_USE;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public String getType() {
return type;
}
/**
 * <!-- begin-user-doc -->
 * Sets the type and fires an EMF change notification if anyone is listening.
 * <!-- end-user-doc -->
 * @generated
 */
public void setType(String newType) {
String oldType = type;
type = newType;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, TransformationPackage.TRANSFORMATION_USE__TYPE, oldType, type));
}
/**
 * <!-- begin-user-doc -->
 * Reflective getter used by the EMF framework.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case TransformationPackage.TRANSFORMATION_USE__TYPE:
return getType();
}
return super.eGet(featureID, resolve, coreType);
}
/**
 * <!-- begin-user-doc -->
 * Reflective setter used by the EMF framework.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case TransformationPackage.TRANSFORMATION_USE__TYPE:
setType((String)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
 * <!-- begin-user-doc -->
 * Reflectively restores the feature to its default value.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eUnset(int featureID) {
switch (featureID) {
case TransformationPackage.TRANSFORMATION_USE__TYPE:
setType(TYPE_EDEFAULT);
return;
}
super.eUnset(featureID);
}
/**
 * <!-- begin-user-doc -->
 * Reports whether the feature differs from its default value.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case TransformationPackage.TRANSFORMATION_USE__TYPE:
return TYPE_EDEFAULT == null ? type != null : !TYPE_EDEFAULT.equals(type);
}
return super.eIsSet(featureID);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (type: ");
result.append(type);
result.append(')');
return result.toString();
}
} //TransformationUseImpl
| lgpl-3.0 |
warlockcodes/Null-Engine | Game Engine/src/nullEngine/object/component/physics/PlaneCollider.java | 365 | package nullEngine.object.component.physics;
import com.bulletphysics.collision.shapes.StaticPlaneShape;
import math.Vector4f;
import javax.vecmath.Vector3f;
/**
 * Collider backed by an infinite static plane (Bullet StaticPlaneShape).
 */
public class PlaneCollider extends Collider {

  /**
   * Creates a static plane collider.
   *
   * @param normal plane normal; only the x, y and z components are used
   * @param buffer plane constant passed straight through to Bullet
   */
  public PlaneCollider(Vector4f normal, float buffer) {
    Vector3f planeNormal = new Vector3f(normal.x, normal.y, normal.z);
    setCollisionShape(new StaticPlaneShape(planeNormal, buffer));
  }
}
| lgpl-3.0 |
MesquiteProject/MesquiteArchive | releases/Mesquite1.12/Mesquite Project/Source/mesquite/lib/table/RowNamesPanel.java | 17430 | /* Mesquite source code. Copyright 1997-2006 W. Maddison and D. Maddison.
Version 1.12, September 2006.
Disclaimer: The Mesquite source code is lengthy and we are few. There are no doubt inefficiencies and goofs in this code.
The commenting leaves much to be desired. Please approach this source code with the spirit of helping out.
Perhaps with your help we can be more than a few, and make Mesquite better.
Mesquite is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY.
Mesquite's web site is http://mesquiteproject.org
This source code and its compiled class files are free and modifiable under the terms of
GNU Lesser General Public License. (http://www.gnu.org/copyleft/lesser.html)
*/
package mesquite.lib.table;
import java.awt.*;
import java.awt.event.*;
import mesquite.lib.*;
import java.io.*;
/* ======================================================================== */
/** A panel for row headings in a MesquiteTable.*/
public class RowNamesPanel extends EditorPanel {
// Back-reference to the owning table; used by nearly every method below.
MesquiteTable table;
// Panel unit dimensions in pixels; public because other table panels read them.
public int width, height;
public RowNamesPanel (MesquiteTable table , int w, int h) {
super(table);
this.table=table;
//setBackground(ColorDistribution.medium[table.colorScheme]);
setBackground(Color.white);
setTableUnitSize(w, h);
}
// Records the unit size; note setSize uses the stored height, not h directly.
public void setTableUnitSize (int w, int h) {
this.width=w;
this.height=h;
setSize(w, height);
}
// Changes only the width, keeping the current height.
public void setWidth (int w) {
this.width=w;
setSize(w, height);
}
/*@@@...............................................................................................................*/
/** Returns in which column x lies. The row-names panel holds a single
pseudo-column, so this always answers -1 regardless of x. */
public int findColumn(int x) {
	return -1; //if left of grabbers?
}
/*@@@...............................................................................................................*/
/** returns in which row y lies, -1 if above all rows, -2 if below all rows.*/
public int findRow(int y) {
	if (y<=0)
		return -1; // above all rows
	int ry = 0;
	// Walk down from the first visible row, accumulating row heights until
	// the running bottom edge (ry) reaches y. The original code also tested
	// row >= table.numRowsTotal inside the loop, but the loop condition
	// already guarantees row < table.numRowsTotal there; the dead branch
	// has been removed.
	for (int row=table.firstRowVisible; (row<table.numRowsTotal) && (ry<y); row++) {
		ry += table.rowHeights[row];
		if (ry>=y)
			return row;
	}
	return -2;//past the last row
}
/*@@@...............................................................................................................*/
/** Returns the horizontal position of x within the row-names cell as a
percentage (0-100) of the cell width; 50 if x is at or left of the origin.
(Original javadoc was a copy-paste from findColumn and was incorrect.) */
public int findRegionInCellH(int x) {
if (x<=0)
return 50;
return (x-startOfColumn(-1))*100/(columnWidth(-1)- startOfColumn(-1));
}
/*@@@...............................................................................................................*/
/** Returns the vertical position of y within the row it falls in, as a
percentage (0-100) of that row's height; 50 when y is outside any row.
(Original javadoc was a copy-paste from findColumn and was incorrect.) */
public int findRegionInCellV(int y) {
if (y<=0)
return 50;
int ry = 0;
for (int row=table.firstRowVisible; (row<table.numRowsTotal) && (ry<y); row++) {
ry += table.rowHeights[row];
if (row>= table.numRowsTotal)
return 50;
else if (ry>=y) {
int dYB = ry-y; //distance from bottom edge of the row
int dYU = y - (ry-table.rowHeights[row]); //distance from top edge of the row
return dYU*100/(dYB+dYU);
}
}
return 50;
}
// The single pseudo-column starts just right of the column grabber area.
public int startOfColumn(int column){
return table.getColumnGrabberWidth()-2;
}
// The row-names panel exposes exactly one pseudo-column, indexed -1.
public int firstColumnVisible(){
return -1;
}
public int numColumnsVisible(){
return 1;
}
public int columnWidth(int column) { //todo: why does this not subtract grabbers, but ColumnNames does?
return width;
}
// Edited row-name text is handed back to the table for processing.
public void textReturned(int column, int row, String text, CommandRecord commandRec){
table.returnedRowNameText(row, text, commandRec);
}
public String getText(int column, int row){
return table.getRowNameText(row);
}
public void deselectCell(int column,int row){
table.deselectRowName(row);
}
// Repaints a single row name; getGraphics() may return null when the
// panel is not displayable, in which case nothing is drawn.
public void redrawCell(int column, int row){
Graphics g = getGraphics();
if (g!=null) {
redrawName(g, row);
g.dispose();
}
}
/** Paints the name cell for one row: background, optional frame line,
optional row grabber/number, the name text itself, and an optional
drop-down triangle. Saves and restores the Graphics clip and color;
statement order is significant because clip regions are swapped between
the grabber area and the name area. */
public void redrawName(Graphics g, int row) {
int top = table.getFirstRowVisible();
if (row<top) //TODO: should also fail to draw if to big
return;
if (row == returningRow){
return; //don't draw if text about to be returned to cell, and will soon be redrawn anyway
}
int leftSide = startOfColumn(-1);
int topSide = startOfRow(row);
// skip rows scrolled entirely out of the visible area
if (topSide>getBounds().height || topSide+rowHeight(row)<0)
return;
Shape clip = g.getClip();
g.setClip(0,topSide,columnWidth(-1), rowHeight(row));
// paint background reflecting focus/selection/dimmed/editable state
prepareCell(g, 1,topSide+1,columnWidth(-1), rowHeight(row)-2, table.focusRow == row, table.isRowNameSelected(row) || table.isRowSelected(row), table.getCellDimmed(-1, row), table.isRowNameEditable(row));
g.setClip(0,0, getBounds().width, getBounds().height);
if (table.frameRowNames) {
Color cg = g.getColor();
g.setColor(Color.gray);
g.drawLine(0, topSide+rowHeight(row), width, topSide+rowHeight(row));
g.setColor(cg);
}
// focused row is drawn in the table's bold font, if one is available
Font fnt = null;
boolean doFocus = table.focusRow == row && table.boldFont !=null;
if (doFocus){
fnt = g.getFont();
g.setFont(table.boldFont);
}
Color oldColor = g.getColor();
if (table.showRowGrabbers) {
// draw the grabber (with or without the row number), then clip the
// remaining width for the name text
if (table.showRowNumbers)
table.drawRowColumnNumber(g,row,true,0,topSide+1, table.getRowGrabberWidth(),rowHeight(row)-2);
else
table.drawRowColumnNumberBox(g,row,true,0,topSide+1, table.getRowGrabberWidth(),rowHeight(row)-2);
g.setClip(0+table.getRowGrabberWidth(),topSide, width-table.getRowGrabberWidth(),rowHeight(row));
table.setRowNameColor(g, row);
table.drawRowNameCell(g, 0+table.getRowGrabberWidth(),topSide, width-table.getRowGrabberWidth(),rowHeight(row), row);
}
else {
g.setClip(0,topSide, width,rowHeight(row));
table.setRowNameColor(g, row);
table.drawRowNameCell(g, 0,topSide, width,rowHeight(row), row);
}
g.setColor(oldColor);
if (doFocus && fnt !=null){
g.setFont(fnt);
}
g.setClip(0,0, getBounds().width, getBounds().height);
g.setColor(Color.black);
if (table.getDropDown(-1, row)) {
// the shared triangle polygon is translated into place and back
int offset = 0;
if (table.showRowGrabbers)
offset = table.getRowGrabberWidth();
dropDownTriangle.translate(1 + offset,topSide + 1);
g.setColor(Color.white);
g.drawPolygon(dropDownTriangle);
g.setColor(Color.black);
g.fillPolygon(dropDownTriangle);
dropDownTriangle.translate(-(1 + offset),-(topSide + 1));
}
g.setClip(clip);
// right border of the panel
g.drawLine(width-1, 0, width-1, height);
}
/** Repaints the panel, first making sure the in-place edit field (if any) is
 * positioned correctly. */
public void repaint(){
	checkEditFieldLocation();
	super.repaint();
}
/** Paints all visible row names, the filler area past the last row, and the
 * panel's border lines. Guarded by MesquiteWindow doom-checking; any drawing
 * exception is logged rather than propagated. */
public void paint(Graphics g) {
	if (MesquiteWindow.checkDoomed(this))
		return;
	try {
		table.checkResetFont(g);
		int lineY = 0; // NOTE(review): never advanced, so the loop's lineY<height test is always true
		int oldLineY=lineY; // NOTE(review): unused in this method
		int resetWidth = getBounds().width;
		int resetHeight = getBounds().height;
		width = resetWidth;//this is here to test if width/height should be reset here
		height = resetHeight;
		Shape clip = g.getClip();
		for (int r=table.firstRowVisible; (r<table.numRowsTotal) && (lineY<height); r++) {
			redrawName(g, r);
		}
		g.setClip(0,0, getBounds().width, getBounds().height);
		if (false && getEditing()) { // NOTE(review): deliberately disabled code path
			TextField edit = getEditField();
			if (edit!= null)
				edit.repaint();
		}
		if ((endOfLastRow()>=0) && (endOfLastRow()<table.matrixHeight)) {
			// fill the area below the last row with the scheme's background color
			g.setColor(ColorDistribution.medium[table.colorScheme]);
			g.fillRect(0, endOfLastRow()+1, getBounds().width, getBounds().height);
		}
		g.setColor(Color.black);
		if (table.frameRowNames)
			g.drawRect(0, 0, width, height-1);
		g.drawLine(width-1, 0, width-1, height); // right border
		g.setClip(clip);
	}
	catch (Throwable e){
		MesquiteMessage.warnProgrammer("Exception or Error in drawing table (RNP); details in Mesquite log file");
		PrintWriter pw = MesquiteFile.getLogWriter();
		if (pw!=null)
			e.printStackTrace(pw);
	}
	MesquiteWindow.uncheckDoomed(this);
}
/** Draws all row names (not just the visible ones) for printing, each clipped
 * to its own row rectangle, followed by the right border line. */
public void print(Graphics g) {
	int lineY = 0; // bottom edge of the rows drawn so far
	int oldLineY=lineY; // top edge of the row currently being drawn
	Shape clip = g.getClip();
	g.setClip(0,0, getBounds().width, getBounds().height);
	for (int r=0; (r<table.numRowsTotal); r++) {
		lineY += table.rowHeights[r];
		g.setClip(0,oldLineY, width, table.rowHeights[r]);
		g.setColor(Color.black);
		table.drawRowNameCell(g, 0,startOfRow(r), width,rowHeight(r), r);
		//table.drawRowNameCell(g, 20,oldLineY, width,table.rowHeights[r], r);
		g.setColor(Color.black);
		oldLineY=lineY;
	}
	g.setClip(0,0, width, table.getTotalRowHeight());
	g.setColor(Color.black);
	g.drawLine(width-1, 0, width-1, table.getTotalRowHeight()); // right border, full printed height
	g.setClip(clip);
}
/** Superseded printing routine, apparently kept for reference; print(Graphics)
 * above is the current implementation. Draws the visible row names only. */
public void OLDprint(Graphics g) {
	int lineY = 0;
	int oldLineY=lineY;
	int resetWidth = getBounds().width;
	int resetHeight = getBounds().height;
	width = resetWidth;//this is here to test if width/height should be reset here
	height = resetHeight;
	Shape clip = g.getClip();
	for (int r=table.firstRowVisible; (r<table.numRowsTotal) && (lineY<height); r++) {
		lineY += table.rowHeights[r];
		/*
		if (table.frameRowNames) {
			g.setColor(Color.gray);
			g.drawLine(0, lineY, width, lineY);
		}
		*/
		g.setColor(Color.black);
		g.setClip(0,oldLineY, width,table.rowHeights[r]);
		table.drawRowNameCell(g, 0,oldLineY, width,table.rowHeights[r], r);
		g.setClip(clip);
		g.setColor(Color.black);
		oldLineY=lineY;
	}
	g.setClip(0,0, getBounds().width, getBounds().height);
	g.setColor(Color.black);
	g.drawLine(width-1, 0, width-1, height);
	g.setClip(clip);
}
/*...............................................................................................................*/
int touchY = -1; // y coordinate where the current mouse press started (-1 if none)
int lastY=-1; // most recent y during a drag; used to erase/redraw the shimmer line
int touchRow; // row index touched at mouse-down (-1 once cleared)
int previousRowDragged = -1; // last row emphasized during a drag, so emphasis can be moved
/*...............................................................................................................*/
/** Handles a mouse press in the row-names panel. Depending on the tool and the
 * press location (grabber strip, name area, past the last row), this either
 * begins a row move (shimmer line), touches/selects a row, forwards the touch
 * to a row-names tool, or clears the selection. */
public void mouseDown(int modifiers, int clickCount, long when, int x, int y, MesquiteTool tool) {
	if (!(tool instanceof TableTool))
		return;
	touchY=-1;
	touchRow=-1;
	int possibleTouch = findRow(y);
	int regionInCellH = findRegionInCellH(x);
	int regionInCellV =findRegionInCellV(y);
	boolean isArrowEquivalent = ((TableTool)tool).isArrowKeyOnRow(x,table);
	if (possibleTouch>=0 && possibleTouch<table.numRowsTotal) {
		// arrow tool on an already-selected row without modifiers: begin a potential row move
		if (tool != null && isArrowEquivalent && table.getUserMoveRow() && table.isRowSelected(possibleTouch) && !MesquiteEvent.shiftKeyDown(modifiers) && !MesquiteEvent.commandOrControlKeyDown(modifiers)) {
			touchY=y;
			lastY = y;
			touchRow=possibleTouch;
			table.shimmerHorizontalOn(touchY);
		}
		else if ((table.showRowGrabbers) && (x<table.getRowGrabberWidth())) {
			// press within the grabber strip at the left
			if (((TableTool)tool).getIsBetweenRowColumnTool() && !isArrowEquivalent)
				possibleTouch = table.findRowBeforeBetween(y);
			table.rowTouched(isArrowEquivalent, possibleTouch,regionInCellH, regionInCellV,modifiers);
			if (tool != null && isArrowEquivalent && table.getUserMoveRow() && table.isRowSelected(possibleTouch) && !MesquiteEvent.shiftKeyDown(modifiers) && !MesquiteEvent.commandOrControlKeyDown(modifiers)) {
				touchY=y;
				lastY = MesquiteInteger.unassigned;; // NOTE(review): stray second semicolon (harmless empty statement)
				touchRow=possibleTouch;
				//table.shimmerHorizontalOn(touchY);
			}
		}
		else if (isArrowEquivalent) {
			table.rowNameTouched(possibleTouch,regionInCellH, regionInCellV, modifiers,clickCount);
		}
		else if (tool!=null && ((TableTool)tool).getWorksOnRowNames()) {
			// non-arrow tool that operates on row names: record the touch and forward it
			if (((TableTool)tool).getIsBetweenRowColumnTool())
				possibleTouch = table.findRowBeforeBetween(y);
			touchY=y;
			lastY = y;
			touchRow=possibleTouch;
			table.rowNameTouched(possibleTouch,regionInCellH, regionInCellV, modifiers,clickCount);
		}
	}
	else if (possibleTouch==-2 && ((TableTool)tool).getWorksBeyondLastRow())
		table.rowTouched(isArrowEquivalent,possibleTouch,regionInCellH, regionInCellV,modifiers); // press below the last row
	else if (tool != null && tool.isArrowTool()){
		// arrow tool pressed outside any row: dismiss edits and clear the selection
		table.offAllEdits();
		if (table.anythingSelected()) {
			table.deselectAllNotify();
			table.repaintAll();
		}
	}
}
/*...............................................................................................................*/
/** Handles dragging after a press recorded by mouseDown: moves the shimmer
 * line during a row move/resize, or forwards the drag to a tool that works on
 * row names (optionally emphasizing the row dragged over). */
public void mouseDrag(int modifiers, int x, int y, MesquiteTool tool) {
	if (touchRow>=0 && tool != null)
		if (((TableTool)tool).isArrowKeyOnRow(x,table)) {
			// NOTE(review): these branches test the *column* adjust/move settings
			// even though this panel handles rows -- confirm intent; both
			// branches perform the same shimmer update anyway
			if (table.getUserAdjustColumn()==MesquiteTable.RESIZE) {
				table.shimmerHorizontalOff(lastY);
				table.shimmerHorizontalOn(y);
				lastY=y;
			}
			else if (table.getUserMoveColumn()) {
				table.shimmerHorizontalOff(lastY);
				table.shimmerHorizontalOn(y);
				lastY=y;
			}
		}
		else if (((TableTool)tool).getWorksOnRowNames()) {
			int dragRow = findRow(y);
			int regionInCellH = findRegionInCellH(x);
			int regionInCellV =findRegionInCellV(y);
			((TableTool)tool).cellDrag(-1,dragRow,regionInCellH,regionInCellV,modifiers);
			if (((TableTool)tool).getEmphasizeRowsOnMouseDrag()){
				table.emphasizeRow(previousRowDragged,dragRow, touchRow, false, Color.blue);
				previousRowDragged = dragRow;
			}
		}
}
/*...............................................................................................................*/
/** Handles mouse release: completes a row move (dropping the selected rows at
 * the release position) or forwards the drop to a tool that works on row
 * names. Only acts if a row was recorded at mouse-down. */
public void mouseUp(int modifiers, int x, int y, MesquiteTool tool) {
	if (touchRow>=0 && tool != null)
		if (((TableTool)tool).isArrowKeyOnRow(x,table)) {
			if (!table.anyRowSelected()) {
				if (table.getUserAdjustRow()==MesquiteTable.RESIZE) {
					/*table.shimmerVerticalOff(lastX);
					int newRH = table.rowHeights[touchRow] + x-touchX;
					if (newRH > 16) {
						table.setRowHeight(touchRow, newRH);
						table.rowHeightsAdjusted.setBit(touchRow);
						table.repaintAll();
					}*/
				}
				if (table.getUserMoveRow())
					table.shimmerHorizontalOff(lastY);
			}
			/*@@@*/
			else {
				if (table.getUserMoveRow()) {
					table.shimmerHorizontalOff(lastY);
					int dropRow = table.findRowBeforeBetween(y);
					if (dropRow == -2)
						dropRow = table.getNumRows(); // released past the last row: drop at the end
					if (dropRow != touchRow && (dropRow != touchRow-1) && !table.isRowSelected(dropRow)) //don't move dropped on row included in selection
						table.selectedRowsDropped(dropRow);
				}
				else if (table.getUserAdjustRow()==MesquiteTable.RESIZE)
					;//table.shimmerVerticalOff(lastX);
			}
		}
		else if (((TableTool)tool).getWorksOnRowNames()) {
			int dropRow = findRow(y);
			int regionInCellH = findRegionInCellH(x);
			int regionInCellV =findRegionInCellV(y);
			((TableTool)tool).cellDropped(-1,dropRow,regionInCellH,regionInCellV,modifiers);
		}
}
/*...............................................................................................................*/
/** Called when the mouse leaves the panel: clears any footnote annotation
 * shown for a row, restores the default cursor, and notifies the table. */
public void mouseExited(int modifiers, int x, int y, MesquiteTool tool) {
	if (!(table.editingAnything() || table.singleTableCellSelected()) && tool != null && tool.isArrowTool())
		setWindowAnnotation("", null);
	setCursor(Cursor.getDefaultCursor());
	int row = findRow(y);
	table.mouseExitedCell(modifiers, -1, -1, row, -1, tool);
}
/*...............................................................................................................*/
/** Chooses the cursor to show for the given mouse position and tool, and (for
 * the arrow tool over a row) updates the window annotation with the row's
 * footnote, if any. A disabled cursor is shown where the tool cannot act. */
public void setCurrentCursor(int modifiers, int x, int row, MesquiteTool tool) {
	if (tool == null || !(tool instanceof TableTool))
		setCursor(getDisabledCursor());
	else if (row>=0 && row<table.numRowsTotal) {
		if (((TableTool)tool).isArrowKeyOnRow(x,table)) {
			setCursor(table.getHandCursor());
			// show the row's footnote unless a row move could be starting here
			if (!(table.getUserMoveRow() && table.isRowSelected(row) && !MesquiteEvent.shiftKeyDown(modifiers) && !MesquiteEvent.controlKeyDown(modifiers))) {
				if (!(table.editingAnything() || table.singleTableCellSelected())) {
					String s = table.getRowComment(row);
					if (s!=null)
						setWindowAnnotation(s, "Footnote above refers to " + table.getRowNameText(row));
					else
						setWindowAnnotation("", null);
				}
			}
		}
		else if (((TableTool)tool).getWorksOnRowNames())
			setCursor(tool.getCursor());
		else
			setCursor(getDisabledCursor());
	}
	else if (((TableTool)tool).getWorksBeyondLastRow() && (row==-2))
		setCursor(tool.getCursor()); // below the last row, but the tool works there
	else
		setCursor(getDisabledCursor());
}
/*...............................................................................................................*/
/** Called when the mouse enters the panel: sets the cursor for the row under
 * the mouse and notifies the table. Does nothing if there is no table yet. */
public void mouseEntered(int modifiers, int x, int y, MesquiteTool tool) {
	if (table == null)
		return;
	int row = findRow(y);
	setCurrentCursor(modifiers, x, row, tool);
	table.mouseInCell(modifiers, -1, -1, row, -1, tool);
}
/*...............................................................................................................*/
/** Called when the mouse moves within the panel: sets the cursor for the row
 * under the mouse and notifies the table. Guards against a null table for
 * consistency with mouseEntered, since both findRow and mouseInCell use it. */
public void mouseMoved(int modifiers, int x, int y, MesquiteTool tool) {
	if (table == null)
		return;
	int row = findRow(y);
	setCurrentCursor(modifiers, x, row, tool);
	table.mouseInCell(modifiers, -1, -1, row, -1, tool);
}
/*...............................................................................................................*/
/** Handles a Tab press while a row name is being edited: consumes the event
 * and moves editing into the first matrix cell of the edited row. Does nothing
 * if no edit is in progress or matrix cells are not editable. */
public void tabPressed(KeyEvent e){
	if (!getEditing())
		return;
	if (table.getCellsEditable()){
		e.consume();
		table.editMatrixCell(0, editField.getRow());
	}
}
}
| lgpl-3.0 |
wmaddisn/MesquiteCore | Source/mesquite/charMatrices/ReshuffleCharacter/ReshuffleCharacter.java | 12391 | /* Mesquite source code. Copyright 1997 and onward, W. Maddison and D. Maddison.
Disclaimer: The Mesquite source code is lengthy and we are few. There are no doubt inefficiencies and goofs in this code.
The commenting leaves much to be desired. Please approach this source code with the spirit of helping out.
Perhaps with your help we can be more than a few, and make Mesquite better.
Mesquite is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY.
Mesquite's web site is http://mesquiteproject.org
This source code and its compiled class files are free and modifiable under the terms of
GNU Lesser General Public License. (http://www.gnu.org/copyleft/lesser.html)
*/
package mesquite.charMatrices.ReshuffleCharacter;
/*~~ */
import java.util.*;
import java.awt.*;
import mesquite.lib.*;
import mesquite.lib.characters.*;
import mesquite.lib.duties.*;
/* ======================================================================== */
/** Supplies pseudo-characters that are random reshufflings (across taxa) of an
 * existing character chosen from a source matrix. Each "character" handed out
 * is one shuffle replicate of the chosen original character. */
public class ReshuffleCharacter extends CharacterSource {
	public void getEmployeeNeeds(){ //This gets called on startup to harvest information; override this and inside, call registerEmployeeNeed
		EmployeeNeed e = registerEmployeeNeed(DataSearcher.class, getName() + " needs a source of the original character to be reshuffled.",
		"The source of the original character is chosen initially.");
	}
	long currentShuffle=0; // which shuffle replicate of the chosen character is current
	int currentOriginalChar = -1; //to force a query
	MatrixSourceCoord dataTask; // supplies the matrix containing the character to reshuffle
	MCharactersDistribution matrix; // the current source matrix (null until obtained)
	CharacterDistribution states; // the reshuffled copy handed out to employers
	int currentDataSet = 0;
	RandomBetween randomTaxon; // random number generator driving the shuffles
	MesquiteLong seed;
	Taxa oldTaxa =null; // taxa block for which matrix was last obtained
	long originalSeed=System.currentTimeMillis(); //0L;
	/*.................................................................................................................*/
	/** Hires the matrix source (respecting any compatibility condition) and sets
	 * up the random seed and menu items. Returns false if no source is found. */
	public boolean startJob(String arguments, Object condition, boolean hiredByName) {
		if (condition!=null)
			dataTask = (MatrixSourceCoord)hireCompatibleEmployee( MatrixSourceCoord.class, condition, "Source of matrices for Reshuffle Character");
		else
			dataTask = (MatrixSourceCoord)hireEmployee( MatrixSourceCoord.class, "Source of matrices for Reshuffle Character");
		if (dataTask == null)
			return sorry(getName() + " couldn't start because no source of character matrices was obtained.");
		currentShuffle = 0;
		randomTaxon= new RandomBetween(originalSeed);
		seed = new MesquiteLong(1);
		seed.setValue(originalSeed);
		addMenuItem("Shuffle Next Character", makeCommand("shuffleNext", this));
		addMenuItem("Shuffle Previous Character", makeCommand("shufflePrevious", this));
		addMenuItem("Choose Character to Shuffle", makeCommand("chooseCharacter", this));
		addMenuItem("Set Seed (Reshuffle character)...", makeCommand("setSeed", this));
		return true;
	}
	/** Quits when the matrix source quits; this module cannot work without it. */
	public void employeeQuit(MesquiteModule m){
		iQuit();
	}
	/*.................................................................................................................*/
	/** Records the module's state (character source, chosen character, current
	 * shuffle, and seed) for saving in a file. */
	public Snapshot getSnapshot(MesquiteFile file) {
		Snapshot temp = new Snapshot();
		temp.addLine("getCharacterSource ", dataTask);
		temp.addLine("setCharacter " + CharacterStates.toExternal(currentOriginalChar));
		// NOTE(review): toExternal is applied here, but the "setShuffle" command
		// reads the value back without toInternal -- confirm the round-trip is
		// as intended before changing either side
		temp.addLine("setShuffle " + CharacterStates.toExternal((int)currentShuffle));
		temp.addLine("setSeed " + originalSeed);
		return temp;
	}
	/*.................................................................................................................*/
	/** Responds to scripting/menu commands: stepping to the next/previous
	 * character, choosing a character or shuffle number, setting the seed, and
	 * querying the character source. */
	public Object doCommand(String commandName, String arguments, CommandChecker checker) {
		if (checker.compare(this.getClass(), "Does shuffle of the next character", null, commandName, "shuffleNext")) {
			if (currentOriginalChar>=matrix.getNumChars()-1)
				currentOriginalChar=0; // wrap around to the first character
			else
				currentOriginalChar++;
			currentShuffle = 0;
			((AdjustableDistribution)states).setParentCharacter(currentOriginalChar);
			parametersChanged();
		}
		else if (checker.compare(this.getClass(), "Indicates which shuffle of the character", null, commandName, "setShuffle")) {
			long s = MesquiteLong.fromString(parser.getFirstToken(arguments));
			if (s >= 0 && MesquiteLong.isCombinable(s) && s != currentShuffle){
				currentShuffle = s;
				parametersChanged();
			}
		}
		else if (checker.compare(this.getClass(), "Returns the source of matrices on which to do ordinations", null, commandName, "setCharacterSource")) { //TEMPORARY for data files using old system without coordinators
			if (dataTask != null)
				return dataTask.doCommand(commandName, arguments, checker);
		}
		else if (checker.compare(this.getClass(), "Returns employee that is character source", null, commandName, "getCharacterSource")) {
			return dataTask;
		}
		else if (checker.compare(this.getClass(), "Sets the random number seed to that passed", "[long integer seed]", commandName, "setSeed")) {
			long s = MesquiteLong.fromString(parser.getFirstToken(arguments));
			if (!MesquiteLong.isCombinable(s)){
				// no usable seed in the arguments: ask the user
				s = MesquiteLong.queryLong(containerOfModule(), "Random number seed", "Enter an integer value for the random number seed for character reshuffling", originalSeed);
			}
			if (MesquiteLong.isCombinable(s)){
				originalSeed = s;
				parametersChanged(); //?
			}
		}
		else if (checker.compare(this.getClass(), "Queries user about which character to shuffle", null, commandName, "chooseCharacter")) {
			int ic = chooseCharacter(matrix);
			if (ic >= 0 && MesquiteInteger.isCombinable(ic)) {
				currentOriginalChar = ic;
				currentShuffle = 0;
				((AdjustableDistribution)states).setParentCharacter(currentOriginalChar);
				parametersChanged();
			}
		}
		else if (checker.compare(this.getClass(), "Sets which character to shuffle", "[character number]", commandName, "setCharacter")) {
			MesquiteInteger pos = new MesquiteInteger(0);
			int icNum = MesquiteInteger.fromString(arguments, pos);
			seed.setValue(originalSeed);
			if (!MesquiteInteger.isCombinable(icNum))
				return null;
			int ic = CharacterStates.toInternal(icNum);
			if (matrix == null || ((ic>=0) && (ic<=matrix.getNumChars()-1))) {
				currentOriginalChar = ic;
				currentShuffle = 0;
				if (states !=null)
					((AdjustableDistribution)states).setParentCharacter(currentOriginalChar);
				parametersChanged();
			}
		}
		else if (checker.compare(this.getClass(), "Does shuffle of the previous character", null, commandName, "shufflePrevious")) {
			if (currentOriginalChar<=0)
				currentOriginalChar=matrix.getNumChars()-1; // wrap around to the last character
			else
				currentOriginalChar--;
			currentShuffle = 0;
			((AdjustableDistribution)states).setParentCharacter(currentOriginalChar);
			parametersChanged();
		}
		else
			return super.doCommand(commandName, arguments, checker);
		return null;
	}
	/** Asks the user which character to shuffle, listing the characters by name
	 * when the matrix's parent data object is available, otherwise asking for a
	 * character number. FIX: the first condition previously read
	 * "matrix == null || matrix.getParentData()!=null", which dereferenced a
	 * null matrix inside the branch; it now requires matrix != null. */
	private int chooseCharacter(MCharactersDistribution matrix){
		if (matrix != null && matrix.getParentData()!=null) {
			CharacterData data = matrix.getParentData();
			String[] charNames = new String[data.getNumChars()];
			for (int i=0; i<data.getNumChars(); i++)
				charNames[i]= data.getCharacterName(i);
			return ListDialog.queryList(containerOfModule(), "Choose character", "Choose character to shuffle", MesquiteString.helpString,charNames, 0);
		}
		return MesquiteInteger.queryInteger(containerOfModule(), "Choose character", "Number of character to shuffle ", 1);
	}
	/*.................................................................................................................*/
	/** Returns shuffle replicate ic of the chosen original character: a clone of
	 * the original distribution whose states are permuted across taxa with a
	 * Fisher-Yates-style pass driven by a seed derived from originalSeed and the
	 * replicate number. Returns null if no matrix or character is available. */
	public CharacterDistribution getCharacter(Taxa taxa, int ic) {
		dataCheck(taxa);
		if (matrix == null)
			return null;
		currentShuffle = ic;
		if (currentOriginalChar<matrix.getNumChars()&& currentOriginalChar>=0 && currentShuffle>=0) {
			CharacterDistribution chs = matrix.getCharacterDistribution(currentOriginalChar);
			if (chs == null)
				return null;
			states = (CharacterDistribution)chs.getAdjustableClone();
			if (states == null)
				return null;
			((AdjustableDistribution)states).setParentCharacter(currentOriginalChar);
			String mName = "";
			if (matrix.getParentData() != null)
				mName = " of " + matrix.getParentData().getName();
			states.setName( "Shuffle " + currentShuffle + " of character " + CharacterStates.toExternal(currentOriginalChar) + mName);
			// derive a replicate-specific seed: advance the generator currentShuffle
			// times from the original seed, then reseed from its next value
			randomTaxon.setSeed(originalSeed);
			for (int i=0; i < currentShuffle; i++)
				randomTaxon.nextInt();
			randomTaxon.setSeed(randomTaxon.nextInt() + 1); //v. 1. 1 Oct 05, modified by adding 1 to prevent adjacent from simply being offsets
			int nT1 = states.getNumTaxa()-1;
			for (int i=0; i < nT1; i++) {
				int sh = randomTaxon.randomIntBetween(i, nT1);
				if (i!=sh)
					((AdjustableDistribution)states).tradeStatesBetweenTaxa(i, sh);
			}
			return states;
		}
		else
			return null;
	}
	/*.................................................................................................................*/
	/** Returns the number of characters available: infinite (any number of
	 * shuffle replicates can be produced), or 0 if no matrix is available. */
	public int getNumberOfCharacters(Taxa taxa) {
		dataCheck(taxa);
		if (matrix == null)
			return 0;
		else
			return MesquiteInteger.infinite;
	}
	/*.................................................................................................................*/
	/** Discards the cached matrix when the matrix source changes, so it is
	 * re-obtained on the next request. */
	public void employeeParametersChanged(MesquiteModule employee, MesquiteModule source, Notification notification) {
		matrix = null;
		super.employeeParametersChanged( employee, source, notification);
	}
	/** Called to provoke any necessary initialization. This helps prevent the module's intialization queries to the user from
	happening at inopportune times (e.g., while a long chart calculation is in mid-progress)*/
	public void initialize(Taxa taxa){
		dataCheck(taxa);
	}
	/*.................................................................................................................*/
	/** Ensures the matrix is current for the given taxa, (re)obtaining it from
	 * the source and, if needed, asking the user which character to shuffle. */
	private void dataCheck(Taxa taxa) {
		if (matrix==null || oldTaxa != taxa) {
			matrix = dataTask.getCurrentMatrix(taxa);
			if (matrix == null)
				currentOriginalChar = 0;
			else if (currentOriginalChar<0 || currentOriginalChar>= matrix.getNumChars()) {
				if (!MesquiteThread.isScripting())
					currentOriginalChar = chooseCharacter(matrix);
				if (!MesquiteInteger.isCombinable(currentOriginalChar) || currentOriginalChar<0 || currentOriginalChar>=matrix.getNumChars())
					currentOriginalChar = 0; // fall back to the first character
			}
			currentShuffle = 0;
			oldTaxa = taxa;
		}
	}
	/*.................................................................................................................*/
	/** returns the name of character ic*/
	public String getCharacterName(Taxa taxa, int ic){
		return "Shuffle " + ic + " of character ";
	}
	/*.................................................................................................................*/
	/** Returns a one-line description of the current settings for display. */
	public String getParameters() {
		if (matrix==null) return "";
		return "Character reshuffle: " + matrix.getName() + ". [seed: " + originalSeed + "]";
	}
	/*.................................................................................................................*/
	public String getName() {
		return "Reshuffle Character";
	}
	/*.................................................................................................................*/
	public boolean showCitation() {
		return true;
	}
	/*.................................................................................................................*/
	public boolean isPrerelease() {
		return false;
	}
	/*.................................................................................................................*/
	/** returns an explanation of what the module does.*/
	public String getExplanation() {
		return "Supplies characters that are reshufflings of an existing character." ;
	}
	/*.................................................................................................................*/
	public CompatibilityTest getCompatibilityTest() {
		return new CharacterStateTest();
	}
}
| lgpl-3.0 |
magenta-aps/db-preservation-toolkit | dbptk-model/src/main/java/com/databasepreservation/model/data/BinaryCell.java | 2407 | /**
*
*/
package com.databasepreservation.model.data;
import java.io.InputStream;
import java.sql.Blob;
import com.databasepreservation.common.BlobInputStreamProvider;
import com.databasepreservation.common.InputStreamProvider;
import com.databasepreservation.common.TemporaryPathInputStreamProvider;
import com.databasepreservation.model.exception.ModuleException;
/**
* Represents a cell of BLOB type
*
* @author Luis Faria <lfaria@keep.pt>
* @author Bruno Ferreira <bferreira@keep.pt>
*/
/**
 * A cell of BLOB type. The binary content is never held in memory by this
 * class; it is obtained on demand through a wrapped {@link InputStreamProvider}.
 *
 * @author Luis Faria <lfaria@keep.pt>
 * @author Bruno Ferreira <bferreira@keep.pt>
 */
public class BinaryCell extends Cell implements InputStreamProvider {

  /** Source of the binary content; all stream operations delegate to it. */
  private final InputStreamProvider provider;

  /**
   * Creates a binary cell backed directly by a SQL {@link Blob}; the blob's
   * value is only read when an input stream is requested.
   *
   * @param id
   *          the cell id
   * @param blob
   *          the SQL Blob object, where the blob value will be read from
   */
  public BinaryCell(String id, Blob blob) {
    super(id);
    this.provider = new BlobInputStreamProvider(blob);
  }

  /**
   * Creates a binary cell whose contents are copied from the given stream to a
   * temporary file, so they can be re-read later without keeping the stream
   * open. The stream is closed in all cases.
   *
   * @param id
   *          the cell id
   * @param inputStream
   *          to read the data. It will be closed.
   * @throws ModuleException
   *           if some IO problem occurs. The stream will still be closed.
   */
  public BinaryCell(String id, InputStream inputStream) throws ModuleException {
    super(id);
    this.provider = new TemporaryPathInputStreamProvider(inputStream);
  }

  /**
   * Creates a binary cell that simply wraps an existing stream provider
   * (whilst also providing Cell functionality).
   *
   * @param id
   *          the cell id
   * @param inputStreamProvider
   *          the inputStream provider used to read BLOB data
   */
  public BinaryCell(String id, InputStreamProvider inputStreamProvider) {
    super(id);
    this.provider = inputStreamProvider;
  }

  /** Opens a fresh stream over the binary content; delegates to the provider. */
  @Override
  public InputStream createInputStream() throws ModuleException {
    return this.provider.createInputStream();
  }

  /** Releases any resources (e.g. temporary files) held by the provider. */
  @Override
  public void cleanResources() {
    this.provider.cleanResources();
  }

  /** Returns the size of the binary content in bytes; delegates to the provider. */
  @Override
  public long getSize() throws ModuleException {
    return this.provider.getSize();
  }
}
| lgpl-3.0 |
AndriyGol/AndroidOTR | src/main/java/net/java/otr4j/OtrEngineListener.java | 462 | package net.java.otr4j;
import net.java.otr4j.session.SessionID;
/**
 * This interface should be implemented by the host application. It notifies
 * about session status changes.
 *
 * @author George Politis
 */
public interface OtrEngineListener {

    /**
     * Notifies that the status of the given session has changed.
     *
     * @param sessionID the session whose status changed
     */
    void sessionStatusChanged(SessionID sessionID);

    /**
     * Notifies that multiple OTR instances were detected for the given session.
     *
     * @param sessionID the session for which multiple instances were detected
     */
    void multipleInstancesDetected(SessionID sessionID);

    /**
     * Notifies that the outgoing session associated with the given session ID
     * has changed.
     *
     * @param sessionID the session whose outgoing session changed
     */
    void outgoingSessionChanged(SessionID sessionID);
}
| lgpl-3.0 |
fingi/csipsimple | src/org/pjsip/pjsua/SWIGTYPE_p_f_int_p_q_const__pjmedia_sdp_session_p_void_p_enum_pjsip_status_code_p_pjsua_call_setting__void.java | 1067 | /* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 2.0.4
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package org.pjsip.pjsua;
// Opaque SWIG proxy for a native C function-pointer type; it holds only the
// address of the underlying native object and cannot be invoked from Java.
public class SWIGTYPE_p_f_int_p_q_const__pjmedia_sdp_session_p_void_p_enum_pjsip_status_code_p_pjsua_call_setting__void {
  // Address of the wrapped native object; 0 means "no native object".
  private long swigCPtr;

  // Wraps an existing native pointer (the boolean is a SWIG calling convention
  // marker and is ignored).
  protected SWIGTYPE_p_f_int_p_q_const__pjmedia_sdp_session_p_void_p_enum_pjsip_status_code_p_pjsua_call_setting__void(long cPtr, boolean futureUse) {
    swigCPtr = cPtr;
  }

  // Creates a wrapper around a null native pointer.
  protected SWIGTYPE_p_f_int_p_q_const__pjmedia_sdp_session_p_void_p_enum_pjsip_status_code_p_pjsua_call_setting__void() {
    swigCPtr = 0;
  }

  // Returns the native pointer held by obj, or 0 if obj is null.
  protected static long getCPtr(SWIGTYPE_p_f_int_p_q_const__pjmedia_sdp_session_p_void_p_enum_pjsip_status_code_p_pjsua_call_setting__void obj) {
    return (obj == null) ? 0 : obj.swigCPtr;
  }
}
| lgpl-3.0 |
dresden-ocl/dresdenocl | plugins/org.dresdenocl.pivotmodel/src/org/dresdenocl/pivotmodel/NamedElement.java | 7696 | /**
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright (C) 2007 Matthias Braeuer (braeuer.matthias@web.de). *
* All rights reserved. *
* *
* This work was done as a project at the Chair for Software Technology, *
* Dresden University Of Technology, Germany (http://st.inf.tu-dresden.de). *
* It is understood that any modification not identified as such is not *
* covered by the preceding statement. *
* *
* This work is free software; you can redistribute it and/or modify it *
* under the terms of the GNU Library General Public License as published *
* by the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This work is distributed in the hope that it will be useful, but WITHOUT *
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or *
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public *
* License for more details. *
* *
* You should have received a copy of the GNU Library General Public License *
* along with this library; if not, you can view it online at *
* http://www.fsf.org/licensing/licenses/gpl.html. *
* *
* To submit a bug report, send a comment, or get the latest news on this *
* project, please visit the website: http://dresden-ocl.sourceforge.net. *
* For more information on OCL and related projects visit the OCL Portal: *
* http://st.inf.tu-dresden.de/ocl *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* $Id$
*/
package org.dresdenocl.pivotmodel;
import java.util.List;
import org.eclipse.emf.ecore.EObject;
/**
* <!-- begin-user-doc --> A representation of the model object '
* <em><b>Named Element</b></em>'. <!-- end-user-doc -->
*
* <!-- begin-model-doc -->
* <p>
* A <code>NamedElement</code> represents elements that
* may have a name. The name is used for identification of
* the named element within the elements owned by its owner.
* A named element also has a qualified name that allows it to
* be unambiguously identified within a hierarchy of nested
* named elements. <code>NamedElement</code> is an
* abstract metaclass.
* </p>
* <!-- end-model-doc -->
*
* <p>
* The following features are supported:
* <ul>
* <li>{@link org.dresdenocl.pivotmodel.NamedElement#getName <em>Name</em>}</li>
* <li>{@link org.dresdenocl.pivotmodel.NamedElement#getQualifiedName <em>Qualified Name</em>}</li>
* <li>{@link org.dresdenocl.pivotmodel.NamedElement#getOwner <em>Owner</em>}</li>
* </ul>
* </p>
*
* @see org.dresdenocl.pivotmodel.PivotModelPackage#getNamedElement()
* @model abstract="true"
* @generated
*/
public interface NamedElement extends EObject {
	/**
	 * Returns the value of the '<em><b>Name</b></em>' attribute.
	 * The default value is <code>""</code>.
	 * <!-- begin-user-doc -->
	 * Note: because the declared default is the empty string, callers should
	 * treat <code>""</code> (rather than <code>null</code>) as "no name set".
	 * <!-- end-user-doc -->
	 * <!-- begin-model-doc -->
	 * <p>
	 * Represents the name of the <code>NamedElement</code>.
	 * </p>
	 * <!-- end-model-doc -->
	 * @return the value of the '<em>Name</em>' attribute.
	 * @see #setName(String)
	 * @see org.dresdenocl.pivotmodel.PivotModelPackage#getNamedElement_Name()
	 * @model default="" dataType="org.dresdenocl.datatypes.String" required="true" ordered="false"
	 * @generated
	 */
	String getName();
	/**
	 * Sets the value of the '{@link org.dresdenocl.pivotmodel.NamedElement#getName <em>Name</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * Setting the name affects the derived {@link #getQualifiedName() qualified name}
	 * of this element and of all elements owned by it.
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Name</em>' attribute.
	 * @see #getName()
	 * @generated
	 */
	void setName(String value);
	/**
	 * Returns the value of the '<em><b>Qualified Name</b></em>' attribute. <!--
	 * begin-user-doc --> Derived and read-only: there is no corresponding setter,
	 * and the value is computed on each access (volatile). <!-- end-user-doc -->
	 * <!-- begin-model-doc -->
	 * <p>
	 * A qualified name allows the <code>NamedElement to be
	 * identified within a hierarchy of nested elements. It is
	 * constructed from the names of the owners starting at the
	 * root of the hierarchy and ending with the name of the
	 * <code>NamedElement</code> itself. This is a derived attribute.
	 * </p>
	 * <!-- end-model-doc -->
	 *
	 * @return the value of the '<em>Qualified Name</em>' attribute.
	 * @see org.dresdenocl.pivotmodel.PivotModelPackage#getNamedElement_QualifiedName()
	 * @model dataType="org.dresdenocl.datatypes.String" transient="true"
	 *        changeable="false" volatile="true" derived="true"
	 * @generated
	 */
	String getQualifiedName();
	/**
	 * Returns the value of the '<em><b>Owner</b></em>' reference. <!--
	 * begin-user-doc --> May be <code>null</code> for root elements: per the
	 * model documentation below, arbitrary named elements are not necessarily
	 * located in a namespace. <!-- end-user-doc --> <!-- begin-model-doc -->
	 * <p>
	 * A <code>NamedElement</code> may have an owner whose name is used for
	 * constructing the qualified name of the <code>NamedElement</code>. This is a
	 * derived attribute.
	 *
	 * The concept of an owner was introduced in the Pivot Model to facilitate the
	 * computation of qualified names which are not available in UML::Core::Basic.
	 * However, the Pivot Model does not extend the concept of a {@link Namespace}
	 * to {@link Type types} and {@link Operation operations} as in the complete
	 * UML 2.0 specification. That's why arbitrary named elements are not
	 * necessarily located in a namespace.
	 * </p>
	 * <!-- end-model-doc -->
	 *
	 * @return the value of the '<em>Owner</em>' reference.
	 * @see org.dresdenocl.pivotmodel.PivotModelPackage#getNamedElement_Owner()
	 * @model resolveProxies="false" transient="true" changeable="false"
	 *        volatile="true"
	 * @generated
	 */
	NamedElement getOwner();
	/**
	 * <!-- begin-user-doc --> Unlike {@link Object#clone()}, this variant is
	 * declared on the interface with a covariant <code>NamedElement</code>
	 * return type and a checked {@link CloneNotSupportedException}.
	 * <!-- end-user-doc --> <!-- begin-model-doc -->
	 * <p>
	 * Creates a copy of this <code>NamedElement</code>. In the Pivot Model, all
	 * clone operations are intended to perform deep cloning (as opposed to a
	 * shallow clone). That means, that all contained elements (i.e., all elements
	 * for which this <code>NamedElement</code> is the owner) have to be cloned as
	 * well. Cloning support is necessary for binding {@link GenericElement}s
	 * because such an element needs to be cloned first before its
	 * {@link TypeParameter}s can be bound.
	 * </p>
	 * <!-- end-model-doc -->
	 *
	 * @model required="true"
	 *        exceptions="org.dresdenocl.pivotmodel.CloneNotSupportedException"
	 * @generated
	 */
	NamedElement clone() throws CloneNotSupportedException;
	/**
	 * <!-- begin-user-doc --> The list is ordered from the outermost namespace
	 * down to this element's own name; it is declared non-unique because nested
	 * namespaces may legally share a name. <!-- end-user-doc --> <!-- begin-model-doc -->
	 * <p>
	 * Returns the name of this {@link NamedElement} as a {@link java.util.List}
	 * of {@link java.lang.String}s containing the name of the name spaces and of
	 * this {@link NamedElement}.
	 * </p>
	 * <!-- end-model-doc -->
	 *
	 * @model kind="operation" dataType="org.dresdenocl.datatypes.String"
	 *        unique="false"
	 * @generated
	 */
	List<String> getQualifiedNameList();
} // NamedElement
| lgpl-3.0 |
ybk/nota.oxygen | addins/src/nota/oxygen/epub/notes/NoteRefRemover.java | 6848 | package nota.oxygen.epub.notes;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import java.beans.*;
import java.io.File;
import java.util.HashSet;
import java.util.Set;
import nota.oxygen.epub.EpubUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import de.schlichtherle.truezip.file.TArchiveDetector;
import de.schlichtherle.truezip.file.TConfig;
import de.schlichtherle.truezip.file.TFile;
import de.schlichtherle.truezip.fs.archive.zip.JarDriver;
import de.schlichtherle.truezip.socket.sl.IOPoolLocator;
/**
 * Standalone Swing tool that removes noteref anchors ({@code <a>} elements)
 * from the footnote/rearnote list items of one XHTML content document inside
 * an EPUB, then writes the modified document back into the EPUB archive.
 * <p>
 * Command-line arguments: {@code args[0]} = path of the EPUB file,
 * {@code args[1]} = extraction folder, {@code args[2]} = name of the content
 * document to process.
 */
@SuppressWarnings("serial")
public class NoteRefRemover extends JPanel implements ActionListener, PropertyChangeListener {
	private static JFrame frame;
	private JButton startButton;
	private JTextArea taskOutput;
	private Task task;
	private static String fileName = "";
	/** Set to true when at least one list item could not be processed. */
	public static boolean ERRORS_FOUND;

	/** Background worker that performs the actual EPUB modification off the EDT. */
	class Task extends SwingWorker<Void, Void> {
		@Override
		protected Void doInBackground() throws Exception {
			// Prepare the working area: sanity checks, unzip the EPUB, back it up.
			if (!EpubUtils.start(taskOutput))
				return null;
			if (!EpubUtils.unzip(taskOutput))
				return null;
			if (!EpubUtils.backup(taskOutput))
				return null;
			EpubUtils.outputProcess("REMOVING NOTEREFS FROM DOCUMENT", true, taskOutput);
			Document doc = EpubUtils.createDocument(new File(EpubUtils.EPUB_FOLDER + File.separator + fileName), taskOutput);
			if (doc == null) {
				return null;
			}
			NodeList liNodeList = doc.getDocumentElement().getElementsByTagName("li");
			for (int i = 0; i < liNodeList.getLength(); i++) {
				Element liElement = (Element) liNodeList.item(i);
				NodeList pNodeList = liElement.getElementsByTagName("p");
				// A note list item must carry an id and an epub:type of footnote/rearnote;
				// anything else is reported and skipped.
				String id = liElement.getAttribute("id");
				if (id == null || id.equals("")) {
					EpubUtils.outputMessage(taskOutput, "Cannot remove noteref from list element " + liElement.getTextContent() + ", id not found");
					ERRORS_FOUND = true;
					continue;
				}
				String epubType = liElement.getAttribute("epub:type");
				if (epubType == null || epubType.equals("")) {
					EpubUtils.outputMessage(taskOutput, "Cannot remove noteref from list element with id " + id + ", epub:type not found");
					ERRORS_FOUND = true;
					continue;
				}
				if (!epubType.equals("footnote") && !epubType.equals("rearnote")) {
					EpubUtils.outputMessage(taskOutput, "Cannot remove noteref from list element with id " + id + ", epub:type should be either footnote or rearnote");
					ERRORS_FOUND = true;
					continue;
				}
				if (pNodeList.getLength() == 0) {
					// Note text sits directly in the <li>: strip anchors from it.
					removeAnchors(liElement);
				} else if (pNodeList.getLength() == 1) {
					// Note text is wrapped in a single <p>: strip anchors from that paragraph.
					removeAnchors((Element) pNodeList.item(0));
				} else {
					EpubUtils.outputMessage(taskOutput, "Cannot remove noteref from list element, too many paragraphs");
					ERRORS_FOUND = true;
					continue;
				}
			}
			if (!EpubUtils.saveDocument(doc, new File(EpubUtils.EPUB_FOLDER + File.separator + fileName), taskOutput))
				return null;
			EpubUtils.outputProcess("MODIFYING EPUB", true, taskOutput);
			// Obtain the global configuration and register a driver so TrueZIP
			// treats ".epub" files as jar-style archives.
			TConfig config = TConfig.get();
			config.setArchiveDetector(new TArchiveDetector("epub", new JarDriver(IOPoolLocator.SINGLETON)));
			// Compute the epub file destination inside the archive.
			String epubPath = EpubUtils.EPUB.getPath();
			String epubFolder = EpubUtils.EPUB_FOLDER.substring(EpubUtils.EPUB_FOLDER.lastIndexOf(File.separator)).replace(File.separator, "");
			TFile destination = new TFile(epubPath + File.separator + epubFolder);
			// Write the modified document back into the archive.
			if (!EpubUtils.addFileToEpub(new TFile(EpubUtils.EPUB_FOLDER + File.separator + fileName), destination, taskOutput))
				return null;
			// Commit changes to the epub file destination.
			if (!EpubUtils.commitChanges(taskOutput))
				return null;
			if (!EpubUtils.finish(taskOutput))
				return null;
			return null;
		}

		/**
		 * Removes every {@code <a>} descendant of the given element.
		 * The anchors are collected into a set first and removed afterwards
		 * because a live NodeList must not be modified while iterating it.
		 * NOTE(review): like the original duplicated loops this replaces, it
		 * calls removeChild on the passed element, which assumes the anchors
		 * are direct children — confirm for deeply nested markup.
		 */
		private void removeAnchors(Element parent) {
			NodeList aNodeList = parent.getElementsByTagName("a");
			System.out.println("notes: " + aNodeList.getLength());
			Set<Node> removeNodes = new HashSet<Node>();
			for (int j = 0; j < aNodeList.getLength(); j++) {
				removeNodes.add(aNodeList.item(j));
			}
			for (Node node : removeNodes) {
				System.out.println("removing node with href " + ((Element) node).getAttribute("href"));
				parent.removeChild(node);
			}
		}

		@Override
		public void done() {
			// Back on the EDT: signal completion and restore the UI state.
			Toolkit.getDefaultToolkit().beep();
			startButton.setEnabled(true);
			setCursor(null);
			EpubUtils.outputMessage(taskOutput, "Done");
		}
	}

	/** Builds the UI: a hidden start button (clicked programmatically) and a log area. */
	public NoteRefRemover() {
		super(new BorderLayout());
		startButton = new JButton("Start");
		startButton.setActionCommand("start");
		startButton.addActionListener(this);
		startButton.setVisible(false);
		taskOutput = new JTextArea(30, 130);
		taskOutput.setMargin(new Insets(5, 5, 5, 5));
		taskOutput.setEditable(false);
		JPanel panel = new JPanel();
		panel.add(startButton);
		add(panel, BorderLayout.PAGE_START);
		add(new JScrollPane(taskOutput), BorderLayout.CENTER);
		setBorder(BorderFactory.createEmptyBorder(20, 20, 20, 20));
		// Kick off the task immediately; the button exists only to drive actionPerformed.
		startButton.doClick();
	}

	@Override
	public void propertyChange(PropertyChangeEvent evt) {
		// No progress reporting is wired up; intentionally empty.
	}

	@Override
	public void actionPerformed(ActionEvent e) {
		// Disable the (hidden) button and show a wait cursor while the task runs.
		startButton.setEnabled(false);
		setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
		task = new Task();
		task.addPropertyChangeListener(this);
		task.execute();
	}

	private static void createAndShowGUI() {
		JComponent newContentPane = new NoteRefRemover();
		newContentPane.setOpaque(true);
		// Typo fix: window title previously read "freom" instead of "from".
		frame = new JFrame("Removing noterefs from " + fileName);
		frame.setContentPane(newContentPane);
		frame.pack();
		frame.setVisible(true);
	}

	public static void main(String[] args) {
		EpubUtils.EPUB = new File(args[0]);
		EpubUtils.EPUB_FOLDER = args[1];
		EpubUtils.prepare("noterefremover", "noterefremove");
		fileName = args[2];
		// Schedule a job for the event-dispatching thread: creating and showing this application's GUI.
		javax.swing.SwingUtilities.invokeLater(new Runnable() {
			public void run() {
				createAndShowGUI();
			}
		});
	}
}
| lgpl-3.0 |
OpenDA-Association/OpenDA | core/native/src/openda/org/costa/CostaOpendaWAQUATestExe.java | 1200 | /* MOD_V2.0
* Copyright (c) 2012 OpenDA Association
* All rights reserved.
*
* This file is part of OpenDA.
*
* OpenDA is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* OpenDA is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with OpenDA. If not, see <http://www.gnu.org/licenses/>.
*/
package org.costa;
import org.openda.application.ApplicationRunner;
import java.io.IOException;
/**
 * Test for COSTA Components, executable version.
 * <p>
 * Thin launcher that flags the run as a test and then delegates to the
 * standard OpenDA application entry point with the same arguments.
 */
public class CostaOpendaWAQUATestExe {
	public static void main(String [] args) throws IOException {
		// Mark the process as a test run before starting the application,
		// so the application runner behaves accordingly.
		ApplicationRunner.setRunningInTest(true);
		org.openda.application.OpenDaApplication.main(args);
		//CostaOpendaWAQUATest.testWaqua_EnKF(args);
	}
}
| lgpl-3.0 |
SoftwareEngineeringToolDemos/FSE-2014-Archie-Smart-IDE | src/archie/editor/commands/UnmarkCodeElementCommand.java | 824 |
package archie.editor.commands;
import org.eclipse.gef.commands.Command;
import archie.model.Tim;
import archie.model.shapes.CodeElement;
import archie.monitoring.MonitoringManager;
/**
 * GEF command that removes the monitoring mark (and its warnings) from a
 * code element, with undo support that re-marks it.
 */
public class UnmarkCodeElementCommand extends Command
{
	// Code element whose mark is removed; must be set via setShape() before execute().
	CodeElement ce = null;
	// Owning TIM model; set via setTim() but not read by this command itself.
	Tim tim = null;
	/**
	 * Creates the command, labelled with the UNMARK action name.
	 */
	public UnmarkCodeElementCommand()
	{
		super(UnmarkCodeElementAction.UNMARK);
	}
	/** Sets the code element to operate on. */
	public void setShape(CodeElement ce)
	{
		this.ce = ce;
	}
	/** Sets the TIM model this command belongs to. */
	public void setTim(Tim inTim)
	{
		this.tim = inTim;
	}
	/** The command only applies to elements that are currently marked. */
	@Override
	public boolean canExecute()
	{
		return ce.isMarked();
	}
	@Override
	public void execute()
	{
		// Unmark the element's path and clear any warnings generated for it.
		MonitoringManager.getIntance().unmarkAndClearWarnings(ce.getAssociatedPath());
	}
	@Override
	public void undo()
	{
		// Remark the path, regenerating its warnings.
		MonitoringManager.getIntance().markAndGenerateWarnings(ce.getAssociatedPath());
	}
}
| lgpl-3.0 |
mbucknell/WQP-WQX-Services | src/test/java/gov/usgs/wma/wqp/dao/count/CountDaoStationKmlIT.java | 3656 | package gov.usgs.wma.wqp.dao.count;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import com.github.springtestdbunit.annotation.DatabaseSetup;
import com.github.springtestdbunit.annotation.DbUnitConfiguration;
import gov.usgs.wma.wqp.CsvDataSetLoader;
import gov.usgs.wma.wqp.dao.CountDao;
import gov.usgs.wma.wqp.dao.NameSpace;
import gov.usgs.wma.wqp.springinit.DBTestConfig;
/**
 * Integration tests for {@link CountDao} counts in the STATION_KML namespace.
 * <p>
 * Runs against the CSV-loaded DBUnit fixture data; all scenarios are invoked
 * from a single harness method so the fixture is loaded once per run.
 */
@SpringBootTest(webEnvironment=WebEnvironment.NONE,
		classes={DBTestConfig.class, CountDao.class})
@DatabaseSetup("classpath:/testData/csv/")
@DbUnitConfiguration(dataSetLoader = CsvDataSetLoader.class)
public class CountDaoStationKmlIT extends BaseStationCountDaoTest {
	// Namespace under test; the scenario implementations live in the base class.
	protected NameSpace nameSpace = NameSpace.STATION_KML;
	@Test
	public void testHarness() {
		// Single-filter scenarios: each call exercises one query parameter.
		activityTest(nameSpace, includeActivity, includeResults);
		analyticalMethodTest(nameSpace, includeActivity, includeResults);
		assemblageTest(nameSpace, includeActivity, includeResults);
		avoidTest(nameSpace, includeActivity, includeResults);
		bboxTest(nameSpace, includeActivity, includeResults);
		characteristicNameTest(nameSpace, includeActivity, includeResults);
		characteristicTypeTest(nameSpace, includeActivity, includeResults);
		countryTest(nameSpace, includeActivity, includeResults);
		countyTest(nameSpace, includeActivity, includeResults);
		emptyParameterTest(nameSpace, includeActivity, includeResults);
		// HUC (hydrologic unit code) filters at every supported precision.
		huc2Test(nameSpace, includeActivity, includeResults);
		huc3Test(nameSpace, includeActivity, includeResults);
		huc4Test(nameSpace, includeActivity, includeResults);
		huc5Test(nameSpace, includeActivity, includeResults);
		huc6Test(nameSpace, includeActivity, includeResults);
		huc7Test(nameSpace, includeActivity, includeResults);
		huc8Test(nameSpace, includeActivity, includeResults);
		huc10Test(nameSpace, includeActivity, includeResults);
		huc12Test(nameSpace, includeActivity, includeResults);
		mimeTypeTest(nameSpace, includeActivity, includeResults);
		minActivitiesTest(nameSpace, includeActivity, includeResults);
		minResultsTest(nameSpace, includeActivity, includeResults);
		nldiSitesTest(nameSpace, includeActivity, includeResults);
		nldiUrlTest(nameSpace, includeActivity, includeResults);
		nullParameterTest(nameSpace, includeActivity, includeResults);
		organizationTest(nameSpace, includeActivity, includeResults);
		pcodeTest(nameSpace, includeActivity, includeResults);
		projectTest(nameSpace, includeActivity, includeResults);
		providersTest(nameSpace, includeActivity, includeResults);
		resultTest(nameSpace, includeActivity, includeResults);
		sampleMediaTest(nameSpace, includeActivity, includeResults);
		siteIdTest(nameSpace, includeActivity, includeResults);
		siteIdLargeListTest(nameSpace, includeActivity, includeResults);
		siteTypeTest(nameSpace, includeActivity, includeResults);
		siteUrlBaseTest(nameSpace, includeActivity, includeResults);
		sortedTest(nameSpace, includeActivity, includeResults);
		startDateHiTest(nameSpace, includeActivity, includeResults);
		startDateLoTest(nameSpace, includeActivity, includeResults);
		stateTest(nameSpace, includeActivity, includeResults);
		subjectTaxonomicNameTest(nameSpace, includeActivity, includeResults);
		withinTest(nameSpace, includeActivity, includeResults);
		zipTest(nameSpace, includeActivity, includeResults);
		// Combined-filter scenarios verifying summed counts across parameters.
		multipleParameterStationSumTest(nameSpace, includeActivity, includeResults);
		multipleParameterActivitySumTest(nameSpace, includeActivity, includeResults);
		multipleParameterResultSumTest(nameSpace, includeActivity, includeResults);
	}
}
| unlicense |
eSDK/esdk_cloud_fc_native_java | source/src/main/java/com/huawei/esdk/fusioncompute/local/model/net/Subnet.java | 3959 | package com.huawei.esdk.fusioncompute.local.model.net;
import java.util.List;
/**
 * Detailed information about a subnet.
 * <p>
 * @since eSDK Cloud V100R003C50
 */
public class Subnet
{
    /**
     * Subnet identifier (URN).
     */
    private String urn;
    /**
     * URI used to access this subnet.
     */
    private String uri;
    /**
     * Subnet name; length [1, 256].
     */
    private String name;
    /**
     * VLAN number used by the subnet; range: 1 - 4094.
     */
    private Integer vlanId;
    /**
     * [Optional] Gateway IP address.
     */
    private String gateway;
    /**
     * Network IP address.
     */
    private String netAddr;
    /**
     * Network mask length; range: 1 - 31.
     */
    private Integer netMask;
    /**
     * [Optional] System-reserved IP address.
     * (Note: this field is intended for all-in-one appliance use only. When the GM
     * creates a subnet and sets this field, gateway need not be set; likewise, when
     * gateway is set, this field need not be set.)
     */
    private String sysReserveIp;
    /**
     * [Optional] Reserved address ranges; the list holds at most 3 entries.
     * Example: "192.168.0.1-192.168.0.5"
     */
    private List<String> reserveIps;
    /**
     * [Optional] Description; length [0, 1024].
     */
    private String description;
    /**
     * DHCP option details.
     */
    private List<DhcpOption> dhcpOption;
    /**
     * Number of allocated addresses; populated in responses of the
     * query-specified-subnet API.
     */
    private Integer allocateAddrNum;
    /**
     * Number of available addresses; populated in responses of the
     * query-specified-subnet API.
     */
    private Integer availableAddrNum;
    public String getUrn()
    {
        return urn;
    }
    public void setUrn(String urn)
    {
        this.urn = urn;
    }
    public String getUri()
    {
        return uri;
    }
    public void setUri(String uri)
    {
        this.uri = uri;
    }
    public String getName()
    {
        return name;
    }
    public void setName(String name)
    {
        this.name = name;
    }
    public Integer getVlanId()
    {
        return vlanId;
    }
    public void setVlanId(Integer vlanId)
    {
        this.vlanId = vlanId;
    }
    public String getGateway()
    {
        return gateway;
    }
    public void setGateway(String gateway)
    {
        this.gateway = gateway;
    }
    public String getNetAddr()
    {
        return netAddr;
    }
    public void setNetAddr(String netAddr)
    {
        this.netAddr = netAddr;
    }
    public Integer getNetMask()
    {
        return netMask;
    }
    public void setNetMask(Integer netMask)
    {
        this.netMask = netMask;
    }
    public String getSysReserveIp()
    {
        return sysReserveIp;
    }
    public void setSysReserveIp(String sysReserveIp)
    {
        this.sysReserveIp = sysReserveIp;
    }
    public List<String> getReserveIps()
    {
        return reserveIps;
    }
    public void setReserveIps(List<String> reserveIps)
    {
        this.reserveIps = reserveIps;
    }
    public String getDescription()
    {
        return description;
    }
    public void setDescription(String description)
    {
        this.description = description;
    }
    public List<DhcpOption> getDhcpOption()
    {
        return dhcpOption;
    }
    public void setDhcpOption(List<DhcpOption> dhcpOption)
    {
        this.dhcpOption = dhcpOption;
    }
    public Integer getAllocateAddrNum()
    {
        return allocateAddrNum;
    }
    public void setAllocateAddrNum(Integer allocateAddrNum)
    {
        this.allocateAddrNum = allocateAddrNum;
    }
    public Integer getAvailableAddrNum()
    {
        return availableAddrNum;
    }
    public void setAvailableAddrNum(Integer availableAddrNum)
    {
        this.availableAddrNum = availableAddrNum;
    }
}
| apache-2.0 |
waiteryee1/ansj_seg | src/main/java/org/ansj/recognition/arrimpl/NumRecognition.java | 1786 | package org.ansj.recognition.arrimpl;
import org.ansj.domain.Term;
import org.ansj.recognition.TermArrRecognition;
import org.ansj.util.MyStaticValue;
import org.ansj.util.TermUtil;
/**
 * Recognizer that merges adjacent numeric terms in a segmented term array,
 * including decimal numbers split around a '.' and (optionally) a trailing
 * quantifier term.
 */
public class NumRecognition implements TermArrRecognition {
	/**
	 * Merges adjacent numeric terms (digit + digit) into a single term.
	 * Merged-away slots in the array are set to null and the term links
	 * (from/to) are rewired around them.
	 *
	 * @param terms the linked term array produced by segmentation; modified in place
	 */
	public void recognition(Term[] terms) {
		int length = terms.length - 1;
		Term from = null;
		Term to = null;
		Term temp = null;
		for (int i = 0; i < length; i++) {
			if (terms[i] == null) {
				continue;
			} else if (".".equals(terms[i].getName()) || ".".equals(terms[i].getName())) {
				// Special case: a '.' (ASCII or full-width) with numbers on both
				// sides is a decimal point — fuse the two numbers around it.
				to = terms[i].to();
				from = terms[i].from();
				if (from.termNatures().numAttr.flag && to.termNatures().numAttr.flag) {
					from.setName(from.getName() + "." + to.getName());
					TermUtil.termLink(from, to.to());
					terms[to.getOffe()] = null;
					terms[i] = null;
					// Restart scanning from the merged term (loop i++ moves past it).
					i = from.getOffe() - 1;
				}
				continue;
			} else if (!terms[i].termNatures().numAttr.flag) {
				continue;
			}
			temp = terms[i];
			// Merge every directly following numeric term into terms[i].
			while ((temp = temp.to()).termNatures().numAttr.flag) {
				terms[i].setName(terms[i].getName() + temp.getName());
			}
			// If quantifier recognition is on and the next term often ends a
			// number (e.g. a measure word), absorb it as well.
			if (MyStaticValue.isQuantifierRecognition && temp.termNatures().numAttr.numEndFreq > 0) {
				terms[i].setName(terms[i].getName() + temp.getName());
				temp = temp.to();
			}
			// If the successor changed, terms[i] was extended: relink it past
			// the absorbed terms.
			if (terms[i].to() != temp) {
				TermUtil.termLink(terms[i], temp);
				// Null out the now-useless middle elements.
				for (int j = i + 1; j < temp.getOffe(); j++) {
					terms[j] = null;
				}
				i = temp.getOffe() - 1;
			}
		}
	}
}
| apache-2.0 |
geigerj/toolkit | src/main/java/com/google/api/codegen/viewmodel/DynamicLangDefaultableParamView.java | 1165 | /* Copyright 2016 Google Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.api.codegen.viewmodel;
import com.google.auto.value.AutoValue;
/**
 * View model for a method parameter that carries a default value, as rendered
 * by dynamic-language code generators. Immutable; instances are created via
 * the generated AutoValue builder.
 */
@AutoValue
public abstract class DynamicLangDefaultableParamView {
  /** The parameter's name as it appears in the generated signature. */
  public abstract String name();
  /** The literal default value rendered for the parameter. */
  public abstract String defaultValue();
  /** Returns a new, empty builder for this view. */
  public static Builder newBuilder() {
    return new AutoValue_DynamicLangDefaultableParamView.Builder();
  }
  /** Builder for {@link DynamicLangDefaultableParamView}; all fields are required. */
  @AutoValue.Builder
  public static abstract class Builder {
    public abstract Builder name(String name);
    public abstract Builder defaultValue(String value);
    public abstract DynamicLangDefaultableParamView build();
  }
}
| apache-2.0 |
gehel/spring-social-bitbucket | src/main/java/org/springframework/social/bitbucket/api/impl/InvitationsTemplate.java | 1983 | /**
* Copyright (C) 2012 Eric Bottard / Guillaume Lederrey (eric.bottard+ghpublic@gmail.com / guillaume.lederrey@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.social.bitbucket.api.impl;
import org.springframework.social.bitbucket.api.BitBucketInvitation;
import org.springframework.social.bitbucket.api.BitBucketPrivilege;
import org.springframework.social.bitbucket.api.InvitationsOperations;
import org.springframework.social.support.ParameterMap;
import org.springframework.web.client.RestTemplate;
/**
 * Default implementation of {@link InvitationsOperations}, issuing requests
 * against version 1 of the BitBucket REST API.
 *
 * @author Cyprian Śniegota
 * @since 2.0.0
 */
public class InvitationsTemplate extends AbstractBitBucketOperations implements InvitationsOperations {
    public InvitationsTemplate(RestTemplate restTemplate, boolean authorized) {
        super(restTemplate, authorized, V1);
    }
    /**
     * Sends a repository-access invitation to an email address.
     * POSTs the requested permission to
     * {@code /invitations/{accountname}/{repo_slug}/{emailaddress}}.
     */
    @Override
    public final BitBucketInvitation sendInvitation(String accountName, String repoSlug, String emailAddress, BitBucketPrivilege perm) {
        return getRestTemplate().postForObject(buildUrl("/invitations/{accountname}/{repo_slug}/{emailaddress}"),
                new SendInvitationParametersHolder(perm), BitBucketInvitation.class, accountName, repoSlug, emailAddress);
    }
    /** Form-parameter holder mapping the privilege to the "permission" field. */
    private static final class SendInvitationParametersHolder extends ParameterMap {
        public SendInvitationParametersHolder(BitBucketPrivilege privilege) {
            add("permission", privilege.toString());
        }
    }
}
| apache-2.0 |
apache/batik | batik-css/src/main/java/org/apache/batik/css/engine/value/svg12/DeviceColor.java | 3839 | /*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.css.engine.value.svg12;
import org.apache.batik.css.engine.value.AbstractValue;
import org.w3c.dom.DOMException;
import org.w3c.dom.css.CSSValue;
/**
 * This class represents a device-specific color value: device-gray(),
 * device-rgb(), device-cmyk() or device-nchannel().
 * <p>
 * Components are appended one at a time via {@link #append(float)}; the
 * function name emitted by {@link #getCssText()} is derived from the
 * component count (1 = gray, 3 = rgb, 4 = cmyk) unless the value was
 * constructed as an n-channel color.
 *
 * @version $Id$
 */
public class DeviceColor extends AbstractValue {

    public static final String DEVICE_GRAY_COLOR_FUNCTION = "device-gray";
    public static final String DEVICE_RGB_COLOR_FUNCTION = "device-rgb";
    public static final String DEVICE_CMYK_COLOR_FUNCTION = "device-cmyk";
    public static final String DEVICE_NCHANNEL_COLOR_FUNCTION = "device-nchannel";

    /** True if this value uses an N-Channel color space. */
    protected boolean nChannel;

    /**
     * The color count (number of valid entries in {@link #colors}).
     */
    protected int count;

    /**
     * The colors. Grows by doubling in {@link #append(float)}.
     */
    protected float[] colors = new float[5];

    /**
     * Creates a new DeviceColor.
     * @param nChannel true for a device-nchannel() color, false for Gray, RGB and CMYK
     */
    public DeviceColor(boolean nChannel) {
        this.nChannel = nChannel;
    }

    /**
     * Implements {@link
     * org.apache.batik.css.engine.value.Value#getCssValueType()}.
     */
    public short getCssValueType() {
        return CSSValue.CSS_CUSTOM;
    }

    /**
     * Indicates whether this color uses an N-Channel color space.
     * @return true if N-Channel is used
     */
    public boolean isNChannel() {
        return this.nChannel;
    }

    /**
     * Returns the number of colors.
     */
    public int getNumberOfColors() throws DOMException {
        return count;
    }

    /**
     * Returns the color at the given index.
     * NOTE(review): no bounds check is performed; indices must be in
     * [0, getNumberOfColors()).
     */
    public float getColor(int i) throws DOMException {
        return colors[i];
    }

    /**
     * A string representation of the current value, e.g.
     * "device-cmyk(0.1, 0.2, 0.3, 0.4)".
     * @throws IllegalStateException if the component count matches no known
     *         device color function (and this is not an n-channel color)
     */
    public String getCssText() {
        // StringBuilder instead of StringBuffer: this value is built locally
        // and needs no synchronization.
        StringBuilder sb = new StringBuilder(count * 8);
        if (nChannel) {
            sb.append(DEVICE_NCHANNEL_COLOR_FUNCTION);
        } else {
            // The function name is implied by the number of components.
            switch (count) {
            case 1:
                sb.append(DEVICE_GRAY_COLOR_FUNCTION);
                break;
            case 3:
                sb.append(DEVICE_RGB_COLOR_FUNCTION);
                break;
            case 4:
                sb.append(DEVICE_CMYK_COLOR_FUNCTION);
                break;
            default:
                throw new IllegalStateException("Invalid number of components encountered");
            }
        }
        sb.append('(');
        for (int i = 0; i < count; i++) {
            if (i > 0) {
                sb.append(", ");
            }
            sb.append(colors[i]);
        }
        sb.append(')');
        return sb.toString();
    }

    /**
     * Appends a color component to the list, doubling the backing array
     * when it is full.
     */
    public void append(float c) {
        if (count == colors.length) {
            float[] t = new float[count * 2];
            System.arraycopy(colors, 0, t, 0, count);
            colors = t;
        }
        colors[count++] = c;
    }

    /** {@inheritDoc} */
    public String toString() {
        return getCssText();
    }
}
| apache-2.0 |
prateekm/samza | samza-core/src/main/java/org/apache/samza/table/caching/guava/GuavaCacheTable.java | 5858 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.table.caching.guava;
import com.google.common.cache.Cache;
import org.apache.samza.SamzaException;
import org.apache.samza.context.Context;
import org.apache.samza.storage.kv.Entry;
import org.apache.samza.table.BaseReadWriteTable;
import org.apache.samza.table.ReadWriteTable;
import org.apache.samza.table.utils.TableMetricsUtil;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
 * Simple cache table backed by a Guava cache instance. Application is expect to build
 * a cache instance with desired parameters and specify it to the table descriptor.
 * <p>
 * Synchronous methods delegate to their async counterparts and block; async
 * methods wrap the (synchronous) Guava cache calls in already-completed
 * futures, completing exceptionally if the cache throws.
 *
 * @param <K> type of the key in the cache
 * @param <V> type of the value in the cache
 */
public class GuavaCacheTable<K, V> extends BaseReadWriteTable<K, V>
    implements ReadWriteTable<K, V> {

  private final Cache<K, V> cache;

  public GuavaCacheTable(String tableId, Cache<K, V> cache) {
    super(tableId);
    this.cache = cache;
  }

  @Override
  public void init(Context context) {
    super.init(context);
    TableMetricsUtil tableMetricsUtil = new TableMetricsUtil(context, this, tableId);
    // hit- and miss-rate are provided by CachingTable.
    tableMetricsUtil.newGauge("evict-count", () -> cache.stats().evictionCount());
  }

  /** Blocking get; wraps any failure of the async path in a SamzaException. */
  @Override
  public V get(K key, Object ... args) {
    try {
      return getAsync(key).get();
    } catch (Exception e) {
      throw new SamzaException("GET failed for " + key, e);
    }
  }

  /** Returns the cached value for key, or null if absent, as a completed future. */
  @Override
  public CompletableFuture<V> getAsync(K key, Object ... args) {
    CompletableFuture<V> future = new CompletableFuture<>();
    try {
      future.complete(cache.getIfPresent(key));
    } catch (Exception e) {
      future.completeExceptionally(e);
    }
    return future;
  }

  @Override
  public Map<K, V> getAll(List<K> keys, Object ... args) {
    try {
      return getAllAsync(keys).get();
    } catch (Exception e) {
      throw new SamzaException("GET_ALL failed for " + keys, e);
    }
  }

  /** Returns the subset of keys present in the cache; absent keys are omitted. */
  @Override
  public CompletableFuture<Map<K, V>> getAllAsync(List<K> keys, Object ... args) {
    CompletableFuture<Map<K, V>> future = new CompletableFuture<>();
    try {
      future.complete(cache.getAllPresent(keys));
    } catch (Exception e) {
      future.completeExceptionally(e);
    }
    return future;
  }

  @Override
  public void put(K key, V value, Object ... args) {
    try {
      putAsync(key, value).get();
    } catch (Exception e) {
      throw new SamzaException("PUT failed for " + key, e);
    }
  }

  @Override
  public CompletableFuture<Void> putAsync(K key, V value, Object ... args) {
    // A null key is treated as a delete request, mirroring the record
    // convention used in putAllAsync (null value = delete).
    if (key == null) {
      return deleteAsync(key);
    }
    CompletableFuture<Void> future = new CompletableFuture<>();
    try {
      cache.put(key, value);
      future.complete(null);
    } catch (Exception e) {
      future.completeExceptionally(e);
    }
    return future;
  }

  @Override
  public void putAll(List<Entry<K, V>> entries, Object ... args) {
    try {
      putAllAsync(entries).get();
    } catch (Exception e) {
      throw new SamzaException("PUT_ALL failed", e);
    }
  }

  /**
   * Applies a batch of records: entries with a null value are deletions,
   * the rest are puts. Deletions are applied before the puts.
   */
  @Override
  public CompletableFuture<Void> putAllAsync(List<Entry<K, V>> entries, Object ... args) {
    CompletableFuture<Void> future = new CompletableFuture<>();
    try {
      // Separate out put vs delete records
      List<K> delKeys = new ArrayList<>();
      List<Entry<K, V>> putRecords = new ArrayList<>();
      entries.forEach(r -> {
        if (r.getValue() != null) {
          putRecords.add(r);
        } else {
          delKeys.add(r.getKey());
        }
      });
      cache.invalidateAll(delKeys);
      putRecords.forEach(e -> put(e.getKey(), e.getValue()));
      future.complete(null);
    } catch (Exception e) {
      future.completeExceptionally(e);
    }
    return future;
  }

  @Override
  public void delete(K key, Object ... args) {
    try {
      deleteAsync(key).get();
    } catch (Exception e) {
      throw new SamzaException("DELETE failed", e);
    }
  }

  @Override
  public CompletableFuture<Void> deleteAsync(K key, Object ... args) {
    CompletableFuture<Void> future = new CompletableFuture<>();
    try {
      cache.invalidate(key);
      future.complete(null);
    } catch (Exception e) {
      future.completeExceptionally(e);
    }
    return future;
  }

  @Override
  public void deleteAll(List<K> keys, Object ... args) {
    try {
      deleteAllAsync(keys).get();
    } catch (Exception e) {
      throw new SamzaException("DELETE_ALL failed", e);
    }
  }

  @Override
  public CompletableFuture<Void> deleteAllAsync(List<K> keys, Object ... args) {
    CompletableFuture<Void> future = new CompletableFuture<>();
    try {
      cache.invalidateAll(keys);
      future.complete(null);
    } catch (Exception e) {
      future.completeExceptionally(e);
    }
    return future;
  }

  /** Triggers Guava's pending maintenance (expired-entry cleanup); no persistence. */
  @Override
  public synchronized void flush() {
    cache.cleanUp();
  }

  /** Discards all cached entries. */
  @Override
  public synchronized void close() {
    cache.invalidateAll();
  }
}
| apache-2.0 |
carlos-anaya/HTML5Parser | Code/src/main/java/com/html5parser/constants/SVGAttributesTable.java | 3160 | package com.html5parser.constants;
import java.util.HashMap;
import java.util.Map;
public class SVGAttributesTable {
    /**
     * Maps a lowercased SVG attribute name (the HTML tokenizer lowercases all
     * attribute names) to its correctly-cased SVG form, as required by the
     * HTML5 tree-construction step "adjust SVG attributes".
     *
     * Key: lowercased attribute name. Value: fixed (mixed-case) attribute name.
     *
     * The map is wrapped as unmodifiable so this shared lookup table cannot be
     * mutated by callers; the previous version exposed the raw mutable HashMap.
     */
    public static final Map<String, String> TABLE;
    static {
        Map<String, String> table = new HashMap<String, String>();
        table.put("attributename", "attributeName");
        table.put("attributetype", "attributeType");
        table.put("basefrequency", "baseFrequency");
        table.put("baseprofile", "baseProfile");
        table.put("calcmode", "calcMode");
        table.put("clippathunits", "clipPathUnits");
        table.put("contentscripttype", "contentScriptType");
        table.put("contentstyletype", "contentStyleType");
        table.put("diffuseconstant", "diffuseConstant");
        table.put("edgemode", "edgeMode");
        table.put("externalresourcesrequired", "externalResourcesRequired");
        table.put("filterres", "filterRes");
        table.put("filterunits", "filterUnits");
        table.put("glyphref", "glyphRef");
        table.put("gradienttransform", "gradientTransform");
        table.put("gradientunits", "gradientUnits");
        table.put("kernelmatrix", "kernelMatrix");
        table.put("kernelunitlength", "kernelUnitLength");
        table.put("keypoints", "keyPoints");
        table.put("keysplines", "keySplines");
        table.put("keytimes", "keyTimes");
        table.put("lengthadjust", "lengthAdjust");
        table.put("limitingconeangle", "limitingConeAngle");
        table.put("markerheight", "markerHeight");
        table.put("markerunits", "markerUnits");
        table.put("markerwidth", "markerWidth");
        table.put("maskcontentunits", "maskContentUnits");
        table.put("maskunits", "maskUnits");
        table.put("numoctaves", "numOctaves");
        table.put("pathlength", "pathLength");
        table.put("patterncontentunits", "patternContentUnits");
        table.put("patterntransform", "patternTransform");
        table.put("patternunits", "patternUnits");
        table.put("pointsatx", "pointsAtX");
        table.put("pointsaty", "pointsAtY");
        table.put("pointsatz", "pointsAtZ");
        table.put("preservealpha", "preserveAlpha");
        table.put("preserveaspectratio", "preserveAspectRatio");
        table.put("primitiveunits", "primitiveUnits");
        table.put("refx", "refX");
        table.put("refy", "refY");
        table.put("repeatcount", "repeatCount");
        table.put("repeatdur", "repeatDur");
        table.put("requiredextensions", "requiredExtensions");
        table.put("requiredfeatures", "requiredFeatures");
        table.put("specularconstant", "specularConstant");
        table.put("specularexponent", "specularExponent");
        table.put("spreadmethod", "spreadMethod");
        table.put("startoffset", "startOffset");
        table.put("stddeviation", "stdDeviation");
        table.put("stitchtiles", "stitchTiles");
        table.put("surfacescale", "surfaceScale");
        table.put("systemlanguage", "systemLanguage");
        table.put("tablevalues", "tableValues");
        table.put("targetx", "targetX");
        table.put("targety", "targetY");
        table.put("textlength", "textLength");
        table.put("viewbox", "viewBox");
        table.put("viewtarget", "viewTarget");
        table.put("xchannelselector", "xChannelSelector");
        table.put("ychannelselector", "yChannelSelector");
        table.put("zoomandpan", "zoomAndPan");
        // Publish an immutable view; fully qualified to avoid a new import.
        TABLE = java.util.Collections.unmodifiableMap(table);
    }
}
| apache-2.0 |
apache/commons-digester | core/src/main/java/org/apache/commons/digester3/plugins/RuleFinder.java | 3535 | package org.apache.commons.digester3.plugins;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Properties;
import org.apache.commons.digester3.Digester;
/**
* Each concrete implementation of RuleFinder is an algorithm for locating a source of digester rules for a plugin. The
* algorithm may use info explicitly provided by the user as part of the plugin declaration, or not (in which case the
* concrete RuleFinder subclass typically has Dflt as part of its name).
* <p>
* Instances of this class can also be regarded as a Factory for RuleLoaders, except that an instance of a RuleLoader is
* only created if the particular finder algorithm can locate a suitable source of rules given the plugin class and
* associated properties.
* <p>
* This is an abstract class rather than an interface in order to make it possible to enhance this class in future
* without breaking binary compatibility; it is possible to add methods to an abstract class, but not to an interface.
*
* @since 1.6
*/
public abstract class RuleFinder
{
/**
 * Apply the finder algorithm to attempt to locate a source of digester rules for the specified plugin class.
 * <p>
 * This method is invoked when a plugin is declared by the user, either via an explicit use of
 * PluginDeclarationRule, or implicitly via an "inline declaration" where the declaration and use are simultaneous.
 * <p>
 * If dynamic rules for the specified plugin class are located, then the RuleFinder will return a RuleLoader object
 * encapsulating those rules, and this object will be invoked each time the user actually requests an instance of
 * the declared plugin class, to load the custom rules associated with that plugin instance.
 * <p>
 * If no dynamic rules can be found, null is returned. This is not an error; merely an indication that this
 * particular algorithm found no matches.
 * <p>
 * The properties object holds any xml attributes the user may have specified on the plugin declaration in order to
 * indicate how to locate the plugin rules.
 * <p>
 *
 * @param d The digester instance in which the plugin classes are being located
 * @param pluginClass The plugin Java class
 * @param p The properties object that holds any xml attributes the user may have specified on the plugin
 *          declaration in order to indicate how to locate the plugin rules.
 * @return a source of digester rules for the specified plugin class, or null if this algorithm found none.
 * @throws PluginException if the algorithm finds a source of rules, but there is something invalid
 *             about that source.
 */
public abstract RuleLoader findLoader( Digester d, Class<?> pluginClass, Properties p )
throws PluginException;
}
| apache-2.0 |
lemire/incubator-kylin | job/src/main/java/org/apache/kylin/job/tools/DefaultX509TrustManager.java | 3718 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.job.tools;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author xduo
*
*/
public class DefaultX509TrustManager implements X509TrustManager {

    /** Log object for this class. Made final: it is never reassigned. */
    private static final Logger LOG = LoggerFactory.getLogger(DefaultX509TrustManager.class);

    /** The JRE's standard trust manager; used only for getAcceptedIssuers(). */
    private X509TrustManager standardTrustManager = null;

    /**
     * Creates a trust manager backed by the given keystore.
     *
     * SECURITY NOTE: this implementation is deliberately permissive — see
     * isClientTrusted / checkClientTrusted / checkServerTrusted below, which
     * accept (almost) everything. Do not reuse it outside this tool.
     *
     * @param keystore keystore used to initialize the standard trust manager
     * @throws NoSuchAlgorithmException if no X509 trust manager is available
     * @throws KeyStoreException if the trust manager factory cannot be initialized
     */
    public DefaultX509TrustManager(KeyStore keystore) throws NoSuchAlgorithmException, KeyStoreException {
        super();
        // Fix: the original asked for KeyManagerFactory's default algorithm when
        // building a TrustManagerFactory; use the trust manager default instead.
        TrustManagerFactory factory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        factory.init(keystore);
        TrustManager[] trustmanagers = factory.getTrustManagers();
        if (trustmanagers.length == 0) {
            throw new NoSuchAlgorithmException("SunX509 trust manager not supported");
        }
        this.standardTrustManager = (X509TrustManager) trustmanagers[0];
    }

    /** Delegates to the standard trust manager. */
    public X509Certificate[] getAcceptedIssuers() {
        return this.standardTrustManager.getAcceptedIssuers();
    }

    /** SECURITY: unconditionally trusts every client certificate (by design here). */
    public boolean isClientTrusted(X509Certificate[] certificates) {
        return true;
        // return this.standardTrustManager.isClientTrusted(certificates);
    }

    /**
     * Trusts the server unless it presents exactly one certificate that fails
     * its own validity-period check. Chains of any other length are accepted
     * without verification (by design here).
     */
    public boolean isServerTrusted(X509Certificate[] certificates) {
        if ((certificates != null) && LOG.isDebugEnabled()) {
            LOG.debug("Server certificate chain:");
            for (int i = 0; i < certificates.length; i++) {
                // Outer isDebugEnabled() already guards this loop.
                LOG.debug("X509Certificate[" + i + "]=" + certificates[i]);
            }
        }
        if ((certificates != null) && (certificates.length == 1)) {
            X509Certificate certificate = certificates[0];
            try {
                certificate.checkValidity();
            } catch (CertificateException e) {
                LOG.error(e.toString());
                return false;
            }
            return true;
        } else {
            return true;
            // return this.standardTrustManager.isServerTrusted(certificates);
        }
    }

    /** SECURITY: intentionally a no-op — accepts every client chain. */
    @Override
    public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
        // Deliberately empty: never throws, so all clients are trusted.
    }

    /** SECURITY: intentionally a no-op — accepts every server chain. */
    @Override
    public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
        // Deliberately empty: never throws, so all servers are trusted.
    }
}
| apache-2.0 |
android-art-intel/Nougat | art-extension/test/536-checker-intrinsic-optimization/src/Main.java | 2384 | /*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// ART run-test: the "/// CHECK" lines below are checker directives consumed by
// the test harness to verify compiler output; they must not be reworded.
public class Main {
public static void main(String[] args) {
stringEqualsSame();
stringArgumentNotNull("Foo");
}
/// CHECK-START: boolean Main.stringEqualsSame() instruction_simplifier (before)
/// CHECK: InvokeStaticOrDirect
/// CHECK-START: boolean Main.stringEqualsSame() register (before)
/// CHECK: <<Const1:i\d+>> IntConstant 1
/// CHECK: Return [<<Const1>>]
/// CHECK-START: boolean Main.stringEqualsSame() register (before)
/// CHECK-NOT: InvokeStaticOrDirect
public static boolean stringEqualsSame() {
return $inline$callStringEquals("obj", "obj");
}
/// CHECK-START: boolean Main.stringEqualsNull() register (after)
/// CHECK: <<Invoke:z\d+>> InvokeVirtual
/// CHECK: Return [<<Invoke>>]
public static boolean stringEqualsNull() {
String o = (String)myObject;
return $inline$callStringEquals(o, o);
}
public static boolean $inline$callStringEquals(String a, String b) {
return a.equals(b);
}
/// CHECK-START-X86: boolean Main.stringArgumentNotNull(java.lang.Object) disassembly (after)
/// CHECK: InvokeVirtual {{.*\.equals.*}}
/// CHECK-NOT: test
public static boolean stringArgumentNotNull(Object obj) {
obj.getClass();
return "foo".equals(obj);
}
// Test is very brittle as it depends on the order we emit instructions.
/// CHECK-START-X86: boolean Main.stringArgumentIsString() disassembly (after)
/// CHECK: InvokeVirtual
/// CHECK: test
/// CHECK: jz/eq
// Check that we don't try to compare the classes.
/// CHECK-NOT: mov
/// CHECK: cmp
public static boolean stringArgumentIsString() {
return "foo".equals(myString);
}
static String myString;
static Object myObject;
}
| apache-2.0 |
google/binnavi | src/main/java/com/google/security/zynamics/binnavi/config/DebugColorsConfigItem.java | 5605 | // Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.security.zynamics.binnavi.config;
import com.google.security.zynamics.common.config.AbstractConfigItem;
import com.google.security.zynamics.common.config.TypedPropertiesWrapper;
import java.awt.Color;
/**
 * Configuration item holding the colors used by the debugger UI (breakpoint
 * states and the active-line marker). Values are loaded from and stored to a
 * {@code TypedPropertiesWrapper} under fixed property keys.
 */
public class DebugColorsConfigItem extends AbstractConfigItem {

  // Property keys under which each color is persisted.
  private static final String BREAKPOINT_ACTIVE_COLOR = "BreakpointActive";
  private static final String BREAKPOINT_INACTIVE_COLOR = "BreakpointInactive";
  private static final String BREAKPOINT_DISABLED_COLOR = "BreakpointDisabled";
  private static final String BREAKPOINT_HIT_COLOR = "BreakpointHit";
  private static final String BREAKPOINT_ENABLED_COLOR = "BreakpointEnabled";
  private static final String BREAKPOINT_INVALID_COLOR = "BreakpointInvalid";
  private static final String BREAKPOINT_DELETING_COLOR = "BreakpointDeleting";
  private static final String ACTIVE_LINE_COLOR = "ActiveLine";

  // Default colors, encoded as packed ARGB integers.
  private static final Color BREAKPOINT_ACTIVE_COLOR_DEFAULT = new Color(-16740608);
  private static final Color BREAKPOINT_INACTIVE_COLOR_DEFAULT = new Color(-16763956);
  private static final Color BREAKPOINT_DISABLED_COLOR_DEFAULT = new Color(-5592663);
  private static final Color BREAKPOINT_HIT_COLOR_DEFAULT = new Color(-5046272);
  private static final Color BREAKPOINT_ENABLED_COLOR_DEFAULT = new Color(-16740608);
  private static final Color BREAKPOINT_INVALID_COLOR_DEFAULT = new Color(-16777216);
  private static final Color BREAKPOINT_DELETING_COLOR_DEFAULT = new Color(-3328);
  private static final Color ACTIVE_LINE_COLOR_DEFAULT = new Color(-65536);

  // Current values; each starts at its default until load() overwrites it.
  private Color breakpointActive = BREAKPOINT_ACTIVE_COLOR_DEFAULT;
  private Color breakpointInactive = BREAKPOINT_INACTIVE_COLOR_DEFAULT;
  private Color breakpointDisabled = BREAKPOINT_DISABLED_COLOR_DEFAULT;
  private Color breakpointHit = BREAKPOINT_HIT_COLOR_DEFAULT;
  private Color breakpointEnabled = BREAKPOINT_ENABLED_COLOR_DEFAULT;
  private Color breakpointInvalid = BREAKPOINT_INVALID_COLOR_DEFAULT;
  private Color breakpointDeleting = BREAKPOINT_DELETING_COLOR_DEFAULT;
  private Color activeLine = ACTIVE_LINE_COLOR_DEFAULT;

  /** Reads every color from the given properties, falling back to the defaults. */
  @Override
  public void load(final TypedPropertiesWrapper properties) {
    breakpointActive = properties.getColor(BREAKPOINT_ACTIVE_COLOR, BREAKPOINT_ACTIVE_COLOR_DEFAULT);
    breakpointInactive = properties.getColor(BREAKPOINT_INACTIVE_COLOR, BREAKPOINT_INACTIVE_COLOR_DEFAULT);
    breakpointDisabled = properties.getColor(BREAKPOINT_DISABLED_COLOR, BREAKPOINT_DISABLED_COLOR_DEFAULT);
    breakpointHit = properties.getColor(BREAKPOINT_HIT_COLOR, BREAKPOINT_HIT_COLOR_DEFAULT);
    breakpointEnabled = properties.getColor(BREAKPOINT_ENABLED_COLOR, BREAKPOINT_ENABLED_COLOR_DEFAULT);
    breakpointInvalid = properties.getColor(BREAKPOINT_INVALID_COLOR, BREAKPOINT_INVALID_COLOR_DEFAULT);
    breakpointDeleting = properties.getColor(BREAKPOINT_DELETING_COLOR, BREAKPOINT_DELETING_COLOR_DEFAULT);
    activeLine = properties.getColor(ACTIVE_LINE_COLOR, ACTIVE_LINE_COLOR_DEFAULT);
  }

  /** Writes every current color to the given properties. */
  @Override
  public void store(final TypedPropertiesWrapper properties) {
    properties.setColor(BREAKPOINT_ACTIVE_COLOR, breakpointActive);
    properties.setColor(BREAKPOINT_INACTIVE_COLOR, breakpointInactive);
    properties.setColor(BREAKPOINT_DISABLED_COLOR, breakpointDisabled);
    properties.setColor(BREAKPOINT_HIT_COLOR, breakpointHit);
    properties.setColor(BREAKPOINT_ENABLED_COLOR, breakpointEnabled);
    properties.setColor(BREAKPOINT_INVALID_COLOR, breakpointInvalid);
    properties.setColor(BREAKPOINT_DELETING_COLOR, breakpointDeleting);
    properties.setColor(ACTIVE_LINE_COLOR, activeLine);
  }

  // Plain accessors for each color.

  public Color getBreakpointActive() { return breakpointActive; }

  public void setBreakpointActive(final Color color) { breakpointActive = color; }

  public Color getBreakpointInactive() { return breakpointInactive; }

  public void setBreakpointInactive(final Color color) { breakpointInactive = color; }

  public Color getBreakpointDisabled() { return breakpointDisabled; }

  public void setBreakpointDisabled(final Color color) { breakpointDisabled = color; }

  public Color getBreakpointHit() { return breakpointHit; }

  public void setBreakpointHit(final Color color) { breakpointHit = color; }

  public Color getBreakpointEnabled() { return breakpointEnabled; }

  public void setBreakpointEnabled(final Color color) { breakpointEnabled = color; }

  public Color getBreakpointInvalid() { return breakpointInvalid; }

  public void setBreakpointInvalid(final Color color) { breakpointInvalid = color; }

  public Color getBreakpointDeleting() { return breakpointDeleting; }

  public void setBreakpointDeleting(final Color color) { breakpointDeleting = color; }

  public Color getActiveLine() { return activeLine; }

  public void setActiveLine(final Color color) { activeLine = color; }
}
| apache-2.0 |
Sivaramvt/acs-aem-commons | bundle/src/main/java/com/adobe/acs/commons/workflow/bulk/removal/impl/exceptions/WorkflowRemovalMaxDurationExceededException.java | 794 | /*
* #%L
* ACS AEM Commons Bundle
* %%
* Copyright (C) 2015 Adobe
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.adobe.acs.commons.workflow.bulk.removal.impl.exceptions;
/**
 * Thrown when a bulk workflow-removal run exceeds its configured maximum
 * duration. The original class had no constructors, so callers could not
 * attach a message or cause; the standard Exception constructors are added
 * (backward-compatible: the implicit no-arg constructor is preserved).
 */
public class WorkflowRemovalMaxDurationExceededException extends Exception {
    // Explicit serialVersionUID since Exception is Serializable.
    private static final long serialVersionUID = 1L;

    /** Creates the exception with no detail message. */
    public WorkflowRemovalMaxDurationExceededException() {
        super();
    }

    /**
     * @param message detail message describing which run exceeded its limit
     */
    public WorkflowRemovalMaxDurationExceededException(String message) {
        super(message);
    }

    /**
     * @param message detail message describing which run exceeded its limit
     * @param cause underlying cause, preserved for diagnostics
     */
    public WorkflowRemovalMaxDurationExceededException(String message, Throwable cause) {
        super(message, cause);
    }
}
| apache-2.0 |
romankagan/DDBWorkbench | plugins/ui-designer/src/com/intellij/uiDesigner/snapShooter/CreateSnapShotAction.java | 23500 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.uiDesigner.snapShooter;
import com.intellij.execution.*;
import com.intellij.execution.application.ApplicationConfiguration;
import com.intellij.execution.application.ApplicationConfigurationType;
import com.intellij.execution.executors.DefaultRunExecutor;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.runners.ProgramRunner;
import com.intellij.execution.util.JreVersionDetector;
import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeView;
import com.intellij.ide.highlighter.JavaHighlightingColors;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.ui.ColoredTreeCellRenderer;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.uiDesigner.GuiFormFileType;
import com.intellij.uiDesigner.UIDesignerBundle;
import com.intellij.uiDesigner.designSurface.InsertComponentProcessor;
import com.intellij.uiDesigner.palette.ComponentItem;
import com.intellij.uiDesigner.palette.Palette;
import com.intellij.uiDesigner.radComponents.LayoutManagerRegistry;
import com.intellij.uiDesigner.radComponents.RadComponentFactory;
import com.intellij.uiDesigner.radComponents.RadContainer;
import com.intellij.util.IncorrectOperationException;
import icons.UIDesignerIcons;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
/**
* @author yole
*/
public class CreateSnapShotAction extends AnAction {
private static final Logger LOG = Logger.getInstance("com.intellij.uiDesigner.snapShooter.CreateSnapShotAction");
@Override
public void update(AnActionEvent e) {
    // The action is only visible when invoked with a project and an IDE view
    // whose selection contains at least one source directory in a Java package.
    final Project project = e.getData(CommonDataKeys.PROJECT);
    final IdeView view = e.getData(LangDataKeys.IDE_VIEW);
    final boolean visible = project != null && view != null && hasDirectoryInPackage(project, view);
    e.getPresentation().setVisible(visible);
}
// Returns true if any directory in the view's selection lies inside source
// content and belongs to a Java package.
private static boolean hasDirectoryInPackage(final Project project, final IdeView view) {
    final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(project).getFileIndex();
    final JavaDirectoryService directoryService = JavaDirectoryService.getInstance();
    for (final PsiDirectory directory : view.getDirectories()) {
        final boolean inSource = fileIndex.isInSourceContent(directory.getVirtualFile());
        if (inSource && directoryService.getPackage(directory) != null) {
            return true;
        }
    }
    return false;
}
// Entry point of the action: finds (or creates) a SnapShooter-enabled run
// configuration, connects to the running application if possible, and starts
// the snapshot session over the chosen directory.
public void actionPerformed(AnActionEvent e) {
final Project project = e.getData(CommonDataKeys.PROJECT);
final IdeView view = e.getData(LangDataKeys.IDE_VIEW);
if (project == null || view == null) {
return;
}
final PsiDirectory dir = view.getOrChooseDirectory();
if (dir == null) return;
final SnapShotClient client = new SnapShotClient();
List<RunnerAndConfigurationSettings> appConfigurations = new ArrayList<RunnerAndConfigurationSettings>();
RunnerAndConfigurationSettings snapshotConfiguration = null;
boolean connected = false;
ApplicationConfigurationType cfgType = ApplicationConfigurationType.getInstance();
List<RunnerAndConfigurationSettings> racsi = RunManager.getInstance(project).getConfigurationSettingsList(cfgType);
// Scan all Application run configurations: collect them, and if one already
// has the Swing inspector enabled, try to connect to its last-known port.
for(RunnerAndConfigurationSettings config: racsi) {
if (config.getConfiguration() instanceof ApplicationConfiguration) {
ApplicationConfiguration appConfig = (ApplicationConfiguration) config.getConfiguration();
appConfigurations.add(config);
if (appConfig.ENABLE_SWING_INSPECTOR) {
SnapShooterConfigurationSettings settings = SnapShooterConfigurationSettings.get(appConfig);
snapshotConfiguration = config;
if (settings.getLastPort() > 0) {
try {
client.connect(settings.getLastPort());
connected = true;
}
catch(IOException ex) {
// Stale port; keep scanning for another live configuration.
connected = false;
}
}
}
if (connected) break;
}
}
// No inspector-enabled configuration found: ask the user to pick one.
if (snapshotConfiguration == null) {
snapshotConfiguration = promptForSnapshotConfiguration(project, appConfigurations);
if (snapshotConfiguration == null) return;
}
if (!connected) {
// Target app is not running: offer to launch it, then resume the session
// from the notify callback once the SnapShooter agent is listening.
int rc = Messages.showYesNoDialog(project, UIDesignerBundle.message("snapshot.run.prompt"),
UIDesignerBundle.message("snapshot.title"), Messages.getQuestionIcon());
if (rc == 1) return;
final ApplicationConfiguration appConfig = (ApplicationConfiguration) snapshotConfiguration.getConfiguration();
final SnapShooterConfigurationSettings settings = SnapShooterConfigurationSettings.get(appConfig);
settings.setNotifyRunnable(new Runnable() {
public void run() {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.prepare.notice"),
UIDesignerBundle.message("snapshot.title"), Messages.getInformationIcon());
try {
client.connect(settings.getLastPort());
}
catch(IOException ex) {
Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.connection.error"),
UIDesignerBundle.message("snapshot.title"), Messages.getErrorIcon());
return;
}
runSnapShooterSession(client, project, dir, view);
}
});
}
});
try {
final ProgramRunner runner = RunnerRegistry.getInstance().getRunner(DefaultRunExecutor.EXECUTOR_ID, appConfig);
LOG.assertTrue(runner != null, "Runner MUST not be null!");
Executor executor = DefaultRunExecutor.getRunExecutorInstance();
runner.execute(
new ExecutionEnvironment(executor, runner, snapshotConfiguration, project));
}
catch (ExecutionException ex) {
Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.run.error", ex.getMessage()),
UIDesignerBundle.message("snapshot.title"), Messages.getErrorIcon());
}
}
else {
// Already connected to a running app: start the session immediately.
runSnapShooterSession(client, project, dir, view);
}
}
// Runs one snapshot session against a connected client: suspends the target
// app's Swing event queue, shows the component-picker dialog, captures the
// selected component as a .form file into the chosen directory, then resumes
// the target app. Always disposes the client at the end.
private static void runSnapShooterSession(final SnapShotClient client, final Project project, final PsiDirectory dir, final IdeView view) {
try {
client.suspendSwing();
}
catch (IOException e1) {
Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.connection.error"),
UIDesignerBundle.message("snapshot.title"), Messages.getInformationIcon());
return;
}
final MyDialog dlg = new MyDialog(project, client, dir);
dlg.show();
if (dlg.getExitCode() == DialogWrapper.OK_EXIT_CODE) {
final int id = dlg.getSelectedComponentId();
// The snapshot is produced on a background thread under a modal progress
// dialog; result holds either the form XML String or the thrown Exception.
final Ref<Object> result = new Ref<Object>();
ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() {
public void run() {
try {
result.set(client.createSnapshot(id));
}
catch (Exception ex) {
result.set(ex);
}
}
}, UIDesignerBundle.message("progress.creating.snapshot"), false, project);
String snapshot = null;
if (result.get() instanceof String) {
snapshot = (String) result.get();
}
else {
Exception ex = (Exception) result.get();
Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.create.error", ex.getMessage()),
UIDesignerBundle.message("snapshot.title"), Messages.getErrorIcon());
}
if (snapshot != null) {
final String snapshot1 = snapshot;
// PSI mutation must happen inside a write action wrapped in a command.
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
CommandProcessor.getInstance().executeCommand(project, new Runnable() {
public void run() {
try {
PsiFile formFile = PsiFileFactory.getInstance(dir.getProject())
.createFileFromText(dlg.getFormName() + GuiFormFileType.DOT_DEFAULT_EXTENSION, snapshot1);
formFile = (PsiFile)dir.add(formFile);
formFile.getVirtualFile().setCharset(CharsetToolkit.UTF8_CHARSET);
formFile.getViewProvider().getDocument().setText(snapshot1);
view.selectElement(formFile);
}
catch (IncorrectOperationException ex) {
Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.save.error", ex.getMessage()),
UIDesignerBundle.message("snapshot.title"), Messages.getErrorIcon());
}
}
}, "", null);
}
});
}
}
// Resume the target app's event queue regardless of the dialog outcome.
try {
client.resumeSwing();
}
catch (IOException ex) {
Messages.showErrorDialog(project, UIDesignerBundle.message("snapshot.connection.broken"),
UIDesignerBundle.message("snapshot.title"));
}
client.dispose();
}
// Asks the user which Application run configuration to use for snapshots,
// after filtering out configurations that do not run on a JRE >= 5.0 (the
// SnapShooter agent requires it). Enables the Swing inspector on the choice.
// Returns null if nothing suitable exists or the user cancels.
@Nullable
private static RunnerAndConfigurationSettings promptForSnapshotConfiguration(final Project project,
final List<RunnerAndConfigurationSettings> configurations) {
if (configurations.isEmpty()) {
Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.no.configuration.error"),
UIDesignerBundle.message("snapshot.title"), Messages.getInformationIcon());
return null;
}
// Iterate backwards so removal does not shift unvisited indices.
for(int i=configurations.size()-1; i >= 0; i--) {
final JreVersionDetector detector = new JreVersionDetector();
final ApplicationConfiguration configuration = (ApplicationConfiguration)configurations.get(i).getConfiguration();
if (!detector.isJre50Configured(configuration) && !detector.isModuleJre50Configured(configuration)) {
configurations.remove(i);
}
}
if (configurations.isEmpty()) {
Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.no.compatible.configuration.error"),
UIDesignerBundle.message("snapshot.title"), Messages.getInformationIcon());
return null;
}
final RunnerAndConfigurationSettings snapshotConfiguration;
if (configurations.size() == 1) {
// Single candidate: just confirm it (rc == 1 means the user declined).
final int rc = Messages.showYesNoDialog(
project,
UIDesignerBundle.message("snapshot.confirm.configuration.prompt", configurations.get(0).getConfiguration().getName()),
UIDesignerBundle.message("snapshot.title"),
Messages.getQuestionIcon());
if (rc == 1) {
return null;
}
snapshotConfiguration = configurations.get(0);
}
else {
// Several candidates: let the user choose by name.
String[] names = new String[configurations.size()];
for(int i=0; i<configurations.size(); i++) {
names [i] = configurations.get(i).getConfiguration().getName();
}
int rc = Messages.showChooseDialog(
project,
UIDesignerBundle.message("snapshot.choose.configuration.prompt"),
UIDesignerBundle.message("snapshot.title"),
Messages.getQuestionIcon(),
names,
names [0]
);
if (rc < 0) return null;
snapshotConfiguration = configurations.get(rc);
}
// Turn on the Swing inspector so the SnapShooter agent is injected on launch.
((ApplicationConfiguration) snapshotConfiguration.getConfiguration()).ENABLE_SWING_INSPECTOR = true;
return snapshotConfiguration;
}
private static class MyDialog extends DialogWrapper {
// Form-bound UI components (presumably bound via the .form file — confirm).
private JPanel myRootPanel;
private JTree myComponentTree;
private JTextField myFormNameTextField;
private JLabel myErrorLabel;
// Session state: project, connection to the target app, and save directory.
private final Project myProject;
private final SnapShotClient myClient;
private final PsiDirectory myDirectory;
// Prefix stripped from Swing class names when rendering the component tree.
@NonNls private static final String SWING_PACKAGE = "javax.swing.";
// Builds the component-picker dialog: populates the tree from the remote
// client, wires validation listeners, and installs a custom cell renderer.
private MyDialog(Project project, final SnapShotClient client, final PsiDirectory dir) {
super(project, true);
myProject = project;
myClient = client;
myDirectory = dir;
init();
setTitle(UIDesignerBundle.message("snapshot.title"));
final SnapShotTreeModel model = new SnapShotTreeModel(client);
myComponentTree.setModel(model);
myComponentTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
myComponentTree.getSelectionModel().addTreeSelectionListener(new TreeSelectionListener() {
public void valueChanged(TreeSelectionEvent e) {
updateOKAction();
}
});
// Two expansion passes: expanding a row can reveal new rows that the first
// pass missed.
for(int i=0; i<2; i++) {
for(int row=myComponentTree.getRowCount()-1; row >= 0; row--) {
myComponentTree.expandRow(row);
}
}
myComponentTree.getSelectionModel().setSelectionPath(myComponentTree.getPathForRow(0));
myFormNameTextField.setText(suggestFormName());
// Render component titles in the editor's string-literal color.
final EditorColorsScheme globalScheme = EditorColorsManager.getInstance().getGlobalScheme();
final TextAttributes attributes = globalScheme.getAttributes(JavaHighlightingColors.STRING);
final SimpleTextAttributes titleAttributes =
new SimpleTextAttributes(SimpleTextAttributes.STYLE_PLAIN, attributes.getForegroundColor());
myComponentTree.setCellRenderer(new ColoredTreeCellRenderer() {
public void customizeCellRenderer(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
SnapShotRemoteComponent rc = (SnapShotRemoteComponent) value;
String className = rc.getClassName();
// Shorten javax.swing.* class names for readability.
if (className.startsWith(SWING_PACKAGE)) {
append(className.substring(SWING_PACKAGE.length()), SimpleTextAttributes.REGULAR_ATTRIBUTES);
}
else {
append(className, SimpleTextAttributes.REGULAR_ATTRIBUTES);
}
if (rc.getText().length() > 0) {
append(" \"" + rc.getText() + "\"", titleAttributes);
}
if (rc.getLayoutManager().length() > 0) {
append(" (" + rc.getLayoutManager() + ")", SimpleTextAttributes.GRAY_ATTRIBUTES);
}
// Icon: form icon for top-level windows, palette icon for known
// component classes, a placeholder otherwise.
if (rc.isTopLevel()) {
setIcon(AllIcons.FileTypes.UiForm);
}
else {
final Palette palette = Palette.getInstance(myProject);
final ComponentItem item = palette.getItem(rc.getClassName());
if (item != null) {
setIcon(item.getSmallIcon());
}
else {
setIcon(UIDesignerIcons.Unknown);
}
}
}
});
myFormNameTextField.getDocument().addDocumentListener(new DocumentAdapter() {
protected void textChanged(DocumentEvent e) {
updateOKAction();
}
});
updateOKAction();
}
@NonNls
private String suggestFormName() {
int count = 0;
do {
count++;
}
while(myDirectory.findFile("Form" + count + GuiFormFileType.DOT_DEFAULT_EXTENSION) != null);
return "Form" + count;
}
private void updateOKAction() {
final boolean selectedComponentValid = isSelectedComponentValid();
setOKActionEnabled(isFormNameValid() && selectedComponentValid);
if (myComponentTree.getSelectionPath() != null && !selectedComponentValid) {
myErrorLabel.setText(UIDesignerBundle.message("snapshooter.invalid.container"));
}
else {
myErrorLabel.setText(" ");
}
}
private boolean isSelectedComponentValid() {
final TreePath selectionPath = myComponentTree.getSelectionPath();
if (selectionPath == null) return false;
SnapShotRemoteComponent rc = (SnapShotRemoteComponent) selectionPath.getLastPathComponent();
if (isValidComponent(rc)) return true;
if (selectionPath.getPathCount() == 2) {
// capture frame/dialog root pane when a frame or dialog itself is selected
final SnapShotRemoteComponent[] children = rc.getChildren();
return children != null && children.length > 0 && isValidComponent(children[0]);
}
return false;
}
private boolean isValidComponent(final SnapShotRemoteComponent rc) {
PsiClass componentClass =
JavaPsiFacade.getInstance(myProject).findClass(rc.getClassName().replace('$', '.'), GlobalSearchScope.allScope(myProject));
while(componentClass != null) {
if (JPanel.class.getName().equals(componentClass.getQualifiedName()) ||
JTabbedPane.class.getName().equals(componentClass.getQualifiedName()) ||
JScrollPane.class.getName().equals(componentClass.getQualifiedName()) ||
JSplitPane.class.getName().equals(componentClass.getQualifiedName())) {
return true;
}
componentClass = componentClass.getSuperClass();
}
return false;
}
private boolean isFormNameValid() {
return myFormNameTextField.getText().length() > 0;
}
@Override @NonNls
protected String getDimensionServiceKey() {
return "CreateSnapShotAction.MyDialog";
}
@Override
public JComponent getPreferredFocusedComponent() {
return myFormNameTextField;
}
@NotNull
@Override
protected Action getOKAction() {
final Action okAction = super.getOKAction();
okAction.putValue(Action.NAME, UIDesignerBundle.message("create.snapshot.button"));
return okAction;
}
@Override
protected void doOKAction() {
if (getOKAction().isEnabled()) {
try {
myDirectory.checkCreateFile(getFormName() + GuiFormFileType.DOT_DEFAULT_EXTENSION);
}
catch (IncorrectOperationException e) {
JOptionPane.showMessageDialog(myRootPanel, UIDesignerBundle.message("error.form.already.exists", getFormName()));
return;
}
if (!checkUnknownLayoutManagers(myDirectory.getProject())) return;
close(OK_EXIT_CODE);
}
}
private boolean checkUnknownLayoutManagers(final Project project) {
final Set<String> layoutManagerClasses = new TreeSet<String>();
final SnapShotRemoteComponent rc = (SnapShotRemoteComponent) myComponentTree.getSelectionPath().getLastPathComponent();
assert rc != null;
final Ref<Exception> err = new Ref<Exception>();
Runnable runnable = new Runnable() {
public void run() {
try {
collectUnknownLayoutManagerClasses(project, rc, layoutManagerClasses);
}
catch (IOException e) {
err.set(e);
}
}
};
if (!ProgressManager.getInstance().runProcessWithProgressSynchronously(runnable,
UIDesignerBundle.message("progress.validating.layout.managers"),
false, project)) {
return false;
}
if (!err.isNull()) {
Messages.showErrorDialog(myRootPanel, UIDesignerBundle.message("snapshot.connection.broken"), UIDesignerBundle.message("snapshot.title"));
return false;
}
if (!layoutManagerClasses.isEmpty()) {
StringBuilder builder = new StringBuilder(UIDesignerBundle.message("snapshot.unknown.layout.prefix"));
for(String layoutManagerClass: layoutManagerClasses) {
builder.append(layoutManagerClass).append("\n");
}
builder.append(UIDesignerBundle.message("snapshot.unknown.layout.prompt"));
return Messages.showYesNoDialog(myProject, builder.toString(),
UIDesignerBundle.message("snapshot.title"), Messages.getQuestionIcon()) == 0;
}
return true;
}
private void collectUnknownLayoutManagerClasses(final Project project, final SnapShotRemoteComponent rc,
final Set<String> layoutManagerClasses) throws IOException {
RadComponentFactory factory = InsertComponentProcessor.getRadComponentFactory(project, rc.getClassName());
if (factory instanceof RadContainer.Factory && rc.getLayoutManager().length() > 0 &&
!LayoutManagerRegistry.isKnownLayoutClass(rc.getLayoutManager())) {
layoutManagerClasses.add(rc.getLayoutManager());
}
SnapShotRemoteComponent[] children = rc.getChildren();
if (children == null) {
children = myClient.listChildren(rc.getId());
rc.setChildren(children);
}
for(SnapShotRemoteComponent child: children) {
collectUnknownLayoutManagerClasses(project, child, layoutManagerClasses);
}
}
@Nullable
protected JComponent createCenterPanel() {
return myRootPanel;
}
public int getSelectedComponentId() {
final TreePath selectionPath = myComponentTree.getSelectionPath();
SnapShotRemoteComponent rc = (SnapShotRemoteComponent) selectionPath.getLastPathComponent();
if (!isValidComponent(rc) && selectionPath.getPathCount() == 2) {
// capture frame/dialog root pane when a frame or dialog itself is selected
final SnapShotRemoteComponent[] children = rc.getChildren();
if (children != null && children.length > 0 && isValidComponent(children [0])) {
return children [0].getId();
}
}
return rc.getId();
}
public String getFormName() {
return myFormNameTextField.getText();
}
}
}
| apache-2.0 |
locationtech/geowave | core/store/src/test/java/org/locationtech/geowave/core/store/query/gwql/SelectStatementTest.java | 15592 | /**
* Copyright (c) 2013-2020 Contributors to the Eclipse Foundation
*
* <p> See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.core.store.query.gwql;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.text.ParseException;
import org.junit.Test;
import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;
import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;
import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.query.filter.expression.And;
import org.locationtech.geowave.core.store.query.filter.expression.Filter;
import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericBetween;
import org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator;
import org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser;
import org.locationtech.geowave.core.store.query.gwql.statement.SelectStatement;
import org.locationtech.geowave.core.store.query.gwql.statement.Statement;
/**
 * Tests for parsing GWQL {@code SELECT} statements: syntax error reporting,
 * selector/alias handling, aggregations, filters, limits, and quoted
 * (unconventional) type/column names.
 */
public class SelectStatementTest extends AbstractGWQLTest {
  // Statements that must be rejected by the parser, with the expected error fragment.
  @Test
  public void testInvalidStatements() {
    final DataStore dataStore = createDataStore();
    // Missing from
    assertInvalidStatement(dataStore, "SELECT *", "expecting FROM");
    // Missing store and type name
    assertInvalidStatement(dataStore, "SELECT * FROM", "missing IDENTIFIER");
    // Missing everything
    assertInvalidStatement(dataStore, "SELECT", "expecting {'*', IDENTIFIER}");
    // All columns and single selector
    assertInvalidStatement(dataStore, "SELECT *, pop FROM type", "expecting FROM");
    // All columns and aggregation selector
    assertInvalidStatement(dataStore, "SELECT *, agg(column) FROM type", "expecting FROM");
    // Nonexistent type
    assertInvalidStatement(dataStore, "SELECT * FROM nonexistent", "No type named nonexistent");
    // No selectors
    assertInvalidStatement(dataStore, "SELECT FROM type", "expecting {'*', IDENTIFIER}");
    // Aggregation and non aggregation selectors
    assertInvalidStatement(dataStore, "SELECT agg(*), pop FROM type", "expecting '('");
    // No where filter
    assertInvalidStatement(dataStore, "SELECT * FROM type WHERE", "mismatched input '<EOF>'");
    // No limit count
    assertInvalidStatement(dataStore, "SELECT * FROM type LIMIT", "missing INTEGER");
    // Non-integer limit count
    assertInvalidStatement(dataStore, "SELECT * FROM type LIMIT 1.5", "expecting INTEGER");
    // Missing column alias
    assertInvalidStatement(dataStore, "SELECT pop AS FROM type", "expecting IDENTIFIER");
  }
  // Statements that must parse without error; parse failures throw and fail the test.
  @Test
  public void testValidStatements() {
    final DataStore dataStore = createDataStore();
    GWQLParser.parseStatement(dataStore, "SELECT * FROM type");
    GWQLParser.parseStatement(dataStore, "SELECT * FROM type LIMIT 1");
    GWQLParser.parseStatement(dataStore, "SELECT * FROM type WHERE pop < 1");
    GWQLParser.parseStatement(dataStore, "SELECT * FROM type WHERE pop > 1 LIMIT 1");
    GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type");
    GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type LIMIT 1");
    GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type WHERE pop < 1");
    GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type WHERE pop > 1 LIMIT 2");
    GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type");
    GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type LIMIT 1");
    GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type WHERE pop < 1");
    GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type WHERE pop > 1 LIMIT 2");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type LIMIT 1");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type WHERE pop < 1");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type WHERE pop > 1 LIMIT 3");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type LIMIT 1");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type WHERE pop < 1");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type WHERE pop > 1 LIMIT 3");
    GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type");
    GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type LIMIT 1");
    GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type WHERE pop < 1");
    GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type WHERE pop > 1 LIMIT 4");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type LIMIT 1");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type WHERE pop < 1");
    GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type WHERE pop > 1 LIMIT 4");
  }
  @Test
  public void testAllColumns() throws ParseException, IOException {
    final DataStore dataStore = createDataStore();
    final String statement = "SELECT * FROM type";
    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;
    assertFalse(selectStatement.isAggregation());
    assertNotNull(selectStatement.getAdapter());
    assertEquals("type", selectStatement.getAdapter().getTypeName());
    assertNull(selectStatement.getFilter());
  }
  @Test
  public void testAllColumnsWithFilter() throws ParseException, IOException {
    final DataStore dataStore = createDataStore();
    final String statement = "SELECT * FROM type WHERE pop BETWEEN 1000 AND 2000 and pid > 'abc'";
    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;
    assertFalse(selectStatement.isAggregation());
    assertNotNull(selectStatement.getAdapter());
    assertEquals("type", selectStatement.getAdapter().getTypeName());
    assertNotNull(selectStatement.getFilter());
    Filter filter = selectStatement.getFilter();
    assertTrue(filter instanceof And);
    And andFilter = (And) filter;
    // assertEquals gives an expected-vs-actual message on failure, unlike assertTrue(x == n)
    assertEquals(2, andFilter.getChildren().length);
    assertTrue(andFilter.getChildren()[0] instanceof NumericBetween);
    assertTrue(andFilter.getChildren()[1] instanceof TextComparisonOperator);
    assertNull(selectStatement.getLimit());
  }
  @Test
  public void testAllColumnsWithFilterAndLimit() throws ParseException, IOException {
    final DataStore dataStore = createDataStore();
    final String statement =
        "SELECT * FROM type WHERE pop BETWEEN 1000 AND 2000 and pid > 'abc' LIMIT 1";
    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;
    assertFalse(selectStatement.isAggregation());
    assertNotNull(selectStatement.getAdapter());
    assertEquals("type", selectStatement.getAdapter().getTypeName());
    assertNotNull(selectStatement.getFilter());
    Filter filter = selectStatement.getFilter();
    assertTrue(filter instanceof And);
    And andFilter = (And) filter;
    assertEquals(2, andFilter.getChildren().length);
    assertTrue(andFilter.getChildren()[0] instanceof NumericBetween);
    assertTrue(andFilter.getChildren()[1] instanceof TextComparisonOperator);
    assertNotNull(selectStatement.getLimit());
    assertEquals(1, selectStatement.getLimit().intValue());
  }
  @Test
  public void testAggregation() {
    final DataStore dataStore = createDataStore();
    final String statement = "SELECT sum(pop) FROM type";
    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;
    assertTrue(selectStatement.isAggregation());
    assertNotNull(selectStatement.getAdapter());
    assertEquals("type", selectStatement.getAdapter().getTypeName());
    assertNotNull(selectStatement.getSelectors());
    assertEquals(1, selectStatement.getSelectors().size());
    assertTrue(selectStatement.getSelectors().get(0) instanceof AggregationSelector);
    AggregationSelector selector = (AggregationSelector) selectStatement.getSelectors().get(0);
    assertNull(selector.alias());
    assertEquals("sum", selector.functionName());
    assertEquals(1, selector.functionArgs().length);
    assertEquals("pop", selector.functionArgs()[0]);
    assertNull(selectStatement.getFilter());
  }
  @Test
  public void testAggregationAlias() {
    final DataStore dataStore = createDataStore();
    final String statement = "SELECT sum(pop) AS total FROM type";
    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;
    assertTrue(selectStatement.isAggregation());
    assertNotNull(selectStatement.getAdapter());
    assertEquals("type", selectStatement.getAdapter().getTypeName());
    assertNotNull(selectStatement.getSelectors());
    assertEquals(1, selectStatement.getSelectors().size());
    assertTrue(selectStatement.getSelectors().get(0) instanceof AggregationSelector);
    AggregationSelector selector = (AggregationSelector) selectStatement.getSelectors().get(0);
    assertEquals("total", selector.alias());
    assertEquals("sum", selector.functionName());
    assertEquals(1, selector.functionArgs().length);
    assertEquals("pop", selector.functionArgs()[0]);
    assertNull(selectStatement.getFilter());
  }
  @Test
  public void testColumnSubset() {
    final DataStore dataStore = createDataStore();
    final String statement = "SELECT pop, start, end FROM type";
    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;
    assertFalse(selectStatement.isAggregation());
    assertNotNull(selectStatement.getAdapter());
    assertEquals("type", selectStatement.getAdapter().getTypeName());
    assertNotNull(selectStatement.getSelectors());
    assertEquals(3, selectStatement.getSelectors().size());
    assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector);
    ColumnSelector selector = (ColumnSelector) selectStatement.getSelectors().get(0);
    assertNull(selector.alias());
    assertEquals("pop", selector.columnName());
    assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector);
    selector = (ColumnSelector) selectStatement.getSelectors().get(1);
    assertNull(selector.alias());
    assertEquals("start", selector.columnName());
    assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector);
    selector = (ColumnSelector) selectStatement.getSelectors().get(2);
    assertNull(selector.alias());
    assertEquals("end", selector.columnName());
    assertNull(selectStatement.getFilter());
  }
  @Test
  public void testColumnSubsetWithAliases() {
    final DataStore dataStore = createDataStore();
    final String statement = "SELECT pop AS pop_alt, start, end AS end_alt FROM type";
    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;
    assertFalse(selectStatement.isAggregation());
    assertNotNull(selectStatement.getAdapter());
    assertEquals("type", selectStatement.getAdapter().getTypeName());
    assertNotNull(selectStatement.getSelectors());
    assertEquals(3, selectStatement.getSelectors().size());
    assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector);
    ColumnSelector selector = (ColumnSelector) selectStatement.getSelectors().get(0);
    assertEquals("pop_alt", selector.alias());
    assertEquals("pop", selector.columnName());
    assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector);
    selector = (ColumnSelector) selectStatement.getSelectors().get(1);
    assertNull(selector.alias());
    assertEquals("start", selector.columnName());
    assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector);
    selector = (ColumnSelector) selectStatement.getSelectors().get(2);
    assertEquals("end_alt", selector.alias());
    assertEquals("end", selector.columnName());
    assertNull(selectStatement.getFilter());
  }
  // Names containing '-' must work when quoted with [brackets], `backticks`, or "quotes".
  @Test
  public void testUnconventionalNaming() {
    final DataStore dataStore =
        createDataStore(
            BasicDataTypeAdapter.newAdapter("ty-p3", UnconventionalNameType.class, "pid"),
            "a-1");
    final String statement = "SELECT [a-1], `b-2`, \"c-3\" FROM [ty-p3]";
    final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement;
    assertFalse(selectStatement.isAggregation());
    assertNotNull(selectStatement.getAdapter());
    assertEquals("ty-p3", selectStatement.getAdapter().getTypeName());
    assertNotNull(selectStatement.getSelectors());
    assertEquals(3, selectStatement.getSelectors().size());
    assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector);
    ColumnSelector selector = (ColumnSelector) selectStatement.getSelectors().get(0);
    assertNull(selector.alias());
    assertEquals("a-1", selector.columnName());
    assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector);
    selector = (ColumnSelector) selectStatement.getSelectors().get(1);
    assertNull(selector.alias());
    assertEquals("b-2", selector.columnName());
    assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector);
    selector = (ColumnSelector) selectStatement.getSelectors().get(2);
    assertNull(selector.alias());
    assertEquals("c-3", selector.columnName());
    assertNull(selectStatement.getFilter());
  }
  /** Data type whose field names contain characters requiring quoting in GWQL. */
  @GeoWaveDataType
  protected static class UnconventionalNameType {
    @GeoWaveField(name = "pid")
    private String pid;
    @GeoWaveField(name = "a-1")
    private Long a1;
    @GeoWaveField(name = "b-2")
    private Long b2;
    @GeoWaveField(name = "c-3")
    private Long c3;
    public UnconventionalNameType() {}
    public UnconventionalNameType(final String pid, final Long a1, final Long b2, final Long c3) {
      this.pid = pid;
      this.a1 = a1;
      this.b2 = b2;
      this.c3 = c3;
    }
  }
}
| apache-2.0 |
agavra/fastutil | src/it/unimi/dsi/fastutil/Size64.java | 2069 | package it.unimi.dsi.fastutil;
/*
* Copyright (C) 2010-2015 Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.Collection;
/** An interface for data structures whose size can exceed {@link Integer#MAX_VALUE}.
*
* <P>The only methods specified by this interfaces are {@link #size64()}, and
* a deprecated {@link #size()} identical to {@link Collection#size()}. Implementations
* can work around the type problem of {@link java.util.Collection#size()}
* (e.g., not being able to return more than {@link Integer#MAX_VALUE}) by implementing this
* interface. Callers interested in large structures
* can use a reflective call to <code>instanceof</code> to check for the presence of {@link #size64()}.
*
* <p>We remark that it is always a good idea to implement both {@link #size()} <em>and</em> {@link #size64()},
* as the former might be implemented by a superclass in an incompatible way. If you implement this interface,
* just implement {@link #size()} as a <em>deprecated</em> method returning <code>Math.min(Integer.MAX_VALUE, size64())</code>.
*/
/**
 * An interface for data structures whose size can exceed {@link Integer#MAX_VALUE}.
 *
 * <p>The only methods specified by this interface are {@link #size64()}, and a
 * deprecated {@link #size()} identical to {@link java.util.Collection#size()}.
 * Implementations can work around {@link java.util.Collection#size()} being limited
 * to an {@code int} return value by implementing this interface; callers interested
 * in large structures can check for it with {@code instanceof Size64}.
 *
 * <p>It is always a good idea to implement both {@link #size()} <em>and</em>
 * {@link #size64()}, as the former might be implemented by a superclass in an
 * incompatible way. If you implement this interface, implement {@link #size()} as a
 * <em>deprecated</em> method returning
 * {@code Math.min(Integer.MAX_VALUE, size64())}.
 */
public interface Size64 {
	/** Returns the size of this data structure as a long.
	 *
	 * @return the size of this data structure.
	 */
	long size64();
	/** Returns the size of this data structure, minimized with {@link Integer#MAX_VALUE}.
	 *
	 * @return the size of this data structure, minimized with {@link Integer#MAX_VALUE}.
	 * @see java.util.Collection#size()
	 * @deprecated Use {@link #size64()} instead.
	 */
	@Deprecated
	int size();
}
| apache-2.0 |
hgschmie/presto | presto-hive/src/main/java/io/prestosql/plugin/hive/RcFileFileWriterFactory.java | 7137 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.hive;
import com.google.common.collect.ImmutableMap;
import io.prestosql.plugin.hive.metastore.StorageFormat;
import io.prestosql.plugin.hive.rcfile.HdfsRcFileDataSource;
import io.prestosql.rcfile.RcFileDataSource;
import io.prestosql.rcfile.RcFileEncoding;
import io.prestosql.rcfile.binary.BinaryRcFileEncoding;
import io.prestosql.spi.PrestoException;
import io.prestosql.spi.connector.ConnectorSession;
import io.prestosql.spi.type.Type;
import io.prestosql.spi.type.TypeManager;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.joda.time.DateTimeZone;
import javax.inject.Inject;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.function.Supplier;
import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_WRITER_OPEN_ERROR;
import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_WRITE_VALIDATION_FAILED;
import static io.prestosql.plugin.hive.HiveMetadata.PRESTO_QUERY_ID_NAME;
import static io.prestosql.plugin.hive.HiveMetadata.PRESTO_VERSION_NAME;
import static io.prestosql.plugin.hive.HiveSessionProperties.isRcfileOptimizedWriterValidate;
import static io.prestosql.plugin.hive.rcfile.RcFilePageSourceFactory.createTextVectorEncoding;
import static io.prestosql.plugin.hive.util.HiveUtil.getColumnNames;
import static io.prestosql.plugin.hive.util.HiveUtil.getColumnTypes;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
/**
 * Creates {@link FileWriter} instances for Hive tables stored in the RCFile format,
 * supporting both the binary ({@code LazyBinaryColumnarSerDe}) and text
 * ({@code ColumnarSerDe}) column encodings.
 */
public class RcFileFileWriterFactory
        implements HiveFileWriterFactory
{
    private final DateTimeZone hiveStorageTimeZone;
    private final HdfsEnvironment hdfsEnvironment;
    private final TypeManager typeManager;
    private final NodeVersion nodeVersion;
    private final FileFormatDataSourceStats stats;

    @Inject
    public RcFileFileWriterFactory(
            HdfsEnvironment hdfsEnvironment,
            TypeManager typeManager,
            NodeVersion nodeVersion,
            HiveConfig hiveConfig,
            FileFormatDataSourceStats stats)
    {
        this(hdfsEnvironment, typeManager, nodeVersion, requireNonNull(hiveConfig, "hiveConfig is null").getDateTimeZone(), stats);
    }

    public RcFileFileWriterFactory(
            HdfsEnvironment hdfsEnvironment,
            TypeManager typeManager,
            NodeVersion nodeVersion,
            DateTimeZone hiveStorageTimeZone,
            FileFormatDataSourceStats stats)
    {
        this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
        this.typeManager = requireNonNull(typeManager, "typeManager is null");
        this.nodeVersion = requireNonNull(nodeVersion, "nodeVersion is null");
        this.hiveStorageTimeZone = requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
        this.stats = requireNonNull(stats, "stats is null");
    }

    /**
     * Returns a writer for the given path if the storage format is RCFile with a
     * supported SerDe, or {@link Optional#empty()} so another factory can handle it.
     *
     * @throws PrestoException with {@code HIVE_WRITER_OPEN_ERROR} if the file or
     *         writer cannot be created
     */
    @Override
    public Optional<FileWriter> createFileWriter(
            Path path,
            List<String> inputColumnNames,
            StorageFormat storageFormat,
            Properties schema,
            JobConf configuration,
            ConnectorSession session)
    {
        if (!RCFileOutputFormat.class.getName().equals(storageFormat.getOutputFormat())) {
            return Optional.empty();
        }

        RcFileEncoding rcFileEncoding;
        if (LazyBinaryColumnarSerDe.class.getName().equals(storageFormat.getSerDe())) {
            rcFileEncoding = new BinaryRcFileEncoding();
        }
        else if (ColumnarSerDe.class.getName().equals(storageFormat.getSerDe())) {
            rcFileEncoding = createTextVectorEncoding(schema, hiveStorageTimeZone);
        }
        else {
            // Unsupported SerDe; let another writer factory take it.
            return Optional.empty();
        }

        Optional<String> codecName = Optional.ofNullable(configuration.get(FileOutputFormat.COMPRESS_CODEC));

        // existing tables and partitions may have columns in a different order than the writer is providing, so build
        // an index to rearrange columns in the proper order
        List<String> fileColumnNames = getColumnNames(schema);
        List<Type> fileColumnTypes = getColumnTypes(schema).stream()
                .map(hiveType -> hiveType.getType(typeManager))
                .collect(toList());

        int[] fileInputColumnIndexes = fileColumnNames.stream()
                .mapToInt(inputColumnNames::indexOf)
                .toArray();

        try {
            FileSystem fileSystem = hdfsEnvironment.getFileSystem(session.getUser(), path, configuration);
            OutputStream outputStream = fileSystem.create(path);
            try {
                Optional<Supplier<RcFileDataSource>> validationInputFactory = Optional.empty();
                if (isRcfileOptimizedWriterValidate(session)) {
                    // Re-reads the written file after close to verify its contents.
                    validationInputFactory = Optional.of(() -> {
                        try {
                            return new HdfsRcFileDataSource(
                                    path.toString(),
                                    fileSystem.open(path),
                                    fileSystem.getFileStatus(path).getLen(),
                                    stats);
                        }
                        catch (IOException e) {
                            throw new PrestoException(HIVE_WRITE_VALIDATION_FAILED, e);
                        }
                    });
                }

                Callable<Void> rollbackAction = () -> {
                    fileSystem.delete(path, false);
                    return null;
                };

                return Optional.of(new RcFileFileWriter(
                        outputStream,
                        rollbackAction,
                        rcFileEncoding,
                        fileColumnTypes,
                        codecName,
                        fileInputColumnIndexes,
                        ImmutableMap.<String, String>builder()
                                .put(PRESTO_VERSION_NAME, nodeVersion.toString())
                                .put(PRESTO_QUERY_ID_NAME, session.getQueryId())
                                .build(),
                        validationInputFactory));
            }
            catch (Exception e) {
                // Close the output stream so it is not leaked when writer construction
                // fails; the original exception is rethrown and wrapped below.
                try {
                    outputStream.close();
                }
                catch (IOException ignored) {
                    // suppressed: the construction failure is the primary error
                }
                throw e;
            }
        }
        catch (Exception e) {
            throw new PrestoException(HIVE_WRITER_OPEN_ERROR, "Error creating RCFile file", e);
        }
    }
}
| apache-2.0 |
shantstepanian/obevo | obevo-db/src/main/java/com/gs/obevo/db/api/platform/DbDeployerAppContext.java | 2854 | /**
* Copyright 2017 Goldman Sachs.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.gs.obevo.db.api.platform;
import javax.sql.DataSource;
import com.gs.obevo.api.appdata.Change;
import com.gs.obevo.api.appdata.ChangeInput;
import com.gs.obevo.api.platform.DeployExecutionDao;
import com.gs.obevo.api.platform.DeployerAppContext;
import com.gs.obevo.api.platform.MainDeployerArgs;
import com.gs.obevo.db.api.appdata.DbEnvironment;
import com.gs.obevo.db.impl.core.checksum.DbChecksumDao;
import com.gs.obevo.dbmetadata.api.DbMetadataManager;
import org.eclipse.collections.api.list.ImmutableList;
/**
 * Application context for deploying to a database environment: exposes the change
 * sources, audit/checksum DAOs, and convenience deploy entry points for a
 * {@link DbEnvironment}.
 */
public interface DbDeployerAppContext extends DeployerAppContext<DbEnvironment, DbDeployerAppContext> {
    // Default for strictSetupEnvInfra: log a warning instead of failing.
    boolean STRICT_SETUP_ENV_INFRA_DEFAULT = false;
    /**
     * Sets whether to fail the command if the environment setup fails for certain operations (true) or to log a warning (false).
     *
     * @deprecated Renamed to {@link #setStrictSetupEnvInfra(boolean)}
     */
    @Deprecated
    DbDeployerAppContext setFailOnSetupException(boolean failOnSetupException);
    /**
     * Sets whether to fail the command if the environment setup fails for certain operations (true) or to log a warning (false).
     */
    DbDeployerAppContext setStrictSetupEnvInfra(boolean strictSetupEnvInfra);
    /** Returns the changes recorded in the deployment audit table of the target database. */
    ImmutableList<Change> readChangesFromAudit();
    /** Returns the changes read from the source (input files). */
    ImmutableList<ChangeInput> readChangesFromSource();
    /** Returns the changes read from the source, optionally using the baseline files. */
    ImmutableList<ChangeInput> readChangesFromSource(boolean useBaseline);
    /** Returns the manager used to introspect database metadata. */
    DbMetadataManager getDbMetadataManager();
    /** Returns the executor used to run SQL against the target environment. */
    SqlExecutor getSqlExecutor();
    /** Returns the DAO for recording and reading deploy executions. */
    DeployExecutionDao getDeployExecutionDao();
    /** Returns the DAO for recording and reading per-change checksums. */
    DbChecksumDao getDbChecksumDao();
    /**
     * Data Source with a single shared connection that clients can use to access the database being deployed.
     * This should NOT be used by this internal product code. This is only here for external clients.
     */
    DataSource getDataSource();
    /** Cleans the target environment and then deploys. */
    DbDeployerAppContext cleanAndDeploy();
    /** Sets up the environment infrastructure, cleans, and then deploys. */
    DbDeployerAppContext setupAndCleanAndDeploy();
    /**
     * Read in the input files and return stats. Only used for cases w/ some external integrations where a client wants
     * to read the metrics from the input source.
     */
    void readSource(MainDeployerArgs deployerArgs);
}
| apache-2.0 |
jsonking/mongo-java-driver | driver-core/src/test/unit/com/mongodb/client/ImmutableDocumentCodec.java | 2742 | /*
* Copyright 2016 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb.client;
import org.bson.BsonReader;
import org.bson.BsonValue;
import org.bson.BsonWriter;
import org.bson.Document;
import org.bson.codecs.CollectibleCodec;
import org.bson.codecs.DecoderContext;
import org.bson.codecs.EncoderContext;
import org.bson.codecs.configuration.CodecRegistry;
import org.bson.types.ObjectId;
import java.util.LinkedHashMap;
import static java.lang.String.format;
/**
 * A {@link CollectibleCodec} for {@link ImmutableDocument} instances.
 *
 * <p>Encoding and decoding are delegated to the registered {@link Document} codec; this class only
 * adds {@code _id} management (generation and lookup) on top of it.</p>
 */
public final class ImmutableDocumentCodec implements CollectibleCodec<ImmutableDocument> {
    private final CodecRegistry codecRegistry;
    private static final String ID_FIELD_NAME = "_id";

    /**
     * Construct a new instance.
     *
     * @param codecRegistry the registry used to look up the {@link Document} codec and to render
     *                      the document id as a {@link BsonValue}
     */
    public ImmutableDocumentCodec(final CodecRegistry codecRegistry) {
        this.codecRegistry = codecRegistry;
    }

    /**
     * Returns the document unchanged if it already contains an {@code _id}; otherwise returns a
     * copy with a freshly generated {@link ObjectId} under the {@code _id} key.
     *
     * @param document the document to check
     * @return the original document, or a copy with a generated id
     */
    @Override
    public ImmutableDocument generateIdIfAbsentFromDocument(final ImmutableDocument document) {
        // Fix: the CollectibleCodec contract is to generate an id only when one is absent.
        // The previous implementation unconditionally overwrote any existing _id.
        if (documentHasId(document)) {
            return document;
        }
        LinkedHashMap<String, Object> mutable = new LinkedHashMap<String, Object>(document);
        mutable.put(ID_FIELD_NAME, new ObjectId());
        return new ImmutableDocument(mutable);
    }

    /**
     * @param document the document to check
     * @return true if the document contains an {@code _id} key
     */
    @Override
    public boolean documentHasId(final ImmutableDocument document) {
        return document.containsKey(ID_FIELD_NAME);
    }

    /**
     * Returns the document's id as a {@link BsonValue}.
     *
     * @param document the document whose id to read
     * @return the id value
     * @throws IllegalStateException if the document has no {@code _id}
     */
    @Override
    public BsonValue getDocumentId(final ImmutableDocument document) {
        if (!documentHasId(document)) {
            throw new IllegalStateException(format("The document does not contain an %s", ID_FIELD_NAME));
        }
        return document.toBsonDocument(ImmutableDocument.class, codecRegistry).get(ID_FIELD_NAME);
    }

    /** Encodes by delegating to the registered {@link Document} codec on a mutable copy. */
    @Override
    public void encode(final BsonWriter writer, final ImmutableDocument value, final EncoderContext encoderContext) {
        codecRegistry.get(Document.class).encode(writer, new Document(value), encoderContext);
    }

    @Override
    public Class<ImmutableDocument> getEncoderClass() {
        return ImmutableDocument.class;
    }

    /** Decodes via the registered {@link Document} codec, then wraps the result immutably. */
    @Override
    public ImmutableDocument decode(final BsonReader reader, final DecoderContext decoderContext) {
        Document document = codecRegistry.get(Document.class).decode(reader, decoderContext);
        return new ImmutableDocument(document);
    }
}
| apache-2.0 |
petracvv/cas | support/cas-server-support-stormpath/src/main/java/org/apereo/cas/config/StormpathAuthenticationConfiguration.java | 3528 | package org.apereo.cas.config;
import org.apache.commons.lang3.StringUtils;
import org.apereo.cas.authentication.AuthenticationEventExecutionPlan;
import org.apereo.cas.authentication.AuthenticationHandler;
import org.apereo.cas.authentication.StormpathAuthenticationHandler;
import org.apereo.cas.authentication.principal.DefaultPrincipalFactory;
import org.apereo.cas.authentication.principal.PrincipalFactory;
import org.apereo.cas.authentication.principal.PrincipalResolver;
import org.apereo.cas.authentication.AuthenticationEventExecutionPlanConfigurer;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.configuration.model.support.stormpath.StormpathProperties;
import org.apereo.cas.configuration.support.Beans;
import org.apereo.cas.services.ServicesManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* This is {@link StormpathAuthenticationConfiguration}.
*
* @author Misagh Moayyed
* @since 5.0.0
*/
@Configuration("stormpathAuthenticationConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
public class StormpathAuthenticationConfiguration {
    @Autowired
    private CasConfigurationProperties casProperties;
    @Autowired
    @Qualifier("servicesManager")
    private ServicesManager servicesManager;

    /**
     * Principal factory used by the Stormpath handler.
     *
     * @return a plain {@link DefaultPrincipalFactory}
     */
    @ConditionalOnMissingBean(name = "stormpathPrincipalFactory")
    @Bean
    public PrincipalFactory stormpathPrincipalFactory() {
        return new DefaultPrincipalFactory();
    }

    /**
     * Builds the Stormpath authentication handler from the configured CAS properties,
     * wiring in the password encoder and principal name transformer.
     *
     * @return the configured handler
     */
    @Bean
    public AuthenticationHandler stormpathAuthenticationHandler() {
        final StormpathProperties props = casProperties.getAuthn().getStormpath();
        final StormpathAuthenticationHandler authnHandler = new StormpathAuthenticationHandler(props.getName(), servicesManager, stormpathPrincipalFactory(),
                null, props.getApiKey(), props.getApplicationId(), props.getSecretkey());
        authnHandler.setPasswordEncoder(Beans.newPasswordEncoder(props.getPasswordEncoder()));
        authnHandler.setPrincipalNameTransformer(Beans.newPrincipalNameTransformer(props.getPrincipalTransformation()));
        return authnHandler;
    }

    /**
     * Registers the Stormpath handler with the authentication execution plan when
     * API credentials are configured.
     */
    @Configuration("stormpathAuthenticationEventExecutionPlanConfiguration")
    @EnableConfigurationProperties(CasConfigurationProperties.class)
    public class StormpathAuthenticationEventExecutionPlanConfiguration implements AuthenticationEventExecutionPlanConfigurer {
        @Autowired
        @Qualifier("personDirectoryPrincipalResolver")
        private PrincipalResolver personDirectoryPrincipalResolver;

        @Override
        public void configureAuthenticationExecutionPlan(final AuthenticationEventExecutionPlan plan) {
            final StormpathProperties props = casProperties.getAuthn().getStormpath();
            // Register only when both the API key and the secret key are present.
            if (StringUtils.isNotBlank(props.getApiKey()) && StringUtils.isNotBlank(props.getSecretkey())) {
                plan.registerAuthenticationHandlerWithPrincipalResolver(stormpathAuthenticationHandler(), personDirectoryPrincipalResolver);
            }
        }
    }
}
| apache-2.0 |
moorkop/mccy-engine | src/main/java/me/itzg/mccy/services/WebServerPortProviderImpl.java | 948 | package me.itzg.mccy.services;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.context.embedded.EmbeddedServletContainerInitializedEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Service;
@Service
public class WebServerPortProviderImpl implements WebServerPortProvider,
        ApplicationListener<EmbeddedServletContainerInitializedEvent> {
    private static final Logger LOG = LoggerFactory.getLogger(WebServerPortProviderImpl.class);

    /** Port of the embedded servlet container; -1 until the container has started. */
    private int port = -1;

    @Override
    public int getPort() {
        return this.port;
    }

    /** Captures the actual listening port once the embedded container has initialized. */
    @Override
    public void onApplicationEvent(EmbeddedServletContainerInitializedEvent event) {
        this.port = event.getEmbeddedServletContainer().getPort();
        LOG.debug("Discovered web container port to be {}", this.port);
    }
}
| apache-2.0 |
ernestp/consulo | platform/vcs-log-graph-api/src/com/intellij/vcs/log/graph/actions/GraphMouseAction.java | 946 | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.vcs.log.graph.actions;
import com.intellij.vcs.log.graph.PrintElement;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * A mouse interaction performed on the VCS log graph, carrying the affected print element
 * (if any) and the kind of interaction.
 */
public interface GraphMouseAction {
  /** @return the graph element under the mouse, or {@code null} if none. */
  @Nullable
  PrintElement getAffectedElement();
  /** @return the kind of mouse interaction. */
  @NotNull
  Type getType();
  /** Kind of mouse interaction. */
  enum Type {
    CLICK,
    OVER
  }
}
| apache-2.0 |
apache/uima-uimaj | uimaj-tools/src/main/java/org/apache/uima/tools/cvd/control/AboutHandler.java | 2611 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.tools.cvd.control;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.Icon;
import javax.swing.JOptionPane;
import org.apache.uima.UIMAFramework;
import org.apache.uima.impl.UimaVersion;
import org.apache.uima.tools.cvd.MainFrame;
import org.apache.uima.tools.images.Images;
/**
* The Class AboutHandler.
*/
public class AboutHandler implements ActionListener {
/** The main. */
private final MainFrame main;
/**
* Instantiates a new about handler.
*
* @param frame
* the frame
*/
public AboutHandler(MainFrame frame) {
this.main = frame;
}
/*
* (non-Javadoc)
*
* @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
*/
@Override
public void actionPerformed(ActionEvent e) {
String javaVersion = System.getProperty("java.version");
String javaVendor = System.getProperty("java.vendor");
javaVendor = (javaVendor == null) ? "<Unknown>" : javaVendor;
String versionInfo = null;
if (javaVersion == null) {
versionInfo = "Running on an old version of Java";
} else {
versionInfo = "Running Java " + javaVersion + " from " + javaVendor;
}
String msg = "CVD (CAS Visual Debugger)\n" + "Apache UIMA Version "
+ UIMAFramework.getVersionString() + " Copyright 2006, " + UimaVersion.getBuildYear()
+ " The Apache Software Foundation\n" + versionInfo + "\n";
Icon icon = Images.getImageIcon(Images.UIMA_LOGO_SMALL);
if (icon == null) {
JOptionPane.showMessageDialog(this.main, msg, "About CVD", JOptionPane.INFORMATION_MESSAGE);
} else {
JOptionPane.showMessageDialog(this.main, msg, "About CVD", JOptionPane.INFORMATION_MESSAGE,
icon);
}
}
} | apache-2.0 |
xuhuisheng/lemon | src/main/java/com/mossle/bpm/data/ProcessListenerDeployer.java | 2001 | package com.mossle.bpm.data;
import java.util.List;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import com.mossle.bpm.persistence.manager.BpmConfBaseManager;
import com.mossle.bpm.persistence.manager.BpmConfListenerManager;
import com.mossle.bpm.persistence.manager.BpmConfNodeManager;
import com.mossle.core.csv.CsvProcessor;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Deploys process listener configuration from a bundled CSV data file at startup.
 *
 * <p>On {@link PostConstruct}, reads {@code data/process-listener.csv} (UTF-8) and hands each
 * row to a {@link ProcessListenerCallback} wired with the injected managers.</p>
 */
public class ProcessListenerDeployer {
    // Made final: a static logger is a constant and must not be reassignable.
    private static final Logger logger = LoggerFactory
            .getLogger(ProcessListenerDeployer.class);
    private BpmConfBaseManager bpmConfBaseManager;
    private BpmConfListenerManager bpmConfListenerManager;
    private BpmConfNodeManager bpmConfNodeManager;
    // NOTE(review): currently never read within this class — confirm whether it is used elsewhere.
    private String defaultTenantId = "1";

    /**
     * Reads the process listener CSV and applies each row via the callback.
     *
     * @throws Exception if the CSV file cannot be read or processed
     */
    @PostConstruct
    public void init() throws Exception {
        String processListenerDataFilePath = "data/process-listener.csv";
        String processListenerDataEncoding = "UTF-8";
        ProcessListenerCallback processListenerCallback = new ProcessListenerCallback();
        processListenerCallback.setBpmConfBaseManager(bpmConfBaseManager);
        processListenerCallback.setBpmConfNodeManager(bpmConfNodeManager);
        processListenerCallback
                .setBpmConfListenerManager(bpmConfListenerManager);
        new CsvProcessor().process(processListenerDataFilePath,
                processListenerDataEncoding, processListenerCallback);
    }

    /** Injects the conf-base manager passed on to the callback. */
    @Resource
    public void setBpmConfBaseManager(BpmConfBaseManager bpmConfBaseManager) {
        this.bpmConfBaseManager = bpmConfBaseManager;
    }

    /** Injects the conf-node manager passed on to the callback. */
    @Resource
    public void setBpmConfNodeManager(BpmConfNodeManager bpmConfNodeManager) {
        this.bpmConfNodeManager = bpmConfNodeManager;
    }

    /** Injects the conf-listener manager passed on to the callback. */
    @Resource
    public void setBpmConfListenerManager(
            BpmConfListenerManager bpmConfListenerManager) {
        this.bpmConfListenerManager = bpmConfListenerManager;
    }
}
| apache-2.0 |
azureplus/flex-blazeds | modules/proxy/src/flex/messaging/services/http/package-info.java | 857 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package flex.messaging.services.http; | apache-2.0 |
la3lma/docker-client | src/main/java/com/spotify/docker/client/messages/ContainerInfo.java | 8429 | /*
* Copyright (c) 2014 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.docker.client.messages;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Date;
import java.util.List;
import java.util.Map;
import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.ANY;
import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE;
@JsonAutoDetect(fieldVisibility = ANY, setterVisibility = NONE, getterVisibility = NONE)
/**
 * Value object describing a Docker container as returned by the inspect endpoint.
 *
 * <p>Fields are populated by Jackson from the API response. Instances are effectively
 * immutable after deserialization; {@link #created()} returns a defensive copy.</p>
 */
@JsonAutoDetect(fieldVisibility = ANY, setterVisibility = NONE, getterVisibility = NONE)
public class ContainerInfo {

  @JsonProperty("Id") private String id;
  @JsonProperty("Created") private Date created;
  @JsonProperty("Path") private String path;
  @JsonProperty("Args") private ImmutableList<String> args;
  @JsonProperty("Config") private ContainerConfig config;
  @JsonProperty("HostConfig") private HostConfig hostConfig;
  @JsonProperty("State") private ContainerState state;
  @JsonProperty("Image") private String image;
  @JsonProperty("NetworkSettings") private NetworkSettings networkSettings;
  @JsonProperty("ResolvConfPath") private String resolvConfPath;
  @JsonProperty("HostnamePath") private String hostnamePath;
  @JsonProperty("HostsPath") private String hostsPath;
  @JsonProperty("Name") private String name;
  @JsonProperty("Driver") private String driver;
  @JsonProperty("ExecDriver") private String execDriver;
  @JsonProperty("ProcessLabel") private String processLabel;
  @JsonProperty("MountLabel") private String mountLabel;
  @JsonProperty("Volumes") private ImmutableMap<String, String> volumes;
  @JsonProperty("VolumesRW") private ImmutableMap<String, Boolean> volumesRW;

  public String id() {
    return id;
  }

  /** Returns a defensive copy of the creation timestamp, or null if absent. */
  public Date created() {
    return created == null ? null : new Date(created.getTime());
  }

  public String path() {
    return path;
  }

  public List<String> args() {
    return args;
  }

  public ContainerConfig config() {
    return config;
  }

  public HostConfig hostConfig() {
    return hostConfig;
  }

  public ContainerState state() {
    return state;
  }

  public String image() {
    return image;
  }

  public NetworkSettings networkSettings() {
    return networkSettings;
  }

  public String resolvConfPath() {
    return resolvConfPath;
  }

  public String hostnamePath() {
    return hostnamePath;
  }

  public String hostsPath() {
    return hostsPath;
  }

  public String name() {
    return name;
  }

  public String driver() {
    return driver;
  }

  public String execDriver() {
    return execDriver;
  }

  public String processLabel() {
    return processLabel;
  }

  public String mountLabel() {
    return mountLabel;
  }

  public Map<String, String> volumes() {
    return volumes;
  }

  public Map<String, Boolean> volumesRW() {
    return volumesRW;
  }

  // java.util.Objects is fully qualified below because this class imports Guava's
  // com.google.common.base.Objects (used in toString()).
  @Override
  public boolean equals(final Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    final ContainerInfo that = (ContainerInfo) o;
    return java.util.Objects.equals(id, that.id)
           && java.util.Objects.equals(created, that.created)
           && java.util.Objects.equals(path, that.path)
           && java.util.Objects.equals(args, that.args)
           && java.util.Objects.equals(config, that.config)
           && java.util.Objects.equals(hostConfig, that.hostConfig)
           && java.util.Objects.equals(state, that.state)
           && java.util.Objects.equals(image, that.image)
           && java.util.Objects.equals(networkSettings, that.networkSettings)
           && java.util.Objects.equals(resolvConfPath, that.resolvConfPath)
           && java.util.Objects.equals(hostnamePath, that.hostnamePath)
           && java.util.Objects.equals(hostsPath, that.hostsPath)
           && java.util.Objects.equals(name, that.name)
           && java.util.Objects.equals(driver, that.driver)
           && java.util.Objects.equals(execDriver, that.execDriver)
           && java.util.Objects.equals(processLabel, that.processLabel)
           && java.util.Objects.equals(mountLabel, that.mountLabel)
           && java.util.Objects.equals(volumes, that.volumes)
           && java.util.Objects.equals(volumesRW, that.volumesRW);
  }

  @Override
  public int hashCode() {
    return java.util.Objects.hash(id, created, path, args, config, hostConfig, state, image,
                                  networkSettings, resolvConfPath, hostnamePath, hostsPath, name,
                                  driver, execDriver, processLabel, mountLabel, volumes, volumesRW);
  }

  @Override
  public String toString() {
    return Objects.toStringHelper(this)
        .add("id", id)
        .add("created", created)
        .add("path", path)
        .add("args", args)
        .add("config", config)
        .add("hostConfig", hostConfig)
        .add("state", state)
        .add("image", image)
        .add("networkSettings", networkSettings)
        .add("resolvConfPath", resolvConfPath)
        .add("hostnamePath", hostnamePath)
        .add("hostsPath", hostsPath)
        .add("name", name)
        .add("driver", driver)
        .add("execDriver", execDriver)
        .add("processLabel", processLabel)
        .add("mountLabel", mountLabel)
        .add("volumes", volumes)
        .add("volumesRW", volumesRW)
        .toString();
  }
}
| apache-2.0 |
4treesCH/strolch | li.strolch.utils/src/main/java/li/strolch/utils/NamedThreadPoolFactory.java | 934 | package li.strolch.utils;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Simple {@link ThreadFactory} which allocates as a pool and has a name for each pool
*/
/**
 * Simple {@link ThreadFactory} which allocates as a pool and has a name for each pool.
 *
 * <p>Threads are named {@code <poolName>-<n>} with n starting at 1, created non-daemon
 * and at normal priority.</p>
 */
public class NamedThreadPoolFactory implements ThreadFactory {

	private final ThreadGroup threadGroup;
	private final AtomicInteger nextThreadNumber = new AtomicInteger(1);
	private final String namePrefix;

	/**
	 * Creates a factory whose threads are named after the given pool.
	 *
	 * @param poolName prefix for the names of all threads this factory creates
	 */
	public NamedThreadPoolFactory(String poolName) {
		SecurityManager securityManager = System.getSecurityManager();
		this.threadGroup = securityManager == null
				? Thread.currentThread().getThreadGroup()
				: securityManager.getThreadGroup();
		this.namePrefix = poolName + "-";
	}

	@Override
	public Thread newThread(Runnable runnable) {
		String threadName = this.namePrefix + this.nextThreadNumber.getAndIncrement();
		Thread thread = new Thread(this.threadGroup, runnable, threadName, 0);
		// Normalize inherited attributes: always non-daemon and normal priority.
		if (thread.isDaemon())
			thread.setDaemon(false);
		if (thread.getPriority() != Thread.NORM_PRIORITY)
			thread.setPriority(Thread.NORM_PRIORITY);
		return thread;
	}
}
roundrop/ermasterr | src/org/insightech/er/db/impl/db2/DB2DDLCreator.java | 5719 | package org.insightech.er.db.impl.db2;
import org.insightech.er.db.impl.db2.tablespace.DB2TablespaceProperties;
import org.insightech.er.editor.model.ERDiagram;
import org.insightech.er.editor.model.dbexport.ddl.DDLCreator;
import org.insightech.er.editor.model.diagram_contents.element.node.category.Category;
import org.insightech.er.editor.model.diagram_contents.element.node.table.column.NormalColumn;
import org.insightech.er.editor.model.diagram_contents.not_element.sequence.Sequence;
import org.insightech.er.editor.model.diagram_contents.not_element.tablespace.Tablespace;
import org.insightech.er.util.Check;
import org.insightech.er.util.Format;
/**
 * DDL creator for IBM DB2: renders column identity clauses, tablespace DDL and
 * sequence DDL in DB2 syntax.
 */
public class DB2DDLCreator extends DDLCreator {

    /**
     * @param diagram the diagram to generate DDL for
     * @param targetCategory category restricting which elements are rendered, or null
     * @param semicolon whether to terminate statements with a semicolon
     */
    public DB2DDLCreator(final ERDiagram diagram, final Category targetCategory, final boolean semicolon) {
        super(diagram, targetCategory, semicolon);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    protected String getColulmnDDL(final NormalColumn normalColumn) {
        final StringBuilder ddl = new StringBuilder();
        // Base column DDL (name, type, constraints) comes from the superclass.
        ddl.append(super.getColulmnDDL(normalColumn));
        if (normalColumn.isAutoIncrement()) {
            // DB2 identity column; START WITH / INCREMENT BY only when configured.
            ddl.append(" GENERATED ALWAYS AS IDENTITY ");
            final Sequence sequence = normalColumn.getAutoIncrementSetting();
            if (sequence.getIncrement() != null || sequence.getStart() != null) {
                ddl.append("(START WITH ");
                if (sequence.getStart() != null) {
                    ddl.append(sequence.getStart());
                } else {
                    // DB2's default start value when only an increment is given.
                    ddl.append("1");
                }
                if (sequence.getIncrement() != null) {
                    ddl.append(", INCREMENT BY ");
                    ddl.append(sequence.getIncrement());
                }
                ddl.append(")");
            }
        }
        return ddl.toString();
    }

    /**
     * Renders a DB2 CREATE TABLESPACE statement, emitting only the clauses whose
     * corresponding properties are non-empty.
     */
    @Override
    protected String getDDL(final Tablespace tablespace) {
        final DB2TablespaceProperties tablespaceProperties = (DB2TablespaceProperties) tablespace.getProperties(environment, getDiagram());
        final StringBuilder ddl = new StringBuilder();
        ddl.append("CREATE ");
        if (!Check.isEmpty(tablespaceProperties.getType())) {
            ddl.append(tablespaceProperties.getType());
            ddl.append(" ");
        }
        ddl.append("TABLESPACE ");
        ddl.append(filterName(tablespace.getName()));
        ddl.append(LF());
        if (!Check.isEmpty(tablespaceProperties.getPageSize())) {
            ddl.append(" PAGESIZE ");
            ddl.append(tablespaceProperties.getPageSize());
            ddl.append(LF());
        }
        ddl.append(" MANAGED BY ");
        ddl.append(tablespaceProperties.getManagedBy());
        ddl.append(" USING(");
        ddl.append(tablespaceProperties.getContainer());
        ddl.append(")" + LF());
        if (!Check.isEmpty(tablespaceProperties.getExtentSize())) {
            ddl.append(" EXTENTSIZE ");
            ddl.append(tablespaceProperties.getExtentSize());
            ddl.append(LF());
        }
        if (!Check.isEmpty(tablespaceProperties.getPrefetchSize())) {
            ddl.append(" PREFETCHSIZE ");
            ddl.append(tablespaceProperties.getPrefetchSize());
            ddl.append(LF());
        }
        if (!Check.isEmpty(tablespaceProperties.getBufferPoolName())) {
            ddl.append(" BUFFERPOOL ");
            ddl.append(tablespaceProperties.getBufferPoolName());
            ddl.append(LF());
        }
        if (semicolon) {
            ddl.append(";");
        }
        return ddl.toString();
    }

    /**
     * Renders a DB2 CREATE SEQUENCE statement, including optional data type,
     * bounds, caching and ordering clauses.
     */
    @Override
    public String getDDL(final Sequence sequence) {
        final StringBuilder ddl = new StringBuilder();
        final String description = sequence.getDescription();
        // Optionally emit the sequence description as a leading SQL comment.
        if (semicolon && !Check.isEmpty(description) && ddlTarget.inlineTableComment) {
            ddl.append("-- ");
            ddl.append(replaceLF(description, LF() + "-- "));
            ddl.append(LF());
        }
        ddl.append("CREATE ");
        ddl.append("SEQUENCE ");
        ddl.append(filterName(getNameWithSchema(sequence.getSchema(), sequence.getName())));
        if (!Check.isEmpty(sequence.getDataType())) {
            ddl.append(" AS ");
            String dataType = sequence.getDataType();
            // Substitute the "(p)" precision placeholder with the configured decimal size.
            // NOTE(review): the ")" is concatenated inside Format.toString(); if getDecimalSize()
            // is null this yields "(null)" rather than "()" — likely intended
            // Format.toString(sequence.getDecimalSize()) + ")". Confirm before changing.
            dataType = dataType.replaceAll("\\(p\\)", "(" + Format.toString(sequence.getDecimalSize() + ")"));
            ddl.append(dataType);
        }
        if (sequence.getIncrement() != null) {
            ddl.append(" INCREMENT BY ");
            ddl.append(sequence.getIncrement());
        }
        if (sequence.getMinValue() != null) {
            ddl.append(" MINVALUE ");
            ddl.append(sequence.getMinValue());
        }
        if (sequence.getMaxValue() != null) {
            ddl.append(" MAXVALUE ");
            ddl.append(sequence.getMaxValue());
        }
        if (sequence.getStart() != null) {
            ddl.append(" START WITH ");
            ddl.append(sequence.getStart());
        }
        // CACHE is only meaningful when NOCACHE is not set.
        if (!sequence.isNocache() && sequence.getCache() != null) {
            ddl.append(" CACHE ");
            ddl.append(sequence.getCache());
        }
        if (sequence.isCycle()) {
            ddl.append(" CYCLE");
        }
        if (sequence.isNocache()) {
            ddl.append(" NOCACHE");
        }
        if (sequence.isOrder()) {
            ddl.append(" ORDER");
        }
        if (semicolon) {
            ddl.append(";");
        }
        return ddl.toString();
    }
}
| apache-2.0 |
sdole/aws-sdk-java | aws-java-sdk-iot/src/main/java/com/amazonaws/services/iotdata/model/ThrottlingException.java | 1104 | /*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.iotdata.model;
import com.amazonaws.AmazonServiceException;
/**
* <p>
* The rate exceeds the limit.
* </p>
*/
public class ThrottlingException extends AmazonServiceException {
    // Fixed serial version; bump only on incompatible serialized-form changes.
    private static final long serialVersionUID = 1L;
    /**
     * Constructs a new ThrottlingException with the specified error message.
     *
     * @param message
     *        Describes the error encountered.
     */
    public ThrottlingException(String message) {
        super(message);
    }
}
eurekaclinical/protempa | protempa-framework/src/main/java/org/protempa/KnowledgeSource.java | 6587 | /*
* #%L
* Protempa Framework
* %%
* Copyright (C) 2012 - 2013 Emory University
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.protempa;
import org.protempa.valueset.ValueSet;
import java.util.List;
import java.util.Set;
import org.protempa.backend.KnowledgeSourceBackendUpdatedEvent;
import org.protempa.backend.ksb.KnowledgeSourceBackend;
/**
* @author Andrew Post
*/
public interface KnowledgeSource extends Source<KnowledgeSourceUpdatedEvent, KnowledgeSourceBackend, KnowledgeSourceBackendUpdatedEvent> {

    // --- Existence checks ------------------------------------------------
    // Each returns whether a definition/value set with the given id exists in
    // the knowledge source.

    boolean hasAbstractionDefinition(String id) throws KnowledgeSourceReadException;

    boolean hasPropositionDefinition(String id) throws KnowledgeSourceReadException;

    boolean hasTemporalPropositionDefinition(String id) throws KnowledgeSourceReadException;

    boolean hasContextDefinition(String id) throws KnowledgeSourceReadException;

    boolean hasValueSet(String id) throws KnowledgeSourceReadException;

    // --- Descendant collection -------------------------------------------
    // Collect the given propositions plus their descendants, traversing either
    // all "narrower" relationships or only inverse-isA relationships.
    // The *PropDef* variants return definitions; the *PropId* variants ids.

    Set<PropositionDefinition> collectPropDefDescendantsUsingAllNarrower(boolean inDataSourceOnly, String... propIds) throws KnowledgeSourceReadException;

    Set<String> collectPropIdDescendantsUsingAllNarrower(boolean inDataSourceOnly, String... propIds) throws KnowledgeSourceReadException;

    Set<PropositionDefinition> collectPropDefDescendantsUsingInverseIsA(String... propIds) throws KnowledgeSourceReadException;

    Set<String> collectPropIdDescendantsUsingInverseIsA(String... propIds) throws KnowledgeSourceReadException;

    // --- Abstraction relationships ---------------------------------------
    // "abstractedFrom" navigates from an abstraction to the propositions it is
    // built from; "abstractedInto" navigates the opposite direction.
    // Overloads accept either a definition object or a proposition id.

    List<PropositionDefinition> readAbstractedFrom(AbstractionDefinition propDef) throws KnowledgeSourceReadException;

    List<PropositionDefinition> readAbstractedFrom(String id) throws KnowledgeSourceReadException;

    List<AbstractionDefinition> readAbstractedInto(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<AbstractionDefinition> readAbstractedInto(String propId) throws KnowledgeSourceReadException;

    List<String> readAbstractedIntoPropIds(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<String> readAbstractedIntoPropIds(String id) throws KnowledgeSourceReadException;

    AbstractionDefinition readAbstractionDefinition(String id) throws KnowledgeSourceReadException;

    ContextDefinition readContextDefinition(String id) throws KnowledgeSourceReadException;

    // --- isA hierarchy ---------------------------------------------------

    List<PropositionDefinition> readInverseIsA(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<PropositionDefinition> readInverseIsA(String id) throws KnowledgeSourceReadException;

    List<PropositionDefinition> readIsA(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<PropositionDefinition> readIsA(String id) throws KnowledgeSourceReadException;

    List<String> readIsAPropIds(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<String> readIsAPropIds(String id) throws KnowledgeSourceReadException;

    // --- Context relationships -------------------------------------------
    // Sub-contexts, the contexts a context is a sub-context of, and the
    // induces/inducedBy relationship between temporal propositions and
    // contexts.

    List<ContextDefinition> readSubContexts(String id) throws KnowledgeSourceReadException;

    List<ContextDefinition> readSubContexts(ContextDefinition contextDef) throws KnowledgeSourceReadException;

    List<ContextDefinition> readSubContextOfs(String id) throws KnowledgeSourceReadException;

    List<ContextDefinition> readSubContextOfs(ContextDefinition contextDef) throws KnowledgeSourceReadException;

    List<String> readSubContextOfPropIds(String id) throws KnowledgeSourceReadException;

    List<String> readSubContextOfPropIds(ContextDefinition contextDef) throws KnowledgeSourceReadException;

    List<ContextDefinition> readInduces(String tempPropDef) throws KnowledgeSourceReadException;

    List<ContextDefinition> readInduces(TemporalPropositionDefinition tempPropDef) throws KnowledgeSourceReadException;

    List<String> readInducesPropIds(String id) throws KnowledgeSourceReadException;

    List<String> readInducesPropIds(TemporalPropositionDefinition tempPropDef) throws KnowledgeSourceReadException;

    List<TemporalPropositionDefinition> readInducedBy(String contextId) throws KnowledgeSourceReadException;

    List<TemporalPropositionDefinition> readInducedBy(ContextDefinition contextDef) throws KnowledgeSourceReadException;

    /**
     * Returns the specified proposition definition.
     *
     * @param id a proposition id {@link String}. Cannot be <code>null</code>.
     * @return a {@link PropositionDefinition}, or <code>null</code> if none was
     * found with the given <code>id</code>.
     * @throws KnowledgeSourceReadException if an error occurred reading from
     * the knowledge base.
     */
    PropositionDefinition readPropositionDefinition(String id) throws KnowledgeSourceReadException;

    TemporalPropositionDefinition readTemporalPropositionDefinition(String id) throws KnowledgeSourceReadException;

    ValueSet readValueSet(String id) throws KnowledgeSourceReadException;

    // --- Parent navigation -----------------------------------------------

    List<PropositionDefinition> readParents(PropositionDefinition propDef)
            throws KnowledgeSourceReadException;

    List<PropositionDefinition> readParents(String propId)
            throws KnowledgeSourceReadException;

    List<String> readParentPropIds(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<String> readParentPropIds(String propId) throws KnowledgeSourceReadException;

    // Returns proposition ids matching the given search key.
    List<String> getMatchingPropIds(String searchKey) throws KnowledgeSourceReadException;

    // --- Bulk reads ------------------------------------------------------

    List<PropositionDefinition> readPropositionDefinitions(String... propIds) throws KnowledgeSourceReadException;

    List<AbstractionDefinition> readAbstractionDefinitions(String... propIds) throws KnowledgeSourceReadException;

    List<TemporalPropositionDefinition> readTemporalPropositionDefinitions(String... propIds) throws KnowledgeSourceReadException;

    List<ContextDefinition> readContextDefinitions(String... propIds) throws KnowledgeSourceReadException;
}
| apache-2.0 |
lizhanhui/data_druid | processing/src/main/java/io/druid/segment/BitmapOffset.java | 8675 | /*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.segment;
import io.druid.collections.bitmap.BitmapFactory;
import io.druid.collections.bitmap.ImmutableBitmap;
import io.druid.collections.bitmap.MutableBitmap;
import io.druid.collections.bitmap.WrappedImmutableRoaringBitmap;
import io.druid.collections.bitmap.WrappedRoaringBitmap;
import io.druid.extendedset.intset.EmptyIntIterator;
import io.druid.java.util.common.RE;
import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
import io.druid.segment.data.Offset;
import io.druid.segment.data.RoaringBitmapSerdeFactory;
import org.roaringbitmap.IntIterator;
import java.util.Arrays;
import java.util.HashSet;
/**
*/
/**
 * An {@link Offset} that iterates over the row ids set in an
 * {@link ImmutableBitmap}, in ascending or (for Roaring) descending order.
 */
public class BitmapOffset extends Offset
{
  // Sentinel meaning "iteration exhausted"; valid row offsets are >= 0.
  private static final int INVALID_VALUE = -1;
  private static final BitmapFactory ROARING_BITMAP_FACTORY = new RoaringBitmapSerdeFactory(false).getBitmapFactory();

  /**
   * Currently the default stops are not consciously optimized for the goals described in {@link #factorizeFullness}.
   * They are chosen intuitively. There was no experimentation with different bitmapFullnessFactorizationStops.
   * Experimentation and performance feedback with a different set of stops is welcome.
   */
  private static final String DEFAULT_FULLNESS_FACTORIZATION_STOPS = "0.01,0.1,0.3,0.5,0.7,0.9,0.99";
  private static final double[] BITMAP_FULLNESS_FACTORIZATION_STOPS;
  private static final String[] FACTORIZED_FULLNESS;

  static {
    // Parse and validate the (possibly user-overridden) fullness stops once at class load.
    String stopString = System.getProperty("bitmapFullnessFactorizationStops", DEFAULT_FULLNESS_FACTORIZATION_STOPS);
    String[] stopsArray = stopString.split(",");
    if (stopsArray.length == 0) {
      throw new RE("Empty bitmapFullnessFactorizationStops: " + stopString);
    }
    if (new HashSet<>(Arrays.asList(stopsArray)).size() != stopsArray.length) {
      throw new RE("Non unique bitmapFullnessFactorizationStops: " + stopString);
    }
    BITMAP_FULLNESS_FACTORIZATION_STOPS = new double[stopsArray.length];
    for (int i = 0; i < stopsArray.length; i++) {
      String stop = stopsArray[i];
      BITMAP_FULLNESS_FACTORIZATION_STOPS[i] = Double.parseDouble(stop);
    }
    Arrays.sort(BITMAP_FULLNESS_FACTORIZATION_STOPS);
    double firstStop = BITMAP_FULLNESS_FACTORIZATION_STOPS[0];
    if (Double.isNaN(firstStop) || firstStop <= 0.0) {
      // Use %s, not %d: the stop is a double and %d would fail with an
      // IllegalFormatConversionException instead of producing this message.
      throw new RE("First bitmapFullnessFactorizationStop[%s] should be > 0", firstStop);
    }
    double lastStop = BITMAP_FULLNESS_FACTORIZATION_STOPS[stopsArray.length - 1];
    if (Double.isNaN(lastStop) || lastStop >= 1) {
      // %s for the same reason as above (double argument).
      throw new RE("Last bitmapFullnessFactorizationStop[%s] should be < 1", lastStop);
    }
    // Build human-readable half-open interval labels, e.g. "(0.01, 0.1]".
    String prevStop = "0";
    FACTORIZED_FULLNESS = new String[stopsArray.length + 1];
    for (int i = 0; i < stopsArray.length; i++) {
      String stop = String.valueOf(BITMAP_FULLNESS_FACTORIZATION_STOPS[i]);
      FACTORIZED_FULLNESS[i] = "(" + prevStop + ", " + stop + "]";
      prevStop = stop;
    }
    FACTORIZED_FULLNESS[stopsArray.length] = "(" + prevStop + ", 1)";
  }

  /**
   * Processing of queries with BitmapOffsets, whose Bitmaps has different factorized fullness (bucket), reported from
   * this method, uses different copies of the same code, so JIT compiler analyzes and compiles the code for different
   * factorized fullness separately. The goal is to capture frequency of abstraction usage in compressed bitmap
   * algorithms, i. e.
   *  - "Zero sequence" vs. "Literal" vs. "One sequence" in {@link io.druid.extendedset.intset.ImmutableConciseSet}
   *  - {@link org.roaringbitmap.ArrayContainer} vs {@link org.roaringbitmap.BitmapContainer} in Roaring
   * and then https://shipilev.net/blog/2015/black-magic-method-dispatch/ comes into play. The secondary goal is to
   * capture HotSpot's thresholds, which it uses to compile conditional blocks differently inside bitmap impls. See
   * https://bugs.openjdk.java.net/browse/JDK-6743900. The default BlockLayoutMinDiamondPercentage=20, i. e. if
   * probability of taking some branch is less than 20%, it is moved out of the hot path (to save some icache?).
   *
   * On the other hand, we don't want to factor fullness into too small pieces, because
   *  - too little queries may fall into those small buckets, and they are not compiled with Hotspot's C2 compiler
   *  - if there are a lot of queries for each small factorized fullness and their copies of the code is compiled by
   *  C2, this pollutes code cache and takes time to perform too many compilations, while some of them likely produce
   *  identical code.
   *
   * Ideally there should be as much buckets as possible as long as Hotspot's C2 output for each bucket is different.
   */
  private static String factorizeFullness(long bitmapCardinality, long numRows)
  {
    if (bitmapCardinality == 0) {
      return "0";
    } else if (bitmapCardinality == numRows) {
      return "1";
    } else {
      double fullness = bitmapCardinality / (double) numRows;
      int index = Arrays.binarySearch(BITMAP_FULLNESS_FACTORIZATION_STOPS, fullness);
      if (index < 0) {
        // binarySearch returns (-(insertion point) - 1) on a miss; ~index recovers the bucket.
        index = ~index;
      }
      return FACTORIZED_FULLNESS[index];
    }
  }

  final IntIterator itr;
  // Fullness bucket label; kept as a field so different buckets produce distinct runtime shapes.
  final String fullness;
  // Current offset value, or INVALID_VALUE when exhausted.
  int val;

  /**
   * Returns a reverse (descending) iterator over the given bitmap, converting it to a
   * Roaring bitmap first if necessary (only Roaring exposes a reverse iterator here).
   */
  public static IntIterator getReverseBitmapOffsetIterator(ImmutableBitmap bitmapIndex)
  {
    ImmutableBitmap roaringBitmap = bitmapIndex;
    if (!(bitmapIndex instanceof WrappedImmutableRoaringBitmap)) {
      final MutableBitmap bitmap = ROARING_BITMAP_FACTORY.makeEmptyMutableBitmap();
      final IntIterator iterator = bitmapIndex.iterator();
      while (iterator.hasNext()) {
        bitmap.add(iterator.next());
      }
      roaringBitmap = ROARING_BITMAP_FACTORY.makeImmutableBitmap(bitmap);
    }
    return ((WrappedImmutableRoaringBitmap) roaringBitmap).getBitmap().getReverseIntIterator();
  }

  /**
   * Factory method. Roaring-backed bitmaps (and all descending iterations, which require
   * Roaring) get a {@link RoaringBitmapOffset}; everything else a plain BitmapOffset.
   */
  public static BitmapOffset of(ImmutableBitmap bitmapIndex, boolean descending, long numRows)
  {
    if (bitmapIndex instanceof WrappedImmutableRoaringBitmap ||
        bitmapIndex instanceof WrappedRoaringBitmap ||
        descending) {
      return new RoaringBitmapOffset(bitmapIndex, descending, numRows);
    } else {
      return new BitmapOffset(bitmapIndex, descending, numRows);
    }
  }

  private BitmapOffset(ImmutableBitmap bitmapIndex, boolean descending, long numRows)
  {
    this.itr = newIterator(bitmapIndex, descending);
    this.fullness = factorizeFullness(bitmapIndex.size(), numRows);
    // Position on the first offset (or INVALID_VALUE for an empty bitmap).
    increment();
  }

  private IntIterator newIterator(ImmutableBitmap bitmapIndex, boolean descending)
  {
    if (!descending) {
      return bitmapIndex.iterator();
    } else {
      return getReverseBitmapOffsetIterator(bitmapIndex);
    }
  }

  // Copy constructor used by clone().
  private BitmapOffset(String fullness, IntIterator itr, int val)
  {
    this.fullness = fullness;
    this.itr = itr;
    this.val = val;
  }

  @Override
  public void increment()
  {
    if (itr.hasNext()) {
      val = itr.next();
    } else {
      val = INVALID_VALUE;
    }
  }

  @Override
  public boolean withinBounds()
  {
    return val > INVALID_VALUE;
  }

  @Override
  public Offset clone()
  {
    return new BitmapOffset(fullness, itr.clone(), val);
  }

  @Override
  public int getOffset()
  {
    return val;
  }

  @Override
  public void inspectRuntimeShape(RuntimeShapeInspector inspector)
  {
    inspector.visit("itr", itr);
    inspector.visit("fullness", fullness);
  }

  /**
   * Roaring-specific subclass; its clone() substitutes an empty iterator when the
   * source iterator is exhausted, avoiding a clone of a spent Roaring iterator.
   */
  public static class RoaringBitmapOffset extends BitmapOffset
  {
    public RoaringBitmapOffset(ImmutableBitmap bitmapIndex, boolean descending, long numRows)
    {
      super(bitmapIndex, descending, numRows);
    }

    RoaringBitmapOffset(String fullness, IntIterator itr, int val)
    {
      super(fullness, itr, val);
    }

    @Override
    public Offset clone()
    {
      return new RoaringBitmapOffset(fullness, itr.hasNext() ? itr.clone() : EmptyIntIterator.instance(), val);
    }
  }
}
| apache-2.0 |
saleeh93/buck-cutom | src/com/facebook/buck/httpserver/TraceDataHandler.java | 3302 | /*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.httpserver;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.io.CharStreams;
import com.google.common.net.MediaType;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.handler.AbstractHandler;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Writer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* HTTP handler for requests to the {@code /tracedata} path.
*/
/**
 * HTTP handler for requests to the {@code /tracedata} path. Serves trace data as
 * JavaScript, optionally wrapped in a JSONP callback supplied via the
 * {@code callback} query parameter.
 */
class TraceDataHandler extends AbstractHandler {

  static final Pattern ID_PATTERN = Pattern.compile("/([0-9a-zA-Z-]+)");

  @VisibleForTesting
  static final Pattern CALLBACK_PATTERN = Pattern.compile("[\\w\\.]+");

  private final TracesHelper tracesHelper;

  TraceDataHandler(TracesHelper tracesHelper) {
    this.tracesHelper = Preconditions.checkNotNull(tracesHelper);
  }

  @Override
  public void handle(String target,
      Request baseRequest,
      HttpServletRequest request,
      HttpServletResponse response)
      throws IOException, ServletException {
    // Only GET is supported; everything else is rejected outright.
    if (!"GET".equals(baseRequest.getMethod())) {
      Responses.writeFailedResponse(baseRequest, response);
      return;
    }
    doGet(baseRequest, response);
  }

  private void doGet(Request baseRequest, HttpServletResponse response)
      throws ServletException, IOException {
    // The path must look like "/<trace-id>"; otherwise the request fails.
    Matcher idMatcher = ID_PATTERN.matcher(baseRequest.getPathInfo());
    if (!idMatcher.matches()) {
      Responses.writeFailedResponse(baseRequest, response);
      return;
    }
    String traceId = idMatcher.group(1);

    response.setContentType(MediaType.JAVASCRIPT_UTF_8.toString());
    response.setStatus(HttpServletResponse.SC_OK);

    Writer writer = response.getWriter();

    // Emit a JSONP prefix only when a syntactically valid callback was supplied.
    String callbackParam = baseRequest.getParameter("callback");
    boolean wrapInCallback =
        callbackParam != null && CALLBACK_PATTERN.matcher(callbackParam).matches();
    if (wrapInCallback) {
      writer.write(callbackParam);
      writer.write("(");
    }

    // Stream the raw trace data into the response body.
    try (
        InputStream input = tracesHelper.getInputForTrace(traceId);
        InputStreamReader inputStreamReader = new InputStreamReader(input)) {
      CharStreams.copy(inputStreamReader, writer);
    }

    if (wrapInCallback) {
      writer.write(");\n");
    }

    response.flushBuffer();
    baseRequest.setHandled(true);
  }
}
| apache-2.0 |
roshanp/lucene-hdfs-directory | src/main/java/org/apache/blur/store/blockcache_v2/Size.java | 937 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.blur.store.blockcache_v2;
/**
 * Strategy for reporting the size associated with a named file in a
 * {@link CacheDirectory}. The unit is implementation-defined (presumably
 * bytes — confirm against implementations).
 */
public interface Size {
  // Returns the size for fileName within the given directory.
  int getSize(CacheDirectory directory, String fileName);
}
| apache-2.0 |
teetime-framework/teetime | src/main/java/teetime/stage/taskfarm/StaticTaskFarmStage.java | 5451 | /**
* Copyright © 2015 Christian Wulf, Nelson Tavares de Sousa (http://teetime-framework.github.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package teetime.stage.taskfarm;
import java.util.ArrayList;
import java.util.List;
import teetime.framework.CompositeStage;
import teetime.framework.InputPort;
import teetime.framework.OutputPort;
import teetime.stage.basic.ITransformation;
import teetime.stage.basic.distributor.Distributor;
import teetime.stage.basic.merger.Merger;
/**
* Represents the task farm parallelization pattern in TeeTime.
*
* @author Christian Claus Wiechmann, Christian Wulf
*
* @param <I>
* Input type of Task Farm
* @param <O>
* Output type of Task Farm
* @param <T>
* Type of the parallelized stage
*/
public class StaticTaskFarmStage<I, O, T extends ITaskFarmDuplicable<I, O>> extends CompositeStage implements ITransformation<I, O> {

    // Default worker count: one worker stage per available processor.
    private static final int MAX_NUMBER_OF_STAGES = Runtime.getRuntime().availableProcessors();

    // Fans incoming elements out to the worker stages.
    private final Distributor<I> distributor;
    // Collects the workers' outputs into a single stream.
    private final Merger<O> merger;
    /** List of all currently existing worker stages */
    private final List<ITaskFarmDuplicable<I, O>> workerStages;
    // Outer ports of the composite stage, mapped onto distributor/merger in init().
    private InputPort<I> inputPort;
    private OutputPort<O> outputPort;

    /**
     * Creates a task farm stage with <i>n</i> worker stages and a pipe capacity of {@value #DEFAULT_PIPE_CAPACITY}, where <i>n</i>
     * is
     *
     * <pre>
     * Runtime.getRuntime().availableProcessors()
     * </pre>
     *
     * @param workerStage
     *            the stage to duplicate and execute in parallel
     */
    public StaticTaskFarmStage(final T workerStage) {
        this(workerStage, MAX_NUMBER_OF_STAGES, DEFAULT_PIPE_CAPACITY);
    }

    /**
     * Creates a task farm stage with the given number of worker stages and the
     * default pipe capacity.
     *
     * @param workerStage
     *            the stage to duplicate and execute in parallel
     * @param numberStages
     *            number of worker stages (must be at least 1)
     */
    public StaticTaskFarmStage(final T workerStage, final int numberStages) {
        this(workerStage, numberStages, DEFAULT_PIPE_CAPACITY);
    }

    /**
     * Creates a task farm stage with the given number of worker stages and pipe capacity.
     *
     * @param workerStage
     *            the stage to duplicate and execute in parallel
     * @param numberStages
     *            number of worker stages (must be at least 1)
     * @param pipeCapacity
     *            capacity of each connecting pipe (must be at least 1)
     */
    public StaticTaskFarmStage(final T workerStage, final int numberStages, final int pipeCapacity) {
        this(workerStage, numberStages, pipeCapacity, new Distributor<I>(), new Merger<O>());
    }

    /**
     * Extension point for subclasses that want to supply their own distributor/merger.
     *
     * @throws IllegalArgumentException
     *             if workerStage is null, numberStages &lt; 1, or pipeCapacity &lt; 1
     */
    protected StaticTaskFarmStage(final T workerStage, final int numberStages, final int pipeCapacity, final Distributor<I> distributor, final Merger<O> merger) {
        super();
        if (null == workerStage) {
            throw new IllegalArgumentException("The constructor of a Task Farm may not be called with null as the worker stage.");
        }
        if (numberStages < 1) {
            throw new IllegalArgumentException("The number of worker stages must be at least 1.");
        }
        if (pipeCapacity < 1) {
            throw new IllegalArgumentException("The capacity of the pipe(s) must be at least 1.");
        }
        this.distributor = distributor;
        this.merger = merger;
        this.workerStages = new ArrayList<ITaskFarmDuplicable<I, O>>();
        this.init(workerStage, numberStages, pipeCapacity);
    }

    // Wires up the first worker plus (numberStages - 1) duplicates, declares each
    // worker active (own thread), and exposes the distributor/merger ports as the
    // composite's outer ports.
    private void init(final T workerStage, final int numberStages, final int pipeCapacity) {
        connectWorkerStage(workerStage, pipeCapacity);
        workerStage.getInputPort().getOwningStage().declareActive();
        for (int i = 1; i < numberStages; i++) {
            ITaskFarmDuplicable<I, O> duplicatedWorkerStage = workerStage.duplicate();
            connectWorkerStage(duplicatedWorkerStage, pipeCapacity);
            duplicatedWorkerStage.getInputPort().getOwningStage().declareActive();
        }
        // With a single worker the merger can run passively; otherwise it needs its own thread.
        if (numberStages > 1) {
            this.merger.declareActive();
        }
        // map outer ports to inner ports
        inputPort = createInputPort(this.distributor.getInputPort());
        outputPort = createOutputPort(this.merger.getOutputPort());
    }

    // Connects one worker between distributor and merger and registers it.
    private void connectWorkerStage(final ITaskFarmDuplicable<I, O> workerStage, final int pipeCapacity) {
        final InputPort<I> stageInputPort = workerStage.getInputPort();
        connectPorts(this.distributor.getNewOutputPort(), stageInputPort, pipeCapacity);
        final OutputPort<O> stageOutputPort = workerStage.getOutputPort();
        connectPorts(stageOutputPort, this.merger.getNewInputPort(), pipeCapacity);
        this.workerStages.add(workerStage);
    }

    /**
     * Returns the input port of the task farm/distributor of the task farm.
     *
     * @return input port of the task farm
     */
    @Override
    public InputPort<I> getInputPort() {
        return inputPort;
    }

    /**
     * Returns the output port of the task farm/merger of the task farm.
     *
     * @return output port of the task farm
     */
    @Override
    public OutputPort<O> getOutputPort() {
        return outputPort;
    }

    // /**
    // * Declares the internal distributor to be executed by an own thread.
    // */
    // @Override
    // public void declareActive() {
    // distributor.declareActive();
    // }
    //
    // @Override
    // public StageState getCurrentState() {
    // return distributor.getCurrentState();
    // }

    /* default */ Distributor<I> getDistributor() {
        return distributor;
    }

    /* default */ Merger<O> getMerger() {
        return merger;
    }

    // Reads the capacity back from the first distributor pipe; safe because the
    // constructor guarantees at least one worker is connected.
    protected int getPipeCapacity() {
        return distributor.getOutputPorts().get(0).getPipe().capacity();
    }

    /**
     * @return a list of all currently existing worker stages
     */
    public List<ITaskFarmDuplicable<I, O>> getWorkerStages() {
        return workerStages;
    }
}
| apache-2.0 |
CloudScale-Project/DynamicSpotter | org.spotter.core/src/org/spotter/core/instrumentation/InstrumentationBroker.java | 7375 | /**
* Copyright 2014 SAP AG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.spotter.core.instrumentation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import org.aim.api.exceptions.InstrumentationException;
import org.aim.description.InstrumentationDescription;
import org.lpe.common.extension.IExtension;
import org.lpe.common.util.system.LpeSystemUtils;
/**
* The instrumentation broker manages the distribution of instrumentation
* commands.
*
* @author Alexander Wert
*
*/
public final class InstrumentationBroker implements IInstrumentationAdapter {

    private static InstrumentationBroker instance;

    /**
     * Lazily creates the broker; synchronized so concurrent first calls cannot
     * create two instances.
     *
     * @return singleton instance
     */
    public static synchronized InstrumentationBroker getInstance() {
        if (instance == null) {
            instance = new InstrumentationBroker();
        }
        return instance;
    }

    // The set of controllers this broker fans commands out to; populated via setControllers().
    private final List<IInstrumentationAdapter> instrumentationControllers;

    /**
     * Private constructor (singleton). Starts with an empty controller list.
     */
    private InstrumentationBroker() {
        this.instrumentationControllers = new ArrayList<IInstrumentationAdapter>();
    }

    /**
     * sets a collection of controllers. Replaces any previously set controllers.
     *
     * @param instrumentationControllers
     *            controllers
     */
    public void setControllers(Collection<IInstrumentationAdapter> instrumentationControllers) {
        this.instrumentationControllers.clear();
        this.instrumentationControllers.addAll(instrumentationControllers);
    }

    /**
     * Initializes all controllers in parallel and blocks until every
     * initialization task has finished.
     *
     * @throws InstrumentationException
     *             if any task fails or the wait is interrupted
     */
    @Override
    public void initialize() throws InstrumentationException {
        try {
            List<Future<?>> tasks = new ArrayList<>();
            for (IInstrumentationAdapter instController : instrumentationControllers) {
                tasks.add(LpeSystemUtils.submitTask(new InitializeTask(instController)));
            }
            // wait for termination of all initialization tasks
            for (Future<?> task : tasks) {
                task.get();
            }
        } catch (InterruptedException | ExecutionException e) {
            throw new InstrumentationException(e);
        }
    }

    /**
     * Instruments via all controllers in parallel and blocks until done.
     *
     * NOTE(review): every InstrumentTask mutates the SAME description object
     * (see InstrumentTask.executeTask) from its own worker thread — looks like
     * a data race, and one controller's includes/excludes leak into the
     * description seen by the others. Confirm whether this is intended.
     *
     * @param description
     *            instrumentation description; must not be null
     * @throws InstrumentationException
     *             if the description is null, any task fails, or the wait is interrupted
     */
    @Override
    public void instrument(InstrumentationDescription description) throws InstrumentationException {
        try {
            if (description == null) {
                throw new InstrumentationException("Instrumentation description must not be null!");
            }
            List<Future<?>> tasks = new ArrayList<>();
            for (IInstrumentationAdapter instController : instrumentationControllers) {
                tasks.add(LpeSystemUtils.submitTask(new InstrumentTask(instController, description)));
            }
            // wait for termination of all instrumentation tasks
            for (Future<?> task : tasks) {
                task.get();
            }
        } catch (InterruptedException | ExecutionException e) {
            throw new InstrumentationException(e);
        }
    }

    /**
     * Reverts instrumentation on all controllers in parallel and blocks until done.
     *
     * @throws InstrumentationException
     *             if any task fails or the wait is interrupted
     */
    @Override
    public void uninstrument() throws InstrumentationException {
        try {
            List<Future<?>> tasks = new ArrayList<>();
            for (IInstrumentationAdapter instController : instrumentationControllers) {
                tasks.add(LpeSystemUtils.submitTask(new UninstrumentTask(instController)));
            }
            // wait for termination of all uninstrumentation tasks
            for (Future<?> task : tasks) {
                task.get();
            }
        } catch (InterruptedException | ExecutionException e) {
            throw new InstrumentationException(e);
        }
    }

    /**
     * Returns the union of all controllers' properties. Later controllers
     * overwrite earlier ones on key collisions.
     */
    @Override
    public Properties getProperties() {
        Properties props = new Properties();
        for (IInstrumentationAdapter instController : instrumentationControllers) {
            props.putAll(instController.getProperties());
        }
        return props;
    }

    // The broker is not created by an extension, hence no provider.
    @Override
    public IExtension<?> getProvider() {
        return null;
    }

    /**
     * Base class for the parallel worker tasks: adapts the checked
     * InstrumentationException of executeTask() to Runnable by wrapping it in a
     * RuntimeException (the cause is preserved and surfaces via Future.get()).
     */
    private abstract class Task implements Runnable {
        @Override
        public void run() {
            try {
                executeTask();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }

        protected abstract void executeTask() throws InstrumentationException;
    }

    /**
     * Applies the controller-specific include/exclude properties to the (shared)
     * description, then delegates instrumentation to the controller.
     */
    private class InstrumentTask extends Task {
        IInstrumentationAdapter instController;
        InstrumentationDescription description;

        // NOTE(review): declared "throws InterruptedException" but never throws it.
        public InstrumentTask(IInstrumentationAdapter instController, InstrumentationDescription description)
                throws InterruptedException {
            this.instController = instController;
            this.description = description;
        }

        @Override
        protected void executeTask() throws InstrumentationException {
            // Comma-separated package includes configured on this controller; empty means "none".
            String csListIncludes = instController.getProperties().getProperty(
                    IInstrumentationAdapter.INSTRUMENTATION_INCLUDES);
            csListIncludes = (csListIncludes == null || csListIncludes.isEmpty()) ? null : csListIncludes;
            if (csListIncludes != null) {
                String[] includesArr = csListIncludes.split(",");
                for (String inc : includesArr) {
                    // NOTE(review): mutates the shared description concurrently with the
                    // other InstrumentTasks — verify thread-safety of the restriction lists.
                    description.getGlobalRestriction().getPackageIncludes().add(inc);
                }
            }
            // Same handling for excludes.
            String csListExcludes = instController.getProperties().getProperty(
                    IInstrumentationAdapter.INSTRUMENTATION_EXCLUDES);
            csListExcludes = (csListExcludes == null || csListExcludes.isEmpty()) ? null : csListExcludes;
            if (csListExcludes != null) {
                String[] excludesArr = csListExcludes.split(",");
                for (String exc : excludesArr) {
                    description.getGlobalRestriction().getPackageExcludes().add(exc);
                }
            }
            instController.instrument(description);
        }
    }

    /** Delegates uninstrumentation to one controller. */
    private class UninstrumentTask extends Task {
        IInstrumentationAdapter instController;

        // NOTE(review): declared "throws InterruptedException" but never throws it.
        public UninstrumentTask(IInstrumentationAdapter instController) throws InterruptedException {
            this.instController = instController;
        }

        @Override
        protected void executeTask() throws InstrumentationException {
            instController.uninstrument();
        }
    }

    /** Delegates initialization to one controller. */
    private class InitializeTask extends Task {
        private IInstrumentationAdapter instController;

        // NOTE(review): declared "throws InterruptedException" but never throws it.
        public InitializeTask(IInstrumentationAdapter instController) throws InterruptedException {
            this.instController = instController;
        }

        @Override
        protected void executeTask() throws InstrumentationException {
            instController.initialize();
        }
    }

    @Override
    public String getName() {
        return "Instrumentation Broker";
    }

    // The broker is a local facade, not a remote endpoint, hence the placeholder values.
    @Override
    public String getPort() {
        return "NA";
    }

    @Override
    public String getHost() {
        return "localhost";
    }

    @Override
    public void setProperties(Properties properties) {
        // nothing to do
    }

    /**
     * Returns a list of instrumentation controllers of the given type.
     *
     * @param type
     *            type of interest
     * @return list of instrumentation controllers of the given type
     * @param <T>
     *            Class type of the controllers
     */
    @SuppressWarnings("unchecked")
    public <T extends IInstrumentationAdapter> List<T> getInstrumentationControllers(Class<T> type) {
        List<T> result = new ArrayList<>();
        for (IInstrumentationAdapter controller : instrumentationControllers) {
            if (type.isAssignableFrom(controller.getClass())) {
                // Safe: isAssignableFrom guarantees controller is a T.
                result.add((T) controller);
            }
        }
        return result;
    }
}
| apache-2.0 |
gustavoanatoly/hbase | hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java | 699245 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: Filter.proto
package org.apache.hadoop.hbase.protobuf.generated;
public final class FilterProtos {
private FilterProtos() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
public interface FilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string name = 1;
/**
* <code>required string name = 1;</code>
*/
boolean hasName();
/**
* <code>required string name = 1;</code>
*/
java.lang.String getName();
/**
* <code>required string name = 1;</code>
*/
com.google.protobuf.ByteString
getNameBytes();
// optional bytes serialized_filter = 2;
/**
* <code>optional bytes serialized_filter = 2;</code>
*/
boolean hasSerializedFilter();
/**
* <code>optional bytes serialized_filter = 2;</code>
*/
com.google.protobuf.ByteString getSerializedFilter();
}
  /**
   * Protobuf type {@code hbase.pb.Filter}
   *
   * <p>Generated by protoc from Filter.proto. Do not edit by hand; change the
   * .proto definition and regenerate instead. Carries a filter class {@code name}
   * plus its opaque {@code serialized_filter} payload.
   */
  public static final class Filter extends
      com.google.protobuf.GeneratedMessage
      implements FilterOrBuilder {
    // Use Filter.newBuilder() to construct.
    private Filter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Singleton default-instance constructor; field defaults are applied by the
    // static initializer's call to initFields().
    private Filter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final Filter defaultInstance;
    public static Filter getDefaultInstance() {
      return defaultInstance;
    }

    public Filter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until end of input
    // (tag 0); unrecognized fields are preserved in unknownFields. Tag 10 is
    // field 1 (name, length-delimited); tag 18 is field 2 (serialized_filter).
    private Filter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              name_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              serializedFilter_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Even on failure, freeze whatever was parsed so the partial message is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder.class);
    }
    // Stateless parser; delegates to the wire-format constructor above.
    public static com.google.protobuf.Parser<Filter> PARSER =
        new com.google.protobuf.AbstractParser<Filter>() {
      public Filter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Filter(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<Filter> getParserForType() {
      return PARSER;
    }
    // Field-presence bits: 0x1 = name, 0x2 = serialized_filter.
    private int bitField0_;
    // required string name = 1;
    public static final int NAME_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString; decoded lazily by getName().
    private java.lang.Object name_;
    /**
     * <code>required string name = 1;</code>
     */
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string name = 1;</code>
     *
     * <p>Lazily decodes the stored ByteString; the decoded String is cached only
     * when the bytes are valid UTF-8.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string name = 1;</code>
     *
     * <p>Returns the UTF-8 bytes of the name, caching the encoded form.
     */
    public com.google.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional bytes serialized_filter = 2;
    public static final int SERIALIZED_FILTER_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString serializedFilter_;
    /**
     * <code>optional bytes serialized_filter = 2;</code>
     */
    public boolean hasSerializedFilter() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes serialized_filter = 2;</code>
     */
    public com.google.protobuf.ByteString getSerializedFilter() {
      return serializedFilter_;
    }
    // Applies proto field defaults; called by parsing ctor and static initializer.
    private void initFields() {
      name_ = "";
      serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized initialization check: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      // name is a required field.
      if (!hasName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, serializedFilter_);
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, serializedFilter_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) obj;

      boolean result = true;
      result = result && (hasName() == other.hasName());
      if (hasName()) {
        result = result && getName()
            .equals(other.getName());
      }
      result = result && (hasSerializedFilter() == other.hasSerializedFilter());
      if (hasSerializedFilter()) {
        result = result && getSerializedFilter()
            .equals(other.getSerializedFilter());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      if (hasSerializedFilter()) {
        hash = (37 * hash) + SERIALIZED_FILTER_FIELD_NUMBER;
        hash = (53 * hash) + getSerializedFilter().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.Filter}
     *
     * <p>Mutable builder for {@link Filter}; mirrors the message's fields and
     * presence bits, producing an immutable Filter via {@link #build()}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No nested-message field builders to force-initialize for this message.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        name_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
      }

      // build() enforces required fields; buildPartial() does not.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.name_ = name_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.serializedFilter_ = serializedFilter_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) return this;
        if (other.hasName()) {
          bitField0_ |= 0x00000001;
          name_ = other.name_;
          onChanged();
        }
        if (other.hasSerializedFilter()) {
          setSerializedFilter(other.getSerializedFilter());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        // name is a required field.
        if (!hasName()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Merge whatever was parsed before rethrowing, per protobuf contract.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required string name = 1;
      private java.lang.Object name_ = "";
      /**
       * <code>required string name = 1;</code>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string name = 1;</code>
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          name_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string name = 1;</code>
       */
      public com.google.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string name = 1;</code>
       */
      public Builder setName(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string name = 1;</code>
       */
      public Builder clearName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        name_ = getDefaultInstance().getName();
        onChanged();
        return this;
      }
      /**
       * <code>required string name = 1;</code>
       */
      public Builder setNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
        return this;
      }

      // optional bytes serialized_filter = 2;
      private com.google.protobuf.ByteString serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes serialized_filter = 2;</code>
       */
      public boolean hasSerializedFilter() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes serialized_filter = 2;</code>
       */
      public com.google.protobuf.ByteString getSerializedFilter() {
        return serializedFilter_;
      }
      /**
       * <code>optional bytes serialized_filter = 2;</code>
       */
      public Builder setSerializedFilter(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        serializedFilter_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes serialized_filter = 2;</code>
       */
      public Builder clearSerializedFilter() {
        bitField0_ = (bitField0_ & ~0x00000002);
        serializedFilter_ = getDefaultInstance().getSerializedFilter();
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.Filter)
    }

    static {
      defaultInstance = new Filter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.Filter)
  }
  /**
   * Read-side accessor contract shared by {@code ColumnCountGetFilter} and its
   * Builder, generated from the {@code hbase.pb.ColumnCountGetFilter} message.
   */
  public interface ColumnCountGetFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int32 limit = 1;
    /**
     * <code>required int32 limit = 1;</code>
     */
    boolean hasLimit();
    /**
     * <code>required int32 limit = 1;</code>
     */
    int getLimit();
  }
  /**
   * Protobuf type {@code hbase.pb.ColumnCountGetFilter}
   *
   * <p>Generated by protoc from Filter.proto. Do not edit by hand; change the
   * .proto definition and regenerate instead. Carries a single required
   * {@code limit} (int32) field.
   */
  public static final class ColumnCountGetFilter extends
      com.google.protobuf.GeneratedMessage
      implements ColumnCountGetFilterOrBuilder {
    // Use ColumnCountGetFilter.newBuilder() to construct.
    private ColumnCountGetFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Singleton default-instance constructor; field defaults are applied by the
    // static initializer's call to initFields().
    private ColumnCountGetFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final ColumnCountGetFilter defaultInstance;
    public static ColumnCountGetFilter getDefaultInstance() {
      return defaultInstance;
    }

    public ColumnCountGetFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: tag 8 is field 1 (limit, varint);
    // unrecognized fields are preserved in unknownFields.
    private ColumnCountGetFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              limit_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Even on failure, freeze whatever was parsed so the partial message is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class);
    }
    // Stateless parser; delegates to the wire-format constructor above.
    public static com.google.protobuf.Parser<ColumnCountGetFilter> PARSER =
        new com.google.protobuf.AbstractParser<ColumnCountGetFilter>() {
      public ColumnCountGetFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ColumnCountGetFilter(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<ColumnCountGetFilter> getParserForType() {
      return PARSER;
    }
    // Field-presence bits: 0x1 = limit.
    private int bitField0_;
    // required int32 limit = 1;
    public static final int LIMIT_FIELD_NUMBER = 1;
    private int limit_;
    /**
     * <code>required int32 limit = 1;</code>
     */
    public boolean hasLimit() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required int32 limit = 1;</code>
     */
    public int getLimit() {
      return limit_;
    }
    // Applies proto field defaults; called by parsing ctor and static initializer.
    private void initFields() {
      limit_ = 0;
    }
    // Memoized initialization check: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      // limit is a required field.
      if (!hasLimit()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt32(1, limit_);
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, limit_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) obj;

      boolean result = true;
      result = result && (hasLimit() == other.hasLimit());
      if (hasLimit()) {
        result = result && (getLimit()
            == other.getLimit());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLimit()) {
        hash = (37 * hash) + LIMIT_FIELD_NUMBER;
        hash = (53 * hash) + getLimit();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.ColumnCountGetFilter}
     *
     * <p>Mutable builder for {@link ColumnCountGetFilter}; produces an immutable
     * message via {@link #build()}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No nested-message field builders to force-initialize for this message.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        limit_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance();
      }

      // build() enforces required fields; buildPartial() does not.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.limit_ = limit_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance()) return this;
        if (other.hasLimit()) {
          setLimit(other.getLimit());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        // limit is a required field.
        if (!hasLimit()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Merge whatever was parsed before rethrowing, per protobuf contract.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required int32 limit = 1;
      private int limit_ ;
      /**
       * <code>required int32 limit = 1;</code>
       */
      public boolean hasLimit() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required int32 limit = 1;</code>
       */
      public int getLimit() {
        return limit_;
      }
      /**
       * <code>required int32 limit = 1;</code>
       */
      public Builder setLimit(int value) {
        bitField0_ |= 0x00000001;
        limit_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required int32 limit = 1;</code>
       */
      public Builder clearLimit() {
        bitField0_ = (bitField0_ & ~0x00000001);
        limit_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnCountGetFilter)
    }

    static {
      defaultInstance = new ColumnCountGetFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.ColumnCountGetFilter)
  }
  /**
   * Read-side accessor contract shared by {@code ColumnPaginationFilter} and its
   * Builder, generated from the {@code hbase.pb.ColumnPaginationFilter} message.
   */
  public interface ColumnPaginationFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int32 limit = 1;
    /**
     * <code>required int32 limit = 1;</code>
     */
    boolean hasLimit();
    /**
     * <code>required int32 limit = 1;</code>
     */
    int getLimit();

    // optional int32 offset = 2;
    /**
     * <code>optional int32 offset = 2;</code>
     */
    boolean hasOffset();
    /**
     * <code>optional int32 offset = 2;</code>
     */
    int getOffset();

    // optional bytes column_offset = 3;
    /**
     * <code>optional bytes column_offset = 3;</code>
     */
    boolean hasColumnOffset();
    /**
     * <code>optional bytes column_offset = 3;</code>
     */
    com.google.protobuf.ByteString getColumnOffset();
  }
/**
* Protobuf type {@code hbase.pb.ColumnPaginationFilter}
*/
public static final class ColumnPaginationFilter extends
com.google.protobuf.GeneratedMessage
implements ColumnPaginationFilterOrBuilder {
// Use ColumnPaginationFilter.newBuilder() to construct.
private ColumnPaginationFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ColumnPaginationFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ColumnPaginationFilter defaultInstance;
public static ColumnPaginationFilter getDefaultInstance() {
return defaultInstance;
}
public ColumnPaginationFilter getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ColumnPaginationFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
limit_ = input.readInt32();
break;
}
case 16: {
bitField0_ |= 0x00000002;
offset_ = input.readInt32();
break;
}
case 26: {
bitField0_ |= 0x00000004;
columnOffset_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
}
public static com.google.protobuf.Parser<ColumnPaginationFilter> PARSER =
new com.google.protobuf.AbstractParser<ColumnPaginationFilter>() {
public ColumnPaginationFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ColumnPaginationFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ColumnPaginationFilter> getParserForType() {
return PARSER;
}
private int bitField0_;
// required int32 limit = 1;
public static final int LIMIT_FIELD_NUMBER = 1;
private int limit_;
/**
* <code>required int32 limit = 1;</code>
*/
public boolean hasLimit() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int32 limit = 1;</code>
*/
public int getLimit() {
return limit_;
}
// optional int32 offset = 2;
public static final int OFFSET_FIELD_NUMBER = 2;
private int offset_;
/**
* <code>optional int32 offset = 2;</code>
*/
public boolean hasOffset() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int32 offset = 2;</code>
*/
public int getOffset() {
return offset_;
}
// optional bytes column_offset = 3;
public static final int COLUMN_OFFSET_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString columnOffset_;
/**
* <code>optional bytes column_offset = 3;</code>
*/
public boolean hasColumnOffset() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional bytes column_offset = 3;</code>
*/
public com.google.protobuf.ByteString getColumnOffset() {
return columnOffset_;
}
private void initFields() {
limit_ = 0;
offset_ = 0;
columnOffset_ = com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasLimit()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt32(1, limit_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt32(2, offset_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, columnOffset_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(1, limit_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(2, offset_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, columnOffset_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) obj;
boolean result = true;
result = result && (hasLimit() == other.hasLimit());
if (hasLimit()) {
result = result && (getLimit()
== other.getLimit());
}
result = result && (hasOffset() == other.hasOffset());
if (hasOffset()) {
result = result && (getOffset()
== other.getOffset());
}
result = result && (hasColumnOffset() == other.hasColumnOffset());
if (hasColumnOffset()) {
result = result && getColumnOffset()
.equals(other.getColumnOffset());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasLimit()) {
hash = (37 * hash) + LIMIT_FIELD_NUMBER;
hash = (53 * hash) + getLimit();
}
if (hasOffset()) {
hash = (37 * hash) + OFFSET_FIELD_NUMBER;
hash = (53 * hash) + getOffset();
}
if (hasColumnOffset()) {
hash = (37 * hash) + COLUMN_OFFSET_FIELD_NUMBER;
hash = (53 * hash) + getColumnOffset().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.ColumnPaginationFilter}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
limit_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
offset_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
columnOffset_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.limit_ = limit_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.offset_ = offset_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.columnOffset_ = columnOffset_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance()) return this;
if (other.hasLimit()) {
setLimit(other.getLimit());
}
if (other.hasOffset()) {
setOffset(other.getOffset());
}
if (other.hasColumnOffset()) {
setColumnOffset(other.getColumnOffset());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasLimit()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required int32 limit = 1;
private int limit_ ;
/**
* <code>required int32 limit = 1;</code>
*/
public boolean hasLimit() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int32 limit = 1;</code>
*/
public int getLimit() {
return limit_;
}
/**
* <code>required int32 limit = 1;</code>
*/
public Builder setLimit(int value) {
bitField0_ |= 0x00000001;
limit_ = value;
onChanged();
return this;
}
/**
* <code>required int32 limit = 1;</code>
*/
public Builder clearLimit() {
bitField0_ = (bitField0_ & ~0x00000001);
limit_ = 0;
onChanged();
return this;
}
// optional int32 offset = 2;
private int offset_ ;
/**
* <code>optional int32 offset = 2;</code>
*/
public boolean hasOffset() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int32 offset = 2;</code>
*/
public int getOffset() {
return offset_;
}
/**
* <code>optional int32 offset = 2;</code>
*/
public Builder setOffset(int value) {
bitField0_ |= 0x00000002;
offset_ = value;
onChanged();
return this;
}
/**
* <code>optional int32 offset = 2;</code>
*/
public Builder clearOffset() {
bitField0_ = (bitField0_ & ~0x00000002);
offset_ = 0;
onChanged();
return this;
}
// optional bytes column_offset = 3;
private com.google.protobuf.ByteString columnOffset_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes column_offset = 3;</code>
*/
public boolean hasColumnOffset() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional bytes column_offset = 3;</code>
*/
public com.google.protobuf.ByteString getColumnOffset() {
return columnOffset_;
}
/**
* <code>optional bytes column_offset = 3;</code>
*/
public Builder setColumnOffset(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
columnOffset_ = value;
onChanged();
return this;
}
/**
* <code>optional bytes column_offset = 3;</code>
*/
public Builder clearColumnOffset() {
bitField0_ = (bitField0_ & ~0x00000004);
columnOffset_ = getDefaultInstance().getColumnOffset();
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.ColumnPaginationFilter)
}
static {
defaultInstance = new ColumnPaginationFilter(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.ColumnPaginationFilter)
}
  public interface ColumnPrefixFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // Read-only view shared by the ColumnPrefixFilter message and its Builder.
    // required bytes prefix = 1;
    /**
     * <code>required bytes prefix = 1;</code>
     */
    boolean hasPrefix();
    /**
     * <code>required bytes prefix = 1;</code>
     */
    com.google.protobuf.ByteString getPrefix();
  }
/**
* Protobuf type {@code hbase.pb.ColumnPrefixFilter}
*/
  public static final class ColumnPrefixFilter extends
      com.google.protobuf.GeneratedMessage
      implements ColumnPrefixFilterOrBuilder {
    // NOTE(review): auto-generated protobuf message for {@code hbase.pb.ColumnPrefixFilter}.
    // Single field (per the constants/comments below): required bytes prefix = 1.
    // Do not hand-edit: this class must stay byte-stable with protoc output.
    // Use ColumnPrefixFilter.newBuilder() to construct.
    private ColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Internal ctor for the lazily-populated default instance (see static initializer).
    private ColumnPrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final ColumnPrefixFilter defaultInstance;
    public static ColumnPrefixFilter getDefaultInstance() {
      return defaultInstance;
    }
    public ColumnPrefixFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: tag 10 = field 1 (length-delimited bytes);
    // unrecognized tags are preserved in unknownFields.
    private ColumnPrefixFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // mutable_bitField0_ is an unused codegen artifact for this message.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              prefix_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze unknown fields even on failure so the partial message is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
    }
    // NOTE(review): non-final public static PARSER is pre-protobuf-2.6 codegen style.
    public static com.google.protobuf.Parser<ColumnPrefixFilter> PARSER =
        new com.google.protobuf.AbstractParser<ColumnPrefixFilter>() {
      public ColumnPrefixFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ColumnPrefixFilter(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<ColumnPrefixFilter> getParserForType() {
      return PARSER;
    }
    // Presence bits: 0x1 = prefix.
    private int bitField0_;
    // required bytes prefix = 1;
    public static final int PREFIX_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString prefix_;
    /**
     * <code>required bytes prefix = 1;</code>
     */
    public boolean hasPrefix() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes prefix = 1;</code>
     */
    public com.google.protobuf.ByteString getPrefix() {
      return prefix_;
    }
    // Resets the field to its proto default (empty bytes).
    private void initFields() {
      prefix_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      // 'prefix' is the only (required) field.
      if (!hasPrefix()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes the prefix field if set, then unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, prefix_);
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    // Computes (and memoizes) the exact wire size; must mirror writeTo().
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, prefix_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Field-wise equality including unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) obj;
      boolean result = true;
      result = result && (hasPrefix() == other.hasPrefix());
      if (hasPrefix()) {
        result = result && getPrefix()
            .equals(other.getPrefix());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;
    // Hash consistent with equals(); memoized (0 means "not yet computed").
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasPrefix()) {
        hash = (37 * hash) + PREFIX_FIELD_NUMBER;
        hash = (53 * hash) + getPrefix().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Standard generated parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.ColumnPrefixFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilterOrBuilder {
      // Mutable companion for ColumnPrefixFilter; produces an immutable
      // message via build()/buildPartial().
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message/repeated fields here, so nothing to force-initialize.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets the field to its default and clears its presence bit.
      public Builder clear() {
        super.clear();
        prefix_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance();
      }
      // Like buildPartial() but throws if the required 'prefix' field is unset.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies the field and translates the builder presence bit into the
      // message's bitField0_; does not validate required fields.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.prefix_ = prefix_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Standard proto merge: a set 'prefix' on 'other' overwrites this builder's.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance()) return this;
        if (other.hasPrefix()) {
          setPrefix(other.getPrefix());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        if (!hasPrefix()) {
          
          return false;
        }
        return true;
      }
      // Parses from a stream; on failure, merges whatever was successfully
      // parsed before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      // required bytes prefix = 1;
      private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes prefix = 1;</code>
       */
      public boolean hasPrefix() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes prefix = 1;</code>
       */
      public com.google.protobuf.ByteString getPrefix() {
        return prefix_;
      }
      /**
       * <code>required bytes prefix = 1;</code>
       */
      public Builder setPrefix(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        prefix_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes prefix = 1;</code>
       */
      public Builder clearPrefix() {
        bitField0_ = (bitField0_ & ~0x00000001);
        prefix_ = getDefaultInstance().getPrefix();
        onChanged();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnPrefixFilter)
    }
    static {
      defaultInstance = new ColumnPrefixFilter(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:hbase.pb.ColumnPrefixFilter)
  }
  public interface ColumnRangeFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // Read-only view shared by the ColumnRangeFilter message and its Builder.
    // All four fields are optional; the inclusive flags qualify their
    // corresponding column bounds.
    // optional bytes min_column = 1;
    /**
     * <code>optional bytes min_column = 1;</code>
     */
    boolean hasMinColumn();
    /**
     * <code>optional bytes min_column = 1;</code>
     */
    com.google.protobuf.ByteString getMinColumn();
    // optional bool min_column_inclusive = 2;
    /**
     * <code>optional bool min_column_inclusive = 2;</code>
     */
    boolean hasMinColumnInclusive();
    /**
     * <code>optional bool min_column_inclusive = 2;</code>
     */
    boolean getMinColumnInclusive();
    // optional bytes max_column = 3;
    /**
     * <code>optional bytes max_column = 3;</code>
     */
    boolean hasMaxColumn();
    /**
     * <code>optional bytes max_column = 3;</code>
     */
    com.google.protobuf.ByteString getMaxColumn();
    // optional bool max_column_inclusive = 4;
    /**
     * <code>optional bool max_column_inclusive = 4;</code>
     */
    boolean hasMaxColumnInclusive();
    /**
     * <code>optional bool max_column_inclusive = 4;</code>
     */
    boolean getMaxColumnInclusive();
  }
/**
* Protobuf type {@code hbase.pb.ColumnRangeFilter}
*/
  // NOTE(review): protoc-generated message class (protobuf 2.5 GeneratedMessage style).
  // The file header says the content is generated and must not be hand-modified, so the
  // annotations below are comments only; regenerate from the .proto to change behavior.
  // Message fields: optional bytes min_column = 1; optional bool min_column_inclusive = 2;
  // optional bytes max_column = 3; optional bool max_column_inclusive = 4.
  public static final class ColumnRangeFilter extends
      com.google.protobuf.GeneratedMessage
      implements ColumnRangeFilterOrBuilder {
    // Use ColumnRangeFilter.newBuilder() to construct.
    private ColumnRangeFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor used only for the singleton default instance; field values are
    // assigned afterwards via initFields() in the static initializer below.
    private ColumnRangeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final ColumnRangeFilter defaultInstance;
    public static ColumnRangeFilter getDefaultInstance() {
      return defaultInstance;
    }
    public ColumnRangeFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
    // Fields that arrived on the wire but are not declared in the schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until tag 0 (end of
    // stream). Tags are (field_number << 3) | wire_type: 10 = field 1/bytes,
    // 16 = field 2/varint, 26 = field 3/bytes, 32 = field 4/varint. The `default`
    // arm appearing before the numbered cases is legal — switch case order is
    // irrelevant — and routes unrecognized tags into unknownFields.
    private ColumnRangeFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              minColumn_ = input.readBytes();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              minColumnInclusive_ = input.readBool();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              maxColumn_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              maxColumnInclusive_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error, so the partially
        // built message attached to the exception is immutable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class);
    }
    public static com.google.protobuf.Parser<ColumnRangeFilter> PARSER =
        new com.google.protobuf.AbstractParser<ColumnRangeFilter>() {
      public ColumnRangeFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ColumnRangeFilter(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<ColumnRangeFilter> getParserForType() {
      return PARSER;
    }
    // Presence bits for the four optional fields: bit 0 = min_column,
    // bit 1 = min_column_inclusive, bit 2 = max_column, bit 3 = max_column_inclusive.
    private int bitField0_;
    // optional bytes min_column = 1;
    public static final int MIN_COLUMN_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString minColumn_;
    /**
     * <code>optional bytes min_column = 1;</code>
     */
    public boolean hasMinColumn() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bytes min_column = 1;</code>
     */
    public com.google.protobuf.ByteString getMinColumn() {
      return minColumn_;
    }
    // optional bool min_column_inclusive = 2;
    public static final int MIN_COLUMN_INCLUSIVE_FIELD_NUMBER = 2;
    private boolean minColumnInclusive_;
    /**
     * <code>optional bool min_column_inclusive = 2;</code>
     */
    public boolean hasMinColumnInclusive() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool min_column_inclusive = 2;</code>
     */
    public boolean getMinColumnInclusive() {
      return minColumnInclusive_;
    }
    // optional bytes max_column = 3;
    public static final int MAX_COLUMN_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString maxColumn_;
    /**
     * <code>optional bytes max_column = 3;</code>
     */
    public boolean hasMaxColumn() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes max_column = 3;</code>
     */
    public com.google.protobuf.ByteString getMaxColumn() {
      return maxColumn_;
    }
    // optional bool max_column_inclusive = 4;
    public static final int MAX_COLUMN_INCLUSIVE_FIELD_NUMBER = 4;
    private boolean maxColumnInclusive_;
    /**
     * <code>optional bool max_column_inclusive = 4;</code>
     */
    public boolean hasMaxColumnInclusive() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional bool max_column_inclusive = 4;</code>
     */
    public boolean getMaxColumnInclusive() {
      return maxColumnInclusive_;
    }
    // Resets every field to its proto default (empty bytes / false).
    private void initFields() {
      minColumn_ = com.google.protobuf.ByteString.EMPTY;
      minColumnInclusive_ = false;
      maxColumn_ = com.google.protobuf.ByteString.EMPTY;
      maxColumnInclusive_ = false;
    }
    // Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
    // All fields are optional, so this message is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes only fields whose presence bit is set, then any unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, minColumn_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, minColumnInclusive_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, maxColumn_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBool(4, maxColumnInclusive_);
      }
      getUnknownFields().writeTo(output);
    }
    // Memoized serialized size; -1 means "not yet computed". Safe because the
    // message is immutable once built.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, minColumn_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, minColumnInclusive_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, maxColumn_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(4, maxColumnInclusive_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Field-by-field equality: presence flags must match, and set fields must be
    // equal; unknown fields are compared as well.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) obj;
      boolean result = true;
      result = result && (hasMinColumn() == other.hasMinColumn());
      if (hasMinColumn()) {
        result = result && getMinColumn()
            .equals(other.getMinColumn());
      }
      result = result && (hasMinColumnInclusive() == other.hasMinColumnInclusive());
      if (hasMinColumnInclusive()) {
        result = result && (getMinColumnInclusive()
            == other.getMinColumnInclusive());
      }
      result = result && (hasMaxColumn() == other.hasMaxColumn());
      if (hasMaxColumn()) {
        result = result && getMaxColumn()
            .equals(other.getMaxColumn());
      }
      result = result && (hasMaxColumnInclusive() == other.hasMaxColumnInclusive());
      if (hasMaxColumnInclusive()) {
        result = result && (getMaxColumnInclusive()
            == other.getMaxColumnInclusive());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    // Memoized hash (0 = not yet computed), mixing each set field's number and value.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasMinColumn()) {
        hash = (37 * hash) + MIN_COLUMN_FIELD_NUMBER;
        hash = (53 * hash) + getMinColumn().hashCode();
      }
      if (hasMinColumnInclusive()) {
        hash = (37 * hash) + MIN_COLUMN_INCLUSIVE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getMinColumnInclusive());
      }
      if (hasMaxColumn()) {
        hash = (37 * hash) + MAX_COLUMN_FIELD_NUMBER;
        hash = (53 * hash) + getMaxColumn().hashCode();
      }
      if (hasMaxColumnInclusive()) {
        hash = (37 * hash) + MAX_COLUMN_INCLUSIVE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getMaxColumnInclusive());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Standard generated parseFrom overloads; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.ColumnRangeFilter}
     */
    // Mutable builder companion; mirrors the message's fields and presence bits.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so nothing to eagerly initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Restores proto defaults and clears all four presence bits.
      public Builder clear() {
        super.clear();
        minColumn_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        minColumnInclusive_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        maxColumn_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        maxColumnInclusive_ = false;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance();
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies builder state into a new message; field values are copied
      // unconditionally, presence bits only when set in the builder.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.minColumn_ = minColumn_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.minColumnInclusive_ = minColumnInclusive_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.maxColumn_ = maxColumn_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.maxColumnInclusive_ = maxColumnInclusive_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Overwrites this builder's fields with those set in `other` (last-wins
      // protobuf merge semantics for scalar fields).
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance()) return this;
        if (other.hasMinColumn()) {
          setMinColumn(other.getMinColumn());
        }
        if (other.hasMinColumnInclusive()) {
          setMinColumnInclusive(other.getMinColumnInclusive());
        }
        if (other.hasMaxColumn()) {
          setMaxColumn(other.getMaxColumn());
        }
        if (other.hasMaxColumnInclusive()) {
          setMaxColumnInclusive(other.getMaxColumnInclusive());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      // Parses from a stream; on failure, merges whatever was successfully
      // parsed before rethrowing (see the finally block).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      // optional bytes min_column = 1;
      private com.google.protobuf.ByteString minColumn_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes min_column = 1;</code>
       */
      public boolean hasMinColumn() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bytes min_column = 1;</code>
       */
      public com.google.protobuf.ByteString getMinColumn() {
        return minColumn_;
      }
      /**
       * <code>optional bytes min_column = 1;</code>
       */
      public Builder setMinColumn(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        minColumn_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes min_column = 1;</code>
       */
      public Builder clearMinColumn() {
        bitField0_ = (bitField0_ & ~0x00000001);
        minColumn_ = getDefaultInstance().getMinColumn();
        onChanged();
        return this;
      }
      // optional bool min_column_inclusive = 2;
      private boolean minColumnInclusive_ ;
      /**
       * <code>optional bool min_column_inclusive = 2;</code>
       */
      public boolean hasMinColumnInclusive() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool min_column_inclusive = 2;</code>
       */
      public boolean getMinColumnInclusive() {
        return minColumnInclusive_;
      }
      /**
       * <code>optional bool min_column_inclusive = 2;</code>
       */
      public Builder setMinColumnInclusive(boolean value) {
        bitField0_ |= 0x00000002;
        minColumnInclusive_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool min_column_inclusive = 2;</code>
       */
      public Builder clearMinColumnInclusive() {
        bitField0_ = (bitField0_ & ~0x00000002);
        minColumnInclusive_ = false;
        onChanged();
        return this;
      }
      // optional bytes max_column = 3;
      private com.google.protobuf.ByteString maxColumn_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes max_column = 3;</code>
       */
      public boolean hasMaxColumn() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bytes max_column = 3;</code>
       */
      public com.google.protobuf.ByteString getMaxColumn() {
        return maxColumn_;
      }
      /**
       * <code>optional bytes max_column = 3;</code>
       */
      public Builder setMaxColumn(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        maxColumn_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes max_column = 3;</code>
       */
      public Builder clearMaxColumn() {
        bitField0_ = (bitField0_ & ~0x00000004);
        maxColumn_ = getDefaultInstance().getMaxColumn();
        onChanged();
        return this;
      }
      // optional bool max_column_inclusive = 4;
      private boolean maxColumnInclusive_ ;
      /**
       * <code>optional bool max_column_inclusive = 4;</code>
       */
      public boolean hasMaxColumnInclusive() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional bool max_column_inclusive = 4;</code>
       */
      public boolean getMaxColumnInclusive() {
        return maxColumnInclusive_;
      }
      /**
       * <code>optional bool max_column_inclusive = 4;</code>
       */
      public Builder setMaxColumnInclusive(boolean value) {
        bitField0_ |= 0x00000008;
        maxColumnInclusive_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool max_column_inclusive = 4;</code>
       */
      public Builder clearMaxColumnInclusive() {
        bitField0_ = (bitField0_ & ~0x00000008);
        maxColumnInclusive_ = false;
        onChanged();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnRangeFilter)
    }
    static {
      defaultInstance = new ColumnRangeFilter(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:hbase.pb.ColumnRangeFilter)
  }
  // Read-only accessor interface for hbase.pb.CompareFilter, implemented by both
  // the immutable CompareFilter message and its Builder (protoc-generated).
  public interface CompareFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // required .hbase.pb.CompareType compare_op = 1;
    /**
     * <code>required .hbase.pb.CompareType compare_op = 1;</code>
     */
    boolean hasCompareOp();
    /**
     * <code>required .hbase.pb.CompareType compare_op = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp();
    // optional .hbase.pb.Comparator comparator = 2;
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    boolean hasComparator();
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator();
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder();
  }
/**
* Protobuf type {@code hbase.pb.CompareFilter}
*/
public static final class CompareFilter extends
com.google.protobuf.GeneratedMessage
implements CompareFilterOrBuilder {
// Use CompareFilter.newBuilder() to construct.
private CompareFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private CompareFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CompareFilter defaultInstance;
public static CompareFilter getDefaultInstance() {
return defaultInstance;
}
public CompareFilter getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CompareFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
compareOp_ = value;
}
break;
}
case 18: {
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = comparator_.toBuilder();
}
comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(comparator_);
comparator_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class);
}
public static com.google.protobuf.Parser<CompareFilter> PARSER =
new com.google.protobuf.AbstractParser<CompareFilter>() {
public CompareFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CompareFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CompareFilter> getParserForType() {
return PARSER;
}
private int bitField0_;
// required .hbase.pb.CompareType compare_op = 1;
public static final int COMPARE_OP_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_;
/**
* <code>required .hbase.pb.CompareType compare_op = 1;</code>
*/
public boolean hasCompareOp() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.CompareType compare_op = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() {
return compareOp_;
}
// optional .hbase.pb.Comparator comparator = 2;
public static final int COMPARATOR_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_;
/**
* <code>optional .hbase.pb.Comparator comparator = 2;</code>
*/
public boolean hasComparator() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.Comparator comparator = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
return comparator_;
}
/**
* <code>optional .hbase.pb.Comparator comparator = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
return comparator_;
}
private void initFields() {
compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasCompareOp()) {
memoizedIsInitialized = 0;
return false;
}
if (hasComparator()) {
if (!getComparator().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, compareOp_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, comparator_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, compareOp_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, comparator_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) obj;
boolean result = true;
result = result && (hasCompareOp() == other.hasCompareOp());
if (hasCompareOp()) {
result = result &&
(getCompareOp() == other.getCompareOp());
}
result = result && (hasComparator() == other.hasComparator());
if (hasComparator()) {
result = result && getComparator()
.equals(other.getComparator());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasCompareOp()) {
hash = (37 * hash) + COMPARE_OP_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getCompareOp());
}
if (hasComparator()) {
hash = (37 * hash) + COMPARATOR_FIELD_NUMBER;
hash = (53 * hash) + getComparator().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
    /**
     * Protobuf type {@code hbase.pb.CompareFilter}
     *
     * <p>Generated builder for {@code CompareFilter}: a required
     * {@code compare_op} enum (presence bit 0x1 of {@code bitField0_}) and an
     * optional {@code comparator} sub-message (presence bit 0x2).
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-field builder when the runtime always uses
      // field builders, so parent/child change notifications stay wired up.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getComparatorFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets both fields to their defaults and clears their presence bits.
      public Builder clear() {
        super.clear();
        compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (comparatorBuilder_ == null) {
          comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
        } else {
          comparatorBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      // Clones by round-tripping through buildPartial(), which copies all
      // current field values without requiring initialization.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
      }
      // Builds the message, throwing if the required compare_op is unset
      // (or a set comparator is itself uninitialized).
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Builds without an initialization check, transferring presence bits
      // from the builder's bitField0_ into the message's.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.compareOp_ = compareOp_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (comparatorBuilder_ == null) {
          result.comparator_ = comparator_;
        } else {
          result.comparator_ = comparatorBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Copies only fields that are present on 'other'; unknown fields are
      // merged as well. Merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) return this;
        if (other.hasCompareOp()) {
          setCompareOp(other.getCompareOp());
        }
        if (other.hasComparator()) {
          mergeComparator(other.getComparator());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // True only when the required compare_op is set and any present
      // comparator sub-message is itself initialized.
      public final boolean isInitialized() {
        if (!hasCompareOp()) {
          return false;
        }
        if (hasComparator()) {
          if (!getComparator().isInitialized()) {
            return false;
          }
        }
        return true;
      }
      // Parses from a stream and merges the result into this builder; on a
      // parse error, merges whatever was read so far before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: 0x1 = compare_op, 0x2 = comparator.
      private int bitField0_;
      // required .hbase.pb.CompareType compare_op = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
      /**
       * <code>required .hbase.pb.CompareType compare_op = 1;</code>
       */
      public boolean hasCompareOp() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.CompareType compare_op = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() {
        return compareOp_;
      }
      /**
       * <code>required .hbase.pb.CompareType compare_op = 1;</code>
       */
      public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        compareOp_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .hbase.pb.CompareType compare_op = 1;</code>
       */
      public Builder clearCompareOp() {
        bitField0_ = (bitField0_ & ~0x00000001);
        compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
        onChanged();
        return this;
      }
      // optional .hbase.pb.Comparator comparator = 2;
      // Either comparator_ (plain message) or comparatorBuilder_ (nested
      // builder) is authoritative at any time; the accessors below pick
      // whichever is active.
      private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_;
      /**
       * <code>optional .hbase.pb.Comparator comparator = 2;</code>
       */
      public boolean hasComparator() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .hbase.pb.Comparator comparator = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
        if (comparatorBuilder_ == null) {
          return comparator_;
        } else {
          return comparatorBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.Comparator comparator = 2;</code>
       */
      public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
        if (comparatorBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          comparator_ = value;
          onChanged();
        } else {
          comparatorBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .hbase.pb.Comparator comparator = 2;</code>
       */
      public Builder setComparator(
          org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) {
        if (comparatorBuilder_ == null) {
          comparator_ = builderForValue.build();
          onChanged();
        } else {
          comparatorBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .hbase.pb.Comparator comparator = 2;</code>
       *
       * Merges field-by-field when a non-default comparator is already
       * present; otherwise simply replaces it.
       */
      public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
        if (comparatorBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) {
            comparator_ =
              org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial();
          } else {
            comparator_ = value;
          }
          onChanged();
        } else {
          comparatorBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .hbase.pb.Comparator comparator = 2;</code>
       */
      public Builder clearComparator() {
        if (comparatorBuilder_ == null) {
          comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
          onChanged();
        } else {
          comparatorBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>optional .hbase.pb.Comparator comparator = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getComparatorFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hbase.pb.Comparator comparator = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
        if (comparatorBuilder_ != null) {
          return comparatorBuilder_.getMessageOrBuilder();
        } else {
          return comparator_;
        }
      }
      /**
       * <code>optional .hbase.pb.Comparator comparator = 2;</code>
       *
       * Lazily switches the field into builder mode: after the first call,
       * comparatorBuilder_ owns the value and comparator_ is nulled out.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>
          getComparatorFieldBuilder() {
        if (comparatorBuilder_ == null) {
          comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>(
                  comparator_,
                  getParentForChildren(),
                  isClean());
          comparator_ = null;
        }
        return comparatorBuilder_;
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.CompareFilter)
    }
    // Class initializer: builds the shared singleton default instance via the
    // no-parse constructor, then fills in default field values.
    static {
      defaultInstance = new CompareFilter(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:hbase.pb.CompareFilter)
}
  /**
   * Read-only accessor view for {@code hbase.pb.DependentColumnFilter},
   * implemented by both the generated message and its Builder.
   */
  public interface DependentColumnFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // required .hbase.pb.CompareFilter compare_filter = 1;
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    boolean hasCompareFilter();
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
    // optional bytes column_family = 2;
    /**
     * <code>optional bytes column_family = 2;</code>
     */
    boolean hasColumnFamily();
    /**
     * <code>optional bytes column_family = 2;</code>
     */
    com.google.protobuf.ByteString getColumnFamily();
    // optional bytes column_qualifier = 3;
    /**
     * <code>optional bytes column_qualifier = 3;</code>
     */
    boolean hasColumnQualifier();
    /**
     * <code>optional bytes column_qualifier = 3;</code>
     */
    com.google.protobuf.ByteString getColumnQualifier();
    // optional bool drop_dependent_column = 4;
    /**
     * <code>optional bool drop_dependent_column = 4;</code>
     */
    boolean hasDropDependentColumn();
    /**
     * <code>optional bool drop_dependent_column = 4;</code>
     */
    boolean getDropDependentColumn();
  }
/**
* Protobuf type {@code hbase.pb.DependentColumnFilter}
*/
public static final class DependentColumnFilter extends
com.google.protobuf.GeneratedMessage
implements DependentColumnFilterOrBuilder {
    // Use DependentColumnFilter.newBuilder() to construct.
    private DependentColumnFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the shared default instance.
    private DependentColumnFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final DependentColumnFilter defaultInstance;
    public static DependentColumnFilter getDefaultInstance() {
      return defaultInstance;
    }
    public DependentColumnFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Streaming-parse constructor: dispatches on wire tags (tag 0 = end of
    // input; unrecognized tags are preserved in unknownFields). Throws
    // InvalidProtocolBufferException carrying the partially-parsed message.
    private DependentColumnFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Generated scaffolding; not consulted below (no repeated fields here
      // that would need it — NOTE(review): per usual protoc output).
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (compare_filter), wire type 2: if already present,
              // merge into the existing value rather than overwrite.
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = compareFilter_.toBuilder();
              }
              compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(compareFilter_);
                compareFilter_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Field 2 (column_family), wire type 2.
              bitField0_ |= 0x00000002;
              columnFamily_ = input.readBytes();
              break;
            }
            case 26: {
              // Field 3 (column_qualifier), wire type 2.
              bitField0_ |= 0x00000004;
              columnQualifier_ = input.readBytes();
              break;
            }
            case 32: {
              // Field 4 (drop_dependent_column), wire type 0 (varint bool).
              bitField0_ |= 0x00000008;
              dropDependentColumn_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class);
    }
    // Shared stateless parser; each parse constructs a new message via the
    // streaming constructor above.
    public static com.google.protobuf.Parser<DependentColumnFilter> PARSER =
        new com.google.protobuf.AbstractParser<DependentColumnFilter>() {
      public DependentColumnFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DependentColumnFilter(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<DependentColumnFilter> getParserForType() {
      return PARSER;
    }
    // Presence bits: 0x1 compare_filter, 0x2 column_family,
    // 0x4 column_qualifier, 0x8 drop_dependent_column.
    private int bitField0_;
    // required .hbase.pb.CompareFilter compare_filter = 1;
    public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public boolean hasCompareFilter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
      return compareFilter_;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
      return compareFilter_;
    }
    // optional bytes column_family = 2;
    public static final int COLUMN_FAMILY_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString columnFamily_;
    /**
     * <code>optional bytes column_family = 2;</code>
     */
    public boolean hasColumnFamily() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes column_family = 2;</code>
     */
    public com.google.protobuf.ByteString getColumnFamily() {
      return columnFamily_;
    }
    // optional bytes column_qualifier = 3;
    public static final int COLUMN_QUALIFIER_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString columnQualifier_;
    /**
     * <code>optional bytes column_qualifier = 3;</code>
     */
    public boolean hasColumnQualifier() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes column_qualifier = 3;</code>
     */
    public com.google.protobuf.ByteString getColumnQualifier() {
      return columnQualifier_;
    }
    // optional bool drop_dependent_column = 4;
    public static final int DROP_DEPENDENT_COLUMN_FIELD_NUMBER = 4;
    private boolean dropDependentColumn_;
    /**
     * <code>optional bool drop_dependent_column = 4;</code>
     */
    public boolean hasDropDependentColumn() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional bool drop_dependent_column = 4;</code>
     */
    public boolean getDropDependentColumn() {
      return dropDependentColumn_;
    }
    // Sets every field to its proto default value (presence bits untouched).
    private void initFields() {
      compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
      columnFamily_ = com.google.protobuf.ByteString.EMPTY;
      columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
      dropDependentColumn_ = false;
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    // Initialized iff the required compare_filter is set and itself
    // initialized; result is cached after the first computation.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      if (!hasCompareFilter()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getCompareFilter().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    // Writes only fields whose presence bit is set, in field-number order,
    // followed by any preserved unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, compareFilter_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, columnFamily_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, columnQualifier_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBool(4, dropDependentColumn_);
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    // Computes (and memoizes) the byte length of the wire encoding,
    // summing only present fields plus unknown fields.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, compareFilter_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, columnFamily_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, columnQualifier_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(4, dropDependentColumn_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Value equality: field presence and values must match pairwise, and
    // unknown fields must also be equal.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) obj;
      boolean result = true;
      result = result && (hasCompareFilter() == other.hasCompareFilter());
      if (hasCompareFilter()) {
        result = result && getCompareFilter()
            .equals(other.getCompareFilter());
      }
      result = result && (hasColumnFamily() == other.hasColumnFamily());
      if (hasColumnFamily()) {
        result = result && getColumnFamily()
            .equals(other.getColumnFamily());
      }
      result = result && (hasColumnQualifier() == other.hasColumnQualifier());
      if (hasColumnQualifier()) {
        result = result && getColumnQualifier()
            .equals(other.getColumnQualifier());
      }
      result = result && (hasDropDependentColumn() == other.hasDropDependentColumn());
      if (hasDropDependentColumn()) {
        result = result && (getDropDependentColumn()
            == other.getDropDependentColumn());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;
    // Hash mixes the descriptor, each present field (tagged by its field
    // number), and the unknown fields; memoized after first computation.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasCompareFilter()) {
        hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
        hash = (53 * hash) + getCompareFilter().hashCode();
      }
      if (hasColumnFamily()) {
        hash = (37 * hash) + COLUMN_FAMILY_FIELD_NUMBER;
        hash = (53 * hash) + getColumnFamily().hashCode();
      }
      if (hasColumnQualifier()) {
        hash = (37 * hash) + COLUMN_QUALIFIER_FIELD_NUMBER;
        hash = (53 * hash) + getColumnQualifier().hashCode();
      }
      if (hasDropDependentColumn()) {
        hash = (37 * hash) + DROP_DEPENDENT_COLUMN_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getDropDependentColumn());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Standard generated builder-factory surface.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    // Framework hook: builder wired to a parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
/**
* Protobuf type {@code hbase.pb.DependentColumnFilter}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getCompareFilterFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
if (compareFilterBuilder_ == null) {
compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
} else {
compareFilterBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
columnFamily_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000004);
dropDependentColumn_ = false;
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (compareFilterBuilder_ == null) {
result.compareFilter_ = compareFilter_;
} else {
result.compareFilter_ = compareFilterBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.columnFamily_ = columnFamily_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.columnQualifier_ = columnQualifier_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.dropDependentColumn_ = dropDependentColumn_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance()) return this;
if (other.hasCompareFilter()) {
mergeCompareFilter(other.getCompareFilter());
}
if (other.hasColumnFamily()) {
setColumnFamily(other.getColumnFamily());
}
if (other.hasColumnQualifier()) {
setColumnQualifier(other.getColumnQualifier());
}
if (other.hasDropDependentColumn()) {
setDropDependentColumn(other.getDropDependentColumn());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasCompareFilter()) {
return false;
}
if (!getCompareFilter().isInitialized()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required .hbase.pb.CompareFilter compare_filter = 1;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public boolean hasCompareFilter() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
if (compareFilterBuilder_ == null) {
return compareFilter_;
} else {
return compareFilterBuilder_.getMessage();
}
}
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
if (compareFilterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
compareFilter_ = value;
onChanged();
} else {
compareFilterBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public Builder setCompareFilter(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
if (compareFilterBuilder_ == null) {
compareFilter_ = builderForValue.build();
onChanged();
} else {
compareFilterBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
if (compareFilterBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) {
compareFilter_ =
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial();
} else {
compareFilter_ = value;
}
onChanged();
} else {
compareFilterBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public Builder clearCompareFilter() {
if (compareFilterBuilder_ == null) {
compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
onChanged();
} else {
compareFilterBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
 * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
 */
// Returns a mutable sub-builder for in-place editing. Calling this marks
// the field as present and fires onChanged() up-front, because the caller
// may mutate the returned builder at any time afterwards.
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
  bitField0_ |= 0x00000001;
  onChanged();
  return getCompareFilterFieldBuilder().getBuilder();
}
/**
 * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
 */
// Read-only view of the field without forcing a message build: delegates to
// the nested builder if one exists, else returns the stored message.
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
  if (compareFilterBuilder_ != null) {
    return compareFilterBuilder_.getMessageOrBuilder();
  } else {
    return compareFilter_;
  }
}
/**
 * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
 */
// Lazily creates the SingleFieldBuilder on first use, seeding it with the
// current message. Once created, the builder owns the field state, so the
// plain compareFilter_ reference is nulled to avoid two sources of truth.
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> 
    getCompareFilterFieldBuilder() {
  if (compareFilterBuilder_ == null) {
    compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
            compareFilter_,
            getParentForChildren(),
            isClean());
    compareFilter_ = null;
  }
  return compareFilterBuilder_;
}
// optional bytes column_family = 2;
// Backing storage for the optional column_family bytes field; defaults to
// the empty ByteString (never null).
private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>optional bytes column_family = 2;</code>
 */
// Presence test driven by bit 0x00000002 of bitField0_.
public boolean hasColumnFamily() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional bytes column_family = 2;</code>
 */
// Returns the current value; empty ByteString when unset.
public com.google.protobuf.ByteString getColumnFamily() {
  return columnFamily_;
}
/**
 * <code>optional bytes column_family = 2;</code>
 */
// Sets column_family; protobuf fields are null-hostile, so null is rejected
// explicitly rather than stored.
public Builder setColumnFamily(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
  columnFamily_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional bytes column_family = 2;</code>
 */
// Clears the presence bit and restores the field to the default instance's
// value (empty bytes).
public Builder clearColumnFamily() {
  bitField0_ = (bitField0_ & ~0x00000002);
  columnFamily_ = getDefaultInstance().getColumnFamily();
  onChanged();
  return this;
}
// optional bytes column_qualifier = 3;
// Backing storage for the optional column_qualifier bytes field.
private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>optional bytes column_qualifier = 3;</code>
 */
// Presence test driven by bit 0x00000004 of bitField0_.
public boolean hasColumnQualifier() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional bytes column_qualifier = 3;</code>
 */
// Returns the current value; empty ByteString when unset.
public com.google.protobuf.ByteString getColumnQualifier() {
  return columnQualifier_;
}
/**
 * <code>optional bytes column_qualifier = 3;</code>
 */
// Sets column_qualifier; null is rejected per protobuf convention.
public Builder setColumnQualifier(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
  columnQualifier_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional bytes column_qualifier = 3;</code>
 */
// Clears the presence bit and restores the default (empty bytes).
public Builder clearColumnQualifier() {
  bitField0_ = (bitField0_ & ~0x00000004);
  columnQualifier_ = getDefaultInstance().getColumnQualifier();
  onChanged();
  return this;
}
// optional bool drop_dependent_column = 4;
// Backing storage for the optional drop_dependent_column flag (defaults to
// false via Java field initialization).
private boolean dropDependentColumn_ ;
/**
 * <code>optional bool drop_dependent_column = 4;</code>
 */
// Presence test driven by bit 0x00000008 of bitField0_.
public boolean hasDropDependentColumn() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional bool drop_dependent_column = 4;</code>
 */
// Returns the current flag value; false when unset.
public boolean getDropDependentColumn() {
  return dropDependentColumn_;
}
/**
 * <code>optional bool drop_dependent_column = 4;</code>
 */
// Sets the flag and marks it present; primitives need no null check.
public Builder setDropDependentColumn(boolean value) {
  bitField0_ |= 0x00000008;
  dropDependentColumn_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional bool drop_dependent_column = 4;</code>
 */
// Clears the presence bit and resets the flag to its false default.
public Builder clearDropDependentColumn() {
  bitField0_ = (bitField0_ & ~0x00000008);
  dropDependentColumn_ = false;
  onChanged();
  return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.DependentColumnFilter)
}
// Eagerly creates the shared default (no-init) singleton for
// DependentColumnFilter; initFields() gives every field its proto default.
static {
  defaultInstance = new DependentColumnFilter(true);
  defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.DependentColumnFilter)
}
// Read-only accessor contract for hbase.pb.FamilyFilter, implemented by
// both the immutable message and its Builder.
public interface FamilyFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required .hbase.pb.CompareFilter compare_filter = 1;
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  boolean hasCompareFilter();
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
}
/**
 * Protobuf type {@code hbase.pb.FamilyFilter}
 */
// Generated immutable message with a single required sub-message field
// (compare_filter = 1). Standard protobuf-java 2.x GeneratedMessage layout:
// parsing constructor, PARSER, memoized isInitialized/size/hashCode,
// equals on field values + unknown fields, and a nested Builder.
public static final class FamilyFilter extends
    com.google.protobuf.GeneratedMessage
    implements FamilyFilterOrBuilder {
  // Use FamilyFilter.newBuilder() to construct.
  private FamilyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // no-init constructor used only for the shared default singleton.
  private FamilyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

  private static final FamilyFilter defaultInstance;
  public static FamilyFilter getDefaultInstance() {
    return defaultInstance;
  }

  public FamilyFilter getDefaultInstanceForType() {
    return defaultInstance;
  }

  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor. Reads tags until EOF (tag 0); tag 10
  // (field 1, wire type 2) parses compare_filter, merging into any value
  // already seen so repeated occurrences of a singular field follow proto
  // merge semantics. Unrecognized tags go to the UnknownFieldSet.
  private FamilyFilter(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
            if (((bitField0_ & 0x00000001) == 0x00000001)) {
              subBuilder = compareFilter_.toBuilder();
            }
            compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(compareFilter_);
              compareFilter_ = subBuilder.buildPartial();
            }
            bitField0_ |= 0x00000001;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class);
  }

  public static com.google.protobuf.Parser<FamilyFilter> PARSER =
      new com.google.protobuf.AbstractParser<FamilyFilter>() {
    public FamilyFilter parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new FamilyFilter(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<FamilyFilter> getParserForType() {
    return PARSER;
  }

  private int bitField0_;
  // required .hbase.pb.CompareFilter compare_filter = 1;
  public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
  private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  public boolean hasCompareFilter() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
    return compareFilter_;
  }
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
    return compareFilter_;
  }

  private void initFields() {
    compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
  }
  // Memoized required-field check: -1 = not computed, 0 = false, 1 = true.
  // Requires compare_filter to be present and itself initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    if (!hasCompareFilter()) {
      memoizedIsInitialized = 0;
      return false;
    }
    if (!getCompareFilter().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeMessage(1, compareFilter_);
    }
    getUnknownFields().writeTo(output);
  }

  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, compareFilter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) obj;

    boolean result = true;
    result = result && (hasCompareFilter() == other.hasCompareFilter());
    if (hasCompareFilter()) {
      result = result && getCompareFilter()
          .equals(other.getCompareFilter());
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  private int memoizedHashCode = 0;
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasCompareFilter()) {
      hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
      hash = (53 * hash) + getCompareFilter().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Static parse entry points; all delegate to PARSER.
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.FamilyFilter}
   */
  // Mutable companion Builder; mirrors the message's single compare_filter
  // field and tracks presence with bit 0x00000001 of its own bitField0_.
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilterOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        getCompareFilterFieldBuilder();
      }
    }
    private static Builder create() {
      return new Builder();
    }

    public Builder clear() {
      super.clear();
      if (compareFilterBuilder_ == null) {
        compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
      } else {
        compareFilterBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_descriptor;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter build() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      if (compareFilterBuilder_ == null) {
        result.compareFilter_ = compareFilter_;
      } else {
        result.compareFilter_ = compareFilterBuilder_.build();
      }
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance()) return this;
      if (other.hasCompareFilter()) {
        mergeCompareFilter(other.getCompareFilter());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      if (!hasCompareFilter()) {
        
        return false;
      }
      if (!getCompareFilter().isInitialized()) {
        
        return false;
      }
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // required .hbase.pb.CompareFilter compare_filter = 1;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public boolean hasCompareFilter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
      if (compareFilterBuilder_ == null) {
        return compareFilter_;
      } else {
        return compareFilterBuilder_.getMessage();
      }
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
      if (compareFilterBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        compareFilter_ = value;
        onChanged();
      } else {
        compareFilterBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder setCompareFilter(
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
      if (compareFilterBuilder_ == null) {
        compareFilter_ = builderForValue.build();
        onChanged();
      } else {
        compareFilterBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
      if (compareFilterBuilder_ == null) {
        if (((bitField0_ & 0x00000001) == 0x00000001) &&
            compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) {
          compareFilter_ =
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial();
        } else {
          compareFilter_ = value;
        }
        onChanged();
      } else {
        compareFilterBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder clearCompareFilter() {
      if (compareFilterBuilder_ == null) {
        compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
        onChanged();
      } else {
        compareFilterBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getCompareFilterFieldBuilder().getBuilder();
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
      if (compareFilterBuilder_ != null) {
        return compareFilterBuilder_.getMessageOrBuilder();
      } else {
        return compareFilter_;
      }
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> 
        getCompareFilterFieldBuilder() {
      if (compareFilterBuilder_ == null) {
        compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
                compareFilter_,
                getParentForChildren(),
                isClean());
        compareFilter_ = null;
      }
      return compareFilterBuilder_;
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.FamilyFilter)
  }

  static {
    defaultInstance = new FamilyFilter(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.FamilyFilter)
}
// Read-only accessor contract for hbase.pb.FilterList: one required
// Operator enum field plus a repeated list of child Filter messages.
public interface FilterListOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required .hbase.pb.FilterList.Operator operator = 1;
  /**
   * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
   */
  boolean hasOperator();
  /**
   * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator();

  // repeated .hbase.pb.Filter filters = 2;
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> 
      getFiltersList();
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index);
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  int getFiltersCount();
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> 
      getFiltersOrBuilderList();
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder(
      int index);
}
/**
* Protobuf type {@code hbase.pb.FilterList}
*/
public static final class FilterList extends
com.google.protobuf.GeneratedMessage
implements FilterListOrBuilder {
// Use FilterList.newBuilder() to construct.
// Builder-based constructor captures the builder's unknown fields.
private FilterList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
// no-init constructor used only for the shared default singleton.
private FilterList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Shared immutable default instance, assigned in the class's static block.
private static final FilterList defaultInstance;
public static FilterList getDefaultInstance() {
  return defaultInstance;
}

public FilterList getDefaultInstanceForType() {
  return defaultInstance;
}
// Unknown fields captured during parsing, preserved for round-tripping.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor. Tag 8 (field 1, varint) reads the
// Operator enum — unknown enum numbers are preserved as unknown varint
// fields rather than dropped. Tag 18 (field 2, length-delimited) appends a
// Filter message to filters_, lazily allocating the list on first element;
// the finally block freezes the list and the unknown-field set even when
// parsing fails mid-stream.
private FilterList(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 8: {
          int rawValue = input.readEnum();
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.valueOf(rawValue);
          if (value == null) {
            unknownFields.mergeVarintField(1, rawValue);
          } else {
            bitField0_ |= 0x00000001;
            operator_ = value;
          }
          break;
        }
        case 18: {
          if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
            filters_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter>();
            mutable_bitField0_ |= 0x00000002;
          }
          filters_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry));
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
      filters_ = java.util.Collections.unmodifiableList(filters_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor plumbing linking this class to the hbase.pb.FilterList
// message type generated in the enclosing FilterProtos outer class.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class);
}
// Stateless parser instance; delegates to the wire-format constructor.
public static com.google.protobuf.Parser<FilterList> PARSER =
    new com.google.protobuf.AbstractParser<FilterList>() {
  public FilterList parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new FilterList(input, extensionRegistry);
  }
};

@java.lang.Override
public com.google.protobuf.Parser<FilterList> getParserForType() {
  return PARSER;
}
/**
 * Protobuf enum {@code hbase.pb.FilterList.Operator}
 */
// Combination mode for a FilterList: MUST_PASS_ALL = logical AND,
// MUST_PASS_ONE = logical OR. Each constant carries a descriptor index and
// its proto wire number (they differ: indices are 0-based, numbers start
// at 1 as declared in the .proto).
public enum Operator
    implements com.google.protobuf.ProtocolMessageEnum {
  /**
   * <code>MUST_PASS_ALL = 1;</code>
   */
  MUST_PASS_ALL(0, 1),
  /**
   * <code>MUST_PASS_ONE = 2;</code>
   */
  MUST_PASS_ONE(1, 2),
  ;

  /**
   * <code>MUST_PASS_ALL = 1;</code>
   */
  public static final int MUST_PASS_ALL_VALUE = 1;
  /**
   * <code>MUST_PASS_ONE = 2;</code>
   */
  public static final int MUST_PASS_ONE_VALUE = 2;

  public final int getNumber() { return value; }

  // Maps a wire number to its constant; returns null for unknown numbers
  // (callers treat null as "preserve in unknown fields").
  public static Operator valueOf(int value) {
    switch (value) {
      case 1: return MUST_PASS_ALL;
      case 2: return MUST_PASS_ONE;
      default: return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<Operator>
      internalGetValueMap() {
    return internalValueMap;
  }
  private static com.google.protobuf.Internal.EnumLiteMap<Operator>
      internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Operator>() {
          public Operator findValueByNumber(int number) {
            return Operator.valueOf(number);
          }
        };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    return getDescriptor().getValues().get(index);
  }
  public final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDescriptor().getEnumTypes().get(0);
  }

  private static final Operator[] VALUES = values();

  public static Operator valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "EnumValueDescriptor is not for this type.");
    }
    return VALUES[desc.getIndex()];
  }

  private final int index;
  private final int value;

  private Operator(int index, int value) {
    this.index = index;
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:hbase.pb.FilterList.Operator)
}
// Presence bits for optional/required fields; bit 0x00000001 = operator.
private int bitField0_;
// required .hbase.pb.FilterList.Operator operator = 1;
public static final int OPERATOR_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_;
/**
 * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
 */
public boolean hasOperator() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() {
  return operator_;
}
// repeated .hbase.pb.Filter filters = 2;
public static final int FILTERS_FIELD_NUMBER = 2;
// Immutable after construction: either Collections.emptyList() or an
// unmodifiable wrapper built in the parsing constructor / buildPartial.
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> filters_;
/**
 * <code>repeated .hbase.pb.Filter filters = 2;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> getFiltersList() {
  return filters_;
}
/**
 * <code>repeated .hbase.pb.Filter filters = 2;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> 
    getFiltersOrBuilderList() {
  return filters_;
}
/**
 * <code>repeated .hbase.pb.Filter filters = 2;</code>
 */
public int getFiltersCount() {
  return filters_.size();
}
/**
 * <code>repeated .hbase.pb.Filter filters = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index) {
  return filters_.get(index);
}
/**
 * <code>repeated .hbase.pb.Filter filters = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder(
    int index) {
  return filters_.get(index);
}
// Assigns proto-declared defaults: operator = MUST_PASS_ALL (first enum
// value), filters = empty list.
private void initFields() {
  operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
  filters_ = java.util.Collections.emptyList();
}
// Memoized required-field check (-1 unknown, 0 false, 1 true): operator
// must be set and every nested filter must itself be initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;

  if (!hasOperator()) {
    memoizedIsInitialized = 0;
    return false;
  }
  for (int i = 0; i < getFiltersCount(); i++) {
    if (!getFilters(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  memoizedIsInitialized = 1;
  return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, operator_.getNumber());
}
for (int i = 0; i < filters_.size(); i++) {
output.writeMessage(2, filters_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, operator_.getNumber());
}
for (int i = 0; i < filters_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, filters_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
    private static final long serialVersionUID = 0L;
    // Java serialization is routed through the protobuf runtime's writeReplace.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Field-by-field structural equality: operator presence+value, filters list,
    // and preserved unknown fields must all match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) obj;
      boolean result = true;
      result = result && (hasOperator() == other.hasOperator());
      if (hasOperator()) {
        result = result &&
            (getOperator() == other.getOperator());
      }
      result = result && getFiltersList()
          .equals(other.getFiltersList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;  // 0 means "not yet computed"
    // Hash mixes descriptor, set fields (keyed by field number), and unknown fields;
    // consistent with equals() above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasOperator()) {
        hash = (37 * hash) + OPERATOR_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getOperator());
      }
      if (getFiltersCount() > 0) {
        hash = (37 * hash) + FILTERS_FIELD_NUMBER;
        hash = (53 * hash) + getFiltersList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Standard generated parse entry points; every overload delegates to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Builder factories: fresh builder, builder pre-populated from a prototype,
    // and builder-from-this-instance (toBuilder).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.FilterList}
     *
     * Mutable builder for FilterList. The repeated 'filters' field is managed
     * either inline in filters_ (lazy-copy-on-write, tracked by bit 1 of
     * bitField0_) or, once nested builders are requested, via filtersBuilder_.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterListOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime requires it
      // (i.e. when building via reflection/descriptors).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getFiltersFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets both fields to their proto defaults and clears the has-bits.
      public Builder clear() {
        super.clear();
        operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (filtersBuilder_ == null) {
          filters_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          filtersBuilder_.clear();
        }
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance();
      }
      // Like buildPartial() but rejects messages missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies builder state into a new immutable FilterList; the filters list
      // is frozen in place (wrapped unmodifiable) so it is shared, not copied.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.operator_ = operator_;
        if (filtersBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            filters_ = java.util.Collections.unmodifiableList(filters_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.filters_ = filters_;
        } else {
          result.filters_ = filtersBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Standard proto merge: set fields from 'other' overwrite/append.
      // If our list is still empty we adopt other's (immutable) list directly
      // rather than copying it.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance()) return this;
        if (other.hasOperator()) {
          setOperator(other.getOperator());
        }
        if (filtersBuilder_ == null) {
          if (!other.filters_.isEmpty()) {
            if (filters_.isEmpty()) {
              filters_ = other.filters_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureFiltersIsMutable();
              filters_.addAll(other.filters_);
            }
            onChanged();
          }
        } else {
          if (!other.filters_.isEmpty()) {
            if (filtersBuilder_.isEmpty()) {
              filtersBuilder_.dispose();
              filtersBuilder_ = null;
              filters_ = other.filters_;
              bitField0_ = (bitField0_ & ~0x00000002);
              filtersBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getFiltersFieldBuilder() : null;
            } else {
              filtersBuilder_.addAllMessages(other.filters_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Unmemoized variant of the message's isInitialized(): required operator
      // present and all child filters initialized.
      public final boolean isInitialized() {
        if (!hasOperator()) {

          return false;
        }
        for (int i = 0; i < getFiltersCount(); i++) {
          if (!getFilters(i).isInitialized()) {

            return false;
          }
        }
        return true;
      }
      // Parses from a stream and merges the result; on parse failure the
      // partially-parsed message is still merged in (finally block) before rethrow.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;  // bit 0 = operator set, bit 1 = filters_ is our own mutable copy

      // required .hbase.pb.FilterList.Operator operator = 1;
      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
      /**
       * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
       */
      public boolean hasOperator() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() {
        return operator_;
      }
      /**
       * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
       */
      public Builder setOperator(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        operator_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
       */
      public Builder clearOperator() {
        bitField0_ = (bitField0_ & ~0x00000001);
        operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
        onChanged();
        return this;
      }

      // repeated .hbase.pb.Filter filters = 2;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> filters_ =
        java.util.Collections.emptyList();
      // Copy-on-write: replaces the (possibly shared/immutable) list with a
      // private ArrayList the first time a mutation is attempted.
      private void ensureFiltersIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          filters_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter>(filters_);
          bitField0_ |= 0x00000002;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filtersBuilder_;

      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> getFiltersList() {
        if (filtersBuilder_ == null) {
          return java.util.Collections.unmodifiableList(filters_);
        } else {
          return filtersBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public int getFiltersCount() {
        if (filtersBuilder_ == null) {
          return filters_.size();
        } else {
          return filtersBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index) {
        if (filtersBuilder_ == null) {
          return filters_.get(index);
        } else {
          return filtersBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public Builder setFilters(
          int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filtersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFiltersIsMutable();
          filters_.set(index, value);
          onChanged();
        } else {
          filtersBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public Builder setFilters(
          int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
        if (filtersBuilder_ == null) {
          ensureFiltersIsMutable();
          filters_.set(index, builderForValue.build());
          onChanged();
        } else {
          filtersBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public Builder addFilters(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filtersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFiltersIsMutable();
          filters_.add(value);
          onChanged();
        } else {
          filtersBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public Builder addFilters(
          int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filtersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFiltersIsMutable();
          filters_.add(index, value);
          onChanged();
        } else {
          filtersBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public Builder addFilters(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
        if (filtersBuilder_ == null) {
          ensureFiltersIsMutable();
          filters_.add(builderForValue.build());
          onChanged();
        } else {
          filtersBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public Builder addFilters(
          int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
        if (filtersBuilder_ == null) {
          ensureFiltersIsMutable();
          filters_.add(index, builderForValue.build());
          onChanged();
        } else {
          filtersBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public Builder addAllFilters(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> values) {
        if (filtersBuilder_ == null) {
          ensureFiltersIsMutable();
          super.addAll(values, filters_);
          onChanged();
        } else {
          filtersBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public Builder clearFilters() {
        if (filtersBuilder_ == null) {
          filters_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          filtersBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public Builder removeFilters(int index) {
        if (filtersBuilder_ == null) {
          ensureFiltersIsMutable();
          filters_.remove(index);
          onChanged();
        } else {
          filtersBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFiltersBuilder(
          int index) {
        return getFiltersFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder(
          int index) {
        if (filtersBuilder_ == null) {
          return filters_.get(index);  } else {
          return filtersBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
           getFiltersOrBuilderList() {
        if (filtersBuilder_ != null) {
          return filtersBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(filters_);
        }
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder addFiltersBuilder() {
        return getFiltersFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder addFiltersBuilder(
          int index) {
        return getFiltersFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.Filter filters = 2;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder>
           getFiltersBuilderList() {
        return getFiltersFieldBuilder().getBuilderList();
      }
      // Lazily switches 'filters' to builder-managed mode; after this, filters_
      // is owned by filtersBuilder_ and the raw list reference is nulled out.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
          getFiltersFieldBuilder() {
        if (filtersBuilder_ == null) {
          filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
                  filters_,
                  ((bitField0_ & 0x00000002) == 0x00000002),
                  getParentForChildren(),
                  isClean());
          filters_ = null;
        }
        return filtersBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.FilterList)
    }
    // Singleton default instance, created eagerly at class-load time with the
    // no-init constructor and then populated with proto default values.
    static {
      defaultInstance = new FilterList(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:hbase.pb.FilterList)
}
  // Read-only view shared by FilterWrapper and FilterWrapper.Builder.
  public interface FilterWrapperOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.Filter filter = 1;
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     */
    boolean hasFilter();
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
  }
/**
* Protobuf type {@code hbase.pb.FilterWrapper}
*/
public static final class FilterWrapper extends
com.google.protobuf.GeneratedMessage
implements FilterWrapperOrBuilder {
    // Use FilterWrapper.newBuilder() to construct.
    private FilterWrapper(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the defaultInstance singleton below.
    private FilterWrapper(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final FilterWrapper defaultInstance;
    public static FilterWrapper getDefaultInstance() {
      return defaultInstance;
    }

    public FilterWrapper getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not recognized at parse time are preserved here and re-emitted on write.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0).
    // Field 1 (tag 10 = field 1, wire type 2) is the wrapped Filter message;
    // anything else is captured into unknownFields.
    private FilterWrapper(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
              // If 'filter' was already seen, merge the new occurrence into it
              // (last-one-wins semantics for scalar subfields).
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = filter_.toBuilder();
              }
              filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(filter_);
                filter_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class);
    }

    // Parser singleton used by all parseFrom/parseDelimitedFrom entry points;
    // delegates to the wire-parsing constructor above.
    public static com.google.protobuf.Parser<FilterWrapper> PARSER =
        new com.google.protobuf.AbstractParser<FilterWrapper>() {
      public FilterWrapper parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FilterWrapper(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FilterWrapper> getParserForType() {
      return PARSER;
    }
    private int bitField0_;  // bit 0 records whether 'filter' was explicitly set
    // required .hbase.pb.Filter filter = 1;
    public static final int FILTER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     */
    public boolean hasFilter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
      return filter_;
    }
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
      return filter_;
    }

    // Resets 'filter' to its proto default (the Filter default instance).
    private void initFields() {
      filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
    }
    // Cached isInitialized() verdict: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      // 'filter' is a required field and must itself be fully initialized.
      if (!hasFilter()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getFilter().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes to the wire: field 1 = the wrapped filter, then unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, filter_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;  // -1 until first computed
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, filter_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java serialization is routed through the protobuf runtime's writeReplace.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Structural equality: filter presence+value and unknown fields must match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) obj;

      boolean result = true;
      result = result && (hasFilter() == other.hasFilter());
      if (hasFilter()) {
        result = result && getFilter()
            .equals(other.getFilter());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;  // 0 means "not yet computed"
    // Hash mixes descriptor, set fields (keyed by field number), and unknown
    // fields; consistent with equals() above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasFilter()) {
        hash = (37 * hash) + FILTER_FIELD_NUMBER;
        hash = (53 * hash) + getFilter().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Standard generated parse entry points; every overload delegates to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factories: fresh builder, builder pre-populated from a prototype,
    // and builder-from-this-instance (toBuilder).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
/**
* Protobuf type {@code hbase.pb.FilterWrapper}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapperOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested filter builder when the runtime requires it
      // (i.e. when building via reflection/descriptors).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getFilterFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets 'filter' to its proto default and clears its has-bit.
      public Builder clear() {
        super.clear();
        if (filterBuilder_ == null) {
          filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
        } else {
          filterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance();
      }
      // Like buildPartial() but rejects messages missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies builder state into a new immutable FilterWrapper instance.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (filterBuilder_ == null) {
          result.filter_ = filter_;
        } else {
          result.filter_ = filterBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      // Type-dispatching merge: delegates to the typed overload for
      // FilterWrapper messages, otherwise falls back to the generic
      // reflection-based merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Merges another FilterWrapper into this builder; merging the default
      // instance is a no-op. The set 'filter' field and any unknown fields are
      // merged in.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance()) return this;
        if (other.hasFilter()) {
          mergeFilter(other.getFilter());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // True only when the required 'filter' field is set and is itself fully
      // initialized (it is a required sub-message).
      public final boolean isInitialized() {
        if (!hasFilter()) {
          return false;
        }
        if (!getFilter().isInitialized()) {
          return false;
        }
        return true;
      }
      // Parses a FilterWrapper from the input stream and merges it into this
      // builder. On a parse error, any partially-parsed message is still
      // merged (in the finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for optional/required fields; bit 0x1 tracks 'filter'.
      private int bitField0_;
      // required .hbase.pb.Filter filter = 1;
      // Cached message value, used only while filterBuilder_ is null.
      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
      // Lazily-created nested builder; once created it owns the field value.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       * Returns whether the 'filter' field has been explicitly set.
       */
      public boolean hasFilter() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       * Returns the current field value from the nested builder when one
       * exists, otherwise the cached message.
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
        if (filterBuilder_ == null) {
          return filter_;
        } else {
          return filterBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       * Replaces the field with the given message (null is rejected) and sets
       * the presence bit.
       */
      public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filterBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          filter_ = value;
          onChanged();
        } else {
          filterBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       * Convenience overload: builds the given sub-builder and stores the
       * result, setting the presence bit.
       */
      public Builder setFilter(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
        if (filterBuilder_ == null) {
          filter_ = builderForValue.build();
          onChanged();
        } else {
          filterBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       * Merges the given message into the existing field value when one is
       * already set (and is not the default instance); otherwise simply
       * replaces it. Sets the presence bit either way.
       */
      public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filterBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
            filter_ =
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
          } else {
            filter_ = value;
          }
          onChanged();
        } else {
          filterBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       * Resets the field to its default instance and clears the presence bit.
       */
      public Builder clearFilter() {
        if (filterBuilder_ == null) {
          filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
          onChanged();
        } else {
          filterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       * Returns a mutable sub-builder for the field; marks the field present
       * since the caller is expected to populate it.
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getFilterFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       * Read-only view of the field: the nested builder's view if one exists,
       * otherwise the cached message.
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
        if (filterBuilder_ != null) {
          return filterBuilder_.getMessageOrBuilder();
        } else {
          return filter_;
        }
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       * Lazily creates the SingleFieldBuilder, transferring ownership of the
       * cached message to it (filter_ is nulled once the builder exists).
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> 
          getFilterFieldBuilder() {
        if (filterBuilder_ == null) {
          filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
                  filter_,
                  getParentForChildren(),
                  isClean());
          filter_ = null;
        }
        return filterBuilder_;
      }
// @@protoc_insertion_point(builder_scope:hbase.pb.FilterWrapper)
}
    // Eagerly create and initialize the FilterWrapper default instance.
    static {
      defaultInstance = new FilterWrapper(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:hbase.pb.FilterWrapper)
}
  // Read-only accessor interface for hbase.pb.FirstKeyOnlyFilter.
  // The message has no fields, so the interface adds nothing beyond
  // MessageOrBuilder.
  public interface FirstKeyOnlyFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hbase.pb.FirstKeyOnlyFilter}
   *
   * A field-less message: only unknown fields are carried, so serialization,
   * equality and hashing all reduce to the unknown-field set.
   */
  public static final class FirstKeyOnlyFilter extends
      com.google.protobuf.GeneratedMessage
      implements FirstKeyOnlyFilterOrBuilder {
    // Use FirstKeyOnlyFilter.newBuilder() to construct.
    private FirstKeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only for the singleton default instance (empty unknown fields).
    private FirstKeyOnlyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final FirstKeyOnlyFilter defaultInstance;
    public static FirstKeyOnlyFilter getDefaultInstance() {
      return defaultInstance;
    }

    public FirstKeyOnlyFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: every tag goes to the unknown-field
    // set since this message declares no fields; tag 0 terminates the loop.
    private FirstKeyOnlyFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class);
    }

    public static com.google.protobuf.Parser<FirstKeyOnlyFilter> PARSER =
        new com.google.protobuf.AbstractParser<FirstKeyOnlyFilter>() {
      public FirstKeyOnlyFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FirstKeyOnlyFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FirstKeyOnlyFilter> getParserForType() {
      return PARSER;
    }

    // No fields to initialize.
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    // Always initialized: there are no required fields.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Equality considers only unknown fields (no declared fields exist).
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.FirstKeyOnlyFilter}
     *
     * Builder for the field-less message; clear/merge/buildPartial only touch
     * unknown fields.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-type fields, so nothing to eagerly initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Parse-and-merge from a stream; partially-parsed data is merged even
      // when parsing fails (finally block), then the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.FirstKeyOnlyFilter)
    }

    // Eagerly create and initialize the singleton default instance.
    static {
      defaultInstance = new FirstKeyOnlyFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.FirstKeyOnlyFilter)
  }
  // Read-only accessor interface for hbase.pb.FirstKeyValueMatchingQualifiersFilter:
  // exposes the repeated 'qualifiers' bytes field.
  public interface FirstKeyValueMatchingQualifiersFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated bytes qualifiers = 1;
    /**
     * <code>repeated bytes qualifiers = 1;</code>
     */
    java.util.List<com.google.protobuf.ByteString> getQualifiersList();
    /**
     * <code>repeated bytes qualifiers = 1;</code>
     */
    int getQualifiersCount();
    /**
     * <code>repeated bytes qualifiers = 1;</code>
     */
    com.google.protobuf.ByteString getQualifiers(int index);
  }
/**
* Protobuf type {@code hbase.pb.FirstKeyValueMatchingQualifiersFilter}
*/
public static final class FirstKeyValueMatchingQualifiersFilter extends
com.google.protobuf.GeneratedMessage
implements FirstKeyValueMatchingQualifiersFilterOrBuilder {
// Use FirstKeyValueMatchingQualifiersFilter.newBuilder() to construct.
private FirstKeyValueMatchingQualifiersFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private FirstKeyValueMatchingQualifiersFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final FirstKeyValueMatchingQualifiersFilter defaultInstance;
public static FirstKeyValueMatchingQualifiersFilter getDefaultInstance() {
return defaultInstance;
}
public FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private FirstKeyValueMatchingQualifiersFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
qualifiers_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
mutable_bitField0_ |= 0x00000001;
}
qualifiers_.add(input.readBytes());
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class);
}
public static com.google.protobuf.Parser<FirstKeyValueMatchingQualifiersFilter> PARSER =
new com.google.protobuf.AbstractParser<FirstKeyValueMatchingQualifiersFilter>() {
public FirstKeyValueMatchingQualifiersFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new FirstKeyValueMatchingQualifiersFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<FirstKeyValueMatchingQualifiersFilter> getParserForType() {
return PARSER;
}
// repeated bytes qualifiers = 1;
public static final int QUALIFIERS_FIELD_NUMBER = 1;
private java.util.List<com.google.protobuf.ByteString> qualifiers_;
/**
* <code>repeated bytes qualifiers = 1;</code>
*/
public java.util.List<com.google.protobuf.ByteString>
getQualifiersList() {
return qualifiers_;
}
/**
* <code>repeated bytes qualifiers = 1;</code>
*/
public int getQualifiersCount() {
return qualifiers_.size();
}
/**
* <code>repeated bytes qualifiers = 1;</code>
*/
public com.google.protobuf.ByteString getQualifiers(int index) {
return qualifiers_.get(index);
}
private void initFields() {
qualifiers_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < qualifiers_.size(); i++) {
output.writeBytes(1, qualifiers_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < qualifiers_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(qualifiers_.get(i));
}
size += dataSize;
size += 1 * getQualifiersList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) obj;
boolean result = true;
result = result && getQualifiersList()
.equals(other.getQualifiersList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getQualifiersCount() > 0) {
hash = (37 * hash) + QUALIFIERS_FIELD_NUMBER;
hash = (53 * hash) + getQualifiersList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.FirstKeyValueMatchingQualifiersFilter}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
qualifiers_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.qualifiers_ = qualifiers_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance()) return this;
if (!other.qualifiers_.isEmpty()) {
if (qualifiers_.isEmpty()) {
qualifiers_ = other.qualifiers_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureQualifiersIsMutable();
qualifiers_.addAll(other.qualifiers_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x00000001 of bitField0_ records whether qualifiers_ is a private,
// mutable list owned by this builder (as opposed to a shared/adopted one).
private int bitField0_;
// repeated bytes qualifiers = 1;
private java.util.List<com.google.protobuf.ByteString> qualifiers_ = java.util.Collections.emptyList();
// Copy-on-write helper: replaces qualifiers_ with a private ArrayList copy
// the first time a mutation is attempted, then marks it owned.
private void ensureQualifiersIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
qualifiers_ = new java.util.ArrayList<com.google.protobuf.ByteString>(qualifiers_);
bitField0_ |= 0x00000001;
}
}
/**
 * <code>repeated bytes qualifiers = 1;</code>
 * Read view: wrapped unmodifiable so callers cannot bypass the builder's
 * copy-on-write bookkeeping.
 */
public java.util.List<com.google.protobuf.ByteString>
getQualifiersList() {
return java.util.Collections.unmodifiableList(qualifiers_);
}
/**
 * <code>repeated bytes qualifiers = 1;</code>
 */
public int getQualifiersCount() {
return qualifiers_.size();
}
/**
 * <code>repeated bytes qualifiers = 1;</code>
 */
public com.google.protobuf.ByteString getQualifiers(int index) {
return qualifiers_.get(index);
}
/**
 * <code>repeated bytes qualifiers = 1;</code>
 * Null-hostile, as protobuf fields can never hold null elements.
 */
public Builder setQualifiers(
int index, com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureQualifiersIsMutable();
qualifiers_.set(index, value);
onChanged();
return this;
}
/**
 * <code>repeated bytes qualifiers = 1;</code>
 */
public Builder addQualifiers(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureQualifiersIsMutable();
qualifiers_.add(value);
onChanged();
return this;
}
/**
 * <code>repeated bytes qualifiers = 1;</code>
 */
public Builder addAllQualifiers(
java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
ensureQualifiersIsMutable();
// AbstractMessageLite.Builder.addAll null-checks each element while copying.
super.addAll(values, qualifiers_);
onChanged();
return this;
}
/**
 * <code>repeated bytes qualifiers = 1;</code>
 * Resets to the shared empty list and clears the ownership bit.
 */
public Builder clearQualifiers() {
qualifiers_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.FirstKeyValueMatchingQualifiersFilter)
}
// Eagerly create the shared default (empty) instance at class-load time.
static {
defaultInstance = new FirstKeyValueMatchingQualifiersFilter(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.FirstKeyValueMatchingQualifiersFilter)
}
// Read-only accessor contract for hbase.pb.FuzzyRowFilter, implemented by
// both the immutable message and its Builder.
public interface FuzzyRowFilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>
getFuzzyKeysDataList();
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index);
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
int getFuzzyKeysDataCount();
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
getFuzzyKeysDataOrBuilderList();
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder(
int index);
}
/**
* Protobuf type {@code hbase.pb.FuzzyRowFilter}
*/
public static final class FuzzyRowFilter extends
com.google.protobuf.GeneratedMessage
implements FuzzyRowFilterOrBuilder {
// Use FuzzyRowFilter.newBuilder() to construct.
// Builder-based constructor: snapshots the builder's unknown fields.
private FuzzyRowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the shared default instance.
private FuzzyRowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final FuzzyRowFilter defaultInstance;
public static FuzzyRowFilter getDefaultInstance() {
return defaultInstance;
}
public FuzzyRowFilter getDefaultInstanceForType() {
return defaultInstance;
}
// Fields from the wire that this message definition does not know about.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tags until EOF (tag 0), collecting
// fuzzy_keys_data entries (field 1, tag 10 = field 1 / wire type 2) and
// preserving any unrecognized fields.
private FuzzyRowFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Lazily allocate the repeated-field list on first element.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
fuzzyKeysData_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>();
mutable_bitField0_ |= 0x00000001;
}
fuzzyKeysData_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the list (even on error) so the message is immutable.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
fuzzyKeysData_ = java.util.Collections.unmodifiableList(fuzzyKeysData_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor/reflection plumbing plus the stateless PARSER singleton used by
// all parseFrom overloads.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class);
}
public static com.google.protobuf.Parser<FuzzyRowFilter> PARSER =
new com.google.protobuf.AbstractParser<FuzzyRowFilter>() {
public FuzzyRowFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new FuzzyRowFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<FuzzyRowFilter> getParserForType() {
return PARSER;
}
// repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;
public static final int FUZZY_KEYS_DATA_FIELD_NUMBER = 1;
// Immutable after construction (sealed in the parsing ctor / initFields).
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> fuzzyKeysData_;
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getFuzzyKeysDataList() {
return fuzzyKeysData_;
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
getFuzzyKeysDataOrBuilderList() {
return fuzzyKeysData_;
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public int getFuzzyKeysDataCount() {
return fuzzyKeysData_.size();
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) {
return fuzzyKeysData_.get(index);
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder(
int index) {
return fuzzyKeysData_.get(index);
}
// Sets field defaults; called before parsing and for the default instance.
private void initFields() {
fuzzyKeysData_ = java.util.Collections.emptyList();
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// Initialized iff every nested BytesBytesPair element is initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getFuzzyKeysDataCount(); i++) {
if (!getFuzzyKeysData(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes all fuzzy_keys_data entries then any unknown fields.
// getSerializedSize() is called first to populate nested memoized sizes.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < fuzzyKeysData_.size(); i++) {
output.writeMessage(1, fuzzyKeysData_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the wire size; safe because the message is immutable.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < fuzzyKeysData_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, fuzzyKeysData_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook: delegates to GeneratedMessage's serialized proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality over fuzzy_keys_data and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) obj;
boolean result = true;
result = result && getFuzzyKeysDataList()
.equals(other.getFuzzyKeysDataList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
// Hash consistent with equals(); memoized (0 means "not yet computed").
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getFuzzyKeysDataCount() > 0) {
hash = (37 * hash) + FUZZY_KEYS_DATA_FIELD_NUMBER;
hash = (53 * hash) + getFuzzyKeysDataList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom overloads; all delegate to PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods; toBuilder() pre-populates from this message.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Parent-aware builder used by nested-builder change propagation.
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.FuzzyRowFilter}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when running in reflective mode
// (alwaysUseFieldBuilders is true only inside protobuf's own tests).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getFuzzyKeysDataFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets the builder; the repeated field is cleared either inline or via
// its nested field builder, whichever representation is active.
public Builder clear() {
super.clear();
if (fuzzyKeysDataBuilder_ == null) {
fuzzyKeysData_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
fuzzyKeysDataBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance();
}
// build() enforces initialization; buildPartial() does not.
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter(this);
int from_bitField0_ = bitField0_;
if (fuzzyKeysDataBuilder_ == null) {
// Seal the inline list and hand ownership to the built message; the
// cleared bit forces a copy if this builder mutates the field later.
if (((bitField0_ & 0x00000001) == 0x00000001)) {
fuzzyKeysData_ = java.util.Collections.unmodifiableList(fuzzyKeysData_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.fuzzyKeysData_ = fuzzyKeysData_;
} else {
result.fuzzyKeysData_ = fuzzyKeysDataBuilder_.build();
}
onBuilt();
return result;
}
// Dynamic-dispatch merge; see the typed overload below for the fast path.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Appends other's fuzzy_keys_data; handles both the inline-list and the
// nested-field-builder representations of the repeated field.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance()) return this;
if (fuzzyKeysDataBuilder_ == null) {
if (!other.fuzzyKeysData_.isEmpty()) {
if (fuzzyKeysData_.isEmpty()) {
// Adopt other's immutable list; cleared bit => copy before mutation.
fuzzyKeysData_ = other.fuzzyKeysData_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureFuzzyKeysDataIsMutable();
fuzzyKeysData_.addAll(other.fuzzyKeysData_);
}
onChanged();
}
} else {
if (!other.fuzzyKeysData_.isEmpty()) {
if (fuzzyKeysDataBuilder_.isEmpty()) {
// Empty builder: discard it, adopt the list, and (in reflective mode)
// rebuild the field builder around the adopted list.
fuzzyKeysDataBuilder_.dispose();
fuzzyKeysDataBuilder_ = null;
fuzzyKeysData_ = other.fuzzyKeysData_;
bitField0_ = (bitField0_ & ~0x00000001);
fuzzyKeysDataBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getFuzzyKeysDataFieldBuilder() : null;
} else {
fuzzyKeysDataBuilder_.addAllMessages(other.fuzzyKeysData_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Initialized iff every nested BytesBytesPair element is initialized.
public final boolean isInitialized() {
for (int i = 0; i < getFuzzyKeysDataCount(); i++) {
if (!getFuzzyKeysData(i).isInitialized()) {
return false;
}
}
return true;
}
// Parse-and-merge from a stream; on failure the partial message (if any)
// is still merged in the finally block before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x00000001 of bitField0_: whether fuzzyKeysData_ is a private,
// mutable list owned by this builder.
private int bitField0_;
// repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> fuzzyKeysData_ =
java.util.Collections.emptyList();
// Copy-on-write helper for the inline-list representation.
private void ensureFuzzyKeysDataIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
fuzzyKeysData_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(fuzzyKeysData_);
bitField0_ |= 0x00000001;
}
}
// Non-null once nested builders are requested; from then on it, not
// fuzzyKeysData_, is the authoritative representation of the field.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> fuzzyKeysDataBuilder_;
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 * Every accessor below branches on which representation is active:
 * inline list (fuzzyKeysDataBuilder_ == null) or nested field builder.
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getFuzzyKeysDataList() {
if (fuzzyKeysDataBuilder_ == null) {
return java.util.Collections.unmodifiableList(fuzzyKeysData_);
} else {
return fuzzyKeysDataBuilder_.getMessageList();
}
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public int getFuzzyKeysDataCount() {
if (fuzzyKeysDataBuilder_ == null) {
return fuzzyKeysData_.size();
} else {
return fuzzyKeysDataBuilder_.getCount();
}
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) {
if (fuzzyKeysDataBuilder_ == null) {
return fuzzyKeysData_.get(index);
} else {
return fuzzyKeysDataBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 * Null-hostile: protobuf fields can never hold null elements.
 */
public Builder setFuzzyKeysData(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
if (fuzzyKeysDataBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFuzzyKeysDataIsMutable();
fuzzyKeysData_.set(index, value);
onChanged();
} else {
fuzzyKeysDataBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public Builder setFuzzyKeysData(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
if (fuzzyKeysDataBuilder_ == null) {
ensureFuzzyKeysDataIsMutable();
fuzzyKeysData_.set(index, builderForValue.build());
onChanged();
} else {
fuzzyKeysDataBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public Builder addFuzzyKeysData(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
if (fuzzyKeysDataBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFuzzyKeysDataIsMutable();
fuzzyKeysData_.add(value);
onChanged();
} else {
fuzzyKeysDataBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 * Indexed insert variant; null-hostile like the other mutators.
 */
public Builder addFuzzyKeysData(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
if (fuzzyKeysDataBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFuzzyKeysDataIsMutable();
fuzzyKeysData_.add(index, value);
onChanged();
} else {
fuzzyKeysDataBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public Builder addFuzzyKeysData(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
if (fuzzyKeysDataBuilder_ == null) {
ensureFuzzyKeysDataIsMutable();
fuzzyKeysData_.add(builderForValue.build());
onChanged();
} else {
fuzzyKeysDataBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public Builder addFuzzyKeysData(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
if (fuzzyKeysDataBuilder_ == null) {
ensureFuzzyKeysDataIsMutable();
fuzzyKeysData_.add(index, builderForValue.build());
onChanged();
} else {
fuzzyKeysDataBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public Builder addAllFuzzyKeysData(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values) {
if (fuzzyKeysDataBuilder_ == null) {
ensureFuzzyKeysDataIsMutable();
super.addAll(values, fuzzyKeysData_);
onChanged();
} else {
fuzzyKeysDataBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 * Resets the inline list (and ownership bit) or clears the field builder.
 */
public Builder clearFuzzyKeysData() {
if (fuzzyKeysDataBuilder_ == null) {
fuzzyKeysData_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
fuzzyKeysDataBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public Builder removeFuzzyKeysData(int index) {
if (fuzzyKeysDataBuilder_ == null) {
ensureFuzzyKeysDataIsMutable();
fuzzyKeysData_.remove(index);
onChanged();
} else {
fuzzyKeysDataBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 * Returns a mutable nested builder; forces the field-builder representation.
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getFuzzyKeysDataBuilder(
int index) {
return getFuzzyKeysDataFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder(
int index) {
if (fuzzyKeysDataBuilder_ == null) {
return fuzzyKeysData_.get(index); } else {
return fuzzyKeysDataBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
getFuzzyKeysDataOrBuilderList() {
if (fuzzyKeysDataBuilder_ != null) {
return fuzzyKeysDataBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(fuzzyKeysData_);
}
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 * Appends a new default-valued element and returns its builder.
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder() {
return getFuzzyKeysDataFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder(
int index) {
return getFuzzyKeysDataFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder>
getFuzzyKeysDataBuilderList() {
return getFuzzyKeysDataFieldBuilder().getBuilderList();
}
// Lazily switches to the RepeatedFieldBuilder representation; after this the
// inline fuzzyKeysData_ reference is nulled and the builder owns the data.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
getFuzzyKeysDataFieldBuilder() {
if (fuzzyKeysDataBuilder_ == null) {
fuzzyKeysDataBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>(
fuzzyKeysData_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
fuzzyKeysData_ = null;
}
return fuzzyKeysDataBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.FuzzyRowFilter)
}
// Eagerly create the shared default (empty) instance at class-load time.
static {
defaultInstance = new FuzzyRowFilter(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.FuzzyRowFilter)
}
// Read-only accessor contract for hbase.pb.InclusiveStopFilter.
public interface InclusiveStopFilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional bytes stop_row_key = 1;
/**
 * <code>optional bytes stop_row_key = 1;</code>
 */
boolean hasStopRowKey();
/**
 * <code>optional bytes stop_row_key = 1;</code>
 */
com.google.protobuf.ByteString getStopRowKey();
}
/**
* Protobuf type {@code hbase.pb.InclusiveStopFilter}
*/
public static final class InclusiveStopFilter extends
com.google.protobuf.GeneratedMessage
implements InclusiveStopFilterOrBuilder {
// Use InclusiveStopFilter.newBuilder() to construct.
// Builder-based constructor: snapshots the builder's unknown fields.
private InclusiveStopFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the shared default instance.
private InclusiveStopFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final InclusiveStopFilter defaultInstance;
public static InclusiveStopFilter getDefaultInstance() {
return defaultInstance;
}
public InclusiveStopFilter getDefaultInstanceForType() {
return defaultInstance;
}
// Fields from the wire that this message definition does not know about.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tags until EOF (tag 0), capturing
// stop_row_key (field 1, tag 10 = field 1 / wire type 2) and preserving any
// unrecognized fields.
private InclusiveStopFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Mark stop_row_key present, then read its bytes payload.
bitField0_ |= 0x00000001;
stopRowKey_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor/reflection plumbing plus the stateless PARSER singleton.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class);
}
public static com.google.protobuf.Parser<InclusiveStopFilter> PARSER =
new com.google.protobuf.AbstractParser<InclusiveStopFilter>() {
public InclusiveStopFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new InclusiveStopFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<InclusiveStopFilter> getParserForType() {
return PARSER;
}
// Bit 0x00000001 of bitField0_ records presence of stop_row_key.
private int bitField0_;
// optional bytes stop_row_key = 1;
public static final int STOP_ROW_KEY_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString stopRowKey_;
/**
 * <code>optional bytes stop_row_key = 1;</code>
 */
public boolean hasStopRowKey() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional bytes stop_row_key = 1;</code>
 * Returns ByteString.EMPTY when unset (the initFields default).
 */
public com.google.protobuf.ByteString getStopRowKey() {
return stopRowKey_;
}
// Sets field defaults; called before parsing and for the default instance.
private void initFields() {
stopRowKey_ = com.google.protobuf.ByteString.EMPTY;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// Always initialized: no required fields and no nested messages to check.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
// Serializes stop_row_key (only if present) then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, stopRowKey_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the wire size; safe because the message is immutable.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, stopRowKey_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook: delegates to GeneratedMessage's serialized proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality over stop_row_key presence+value and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) obj;
boolean result = true;
result = result && (hasStopRowKey() == other.hasStopRowKey());
if (hasStopRowKey()) {
result = result && getStopRowKey()
.equals(other.getStopRowKey());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
// Hash consistent with equals(); memoized (0 means "not yet computed").
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasStopRowKey()) {
hash = (37 * hash) + STOP_ROW_KEY_FIELD_NUMBER;
hash = (53 * hash) + getStopRowKey().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
    /**
     * Protobuf type {@code hbase.pb.InclusiveStopFilter}
     *
     * <p>Builder for {@code InclusiveStopFilter} messages.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op here: this message has no sub-message fields needing eager field builders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets the builder to the message's default state and clears the presence bit.
      public Builder clear() {
        super.clear();
        stopRowKey_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance();
      }
      /**
       * Builds the message, throwing if it is not fully initialized.
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      /**
       * Builds the message without the initialization check, copying the
       * builder's field values and presence bits into the new instance.
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.stopRowKey_ = stopRowKey_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Copies any set fields from {@code other} into this builder; a no-op for the default instance.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance()) return this;
        if (other.hasStopRowKey()) {
          setStopRowKey(other.getStopRowKey());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Always initialized: the only field is optional.
      public final boolean isInitialized() {
        return true;
      }
      // Parses from a stream; on parse failure the partially-parsed message is still merged (see finally).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for this builder's fields (bit 0 = stop_row_key).
      private int bitField0_;

      // optional bytes stop_row_key = 1;
      private com.google.protobuf.ByteString stopRowKey_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes stop_row_key = 1;</code>
       */
      public boolean hasStopRowKey() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bytes stop_row_key = 1;</code>
       */
      public com.google.protobuf.ByteString getStopRowKey() {
        return stopRowKey_;
      }
      /**
       * <code>optional bytes stop_row_key = 1;</code>
       */
      public Builder setStopRowKey(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        stopRowKey_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes stop_row_key = 1;</code>
       */
      public Builder clearStopRowKey() {
        bitField0_ = (bitField0_ & ~0x00000001);
        stopRowKey_ = getDefaultInstance().getStopRowKey();
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.InclusiveStopFilter)
    }
    // Eagerly creates the shared default instance at class-load time.
    static {
      defaultInstance = new InclusiveStopFilter(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:hbase.pb.InclusiveStopFilter)
}
  /**
   * Accessor interface implemented by both {@code KeyOnlyFilter} and its Builder.
   */
  public interface KeyOnlyFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool len_as_val = 1;
    /**
     * <code>required bool len_as_val = 1;</code>
     */
    boolean hasLenAsVal();
    /**
     * <code>required bool len_as_val = 1;</code>
     */
    boolean getLenAsVal();
  }
  /**
   * Protobuf type {@code hbase.pb.KeyOnlyFilter}
   *
   * <p>Generated message with a single required bool field, {@code len_as_val}.
   */
  public static final class KeyOnlyFilter extends
      com.google.protobuf.GeneratedMessage
      implements KeyOnlyFilterOrBuilder {
    // Use KeyOnlyFilter.newBuilder() to construct.
    private KeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the default instance; fields are left at Java defaults.
    private KeyOnlyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final KeyOnlyFilter defaultInstance;
    public static KeyOnlyFilter getDefaultInstance() {
      return defaultInstance;
    }

    public KeyOnlyFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor used by {@link #PARSER}; reads tags until
     * end-of-stream (tag 0), collecting unrecognized fields into unknownFields.
     */
    private KeyOnlyFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: `default` precedes `case 8` in this generated switch; case
          // ordering has no effect on which label is selected.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              lenAsVal_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class);
    }
    // Parser singleton; delegates to the stream-parsing constructor above.
    public static com.google.protobuf.Parser<KeyOnlyFilter> PARSER =
        new com.google.protobuf.AbstractParser<KeyOnlyFilter>() {
      public KeyOnlyFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new KeyOnlyFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<KeyOnlyFilter> getParserForType() {
      return PARSER;
    }

    // Presence bits for this message's fields (bit 0 = len_as_val).
    private int bitField0_;
    // required bool len_as_val = 1;
    public static final int LEN_AS_VAL_FIELD_NUMBER = 1;
    private boolean lenAsVal_;
    /**
     * <code>required bool len_as_val = 1;</code>
     */
    public boolean hasLenAsVal() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bool len_as_val = 1;</code>
     */
    public boolean getLenAsVal() {
      return lenAsVal_;
    }
    // Resets all fields to their proto default values; called from constructors.
    private void initFields() {
      lenAsVal_ = false;
    }
    // Memoized initialization state: -1 = not yet computed, 0 = missing required fields, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    /**
     * Initialized only when the required field len_as_val has been set.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      if (!hasLenAsVal()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    /**
     * Writes the set fields (and any unknown fields) to {@code output} in wire format.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure the serialized size is computed (and memoized) before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, lenAsVal_);
      }
      getUnknownFields().writeTo(output);
    }
    // Memoized wire size of this message: -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, lenAsVal_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Field-by-field equality: presence flag, value (when present), and unknown fields must all match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) obj;

      boolean result = true;
      result = result && (hasLenAsVal() == other.hasLenAsVal());
      if (hasLenAsVal()) {
        result = result && (getLenAsVal()
            == other.getLenAsVal());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    // Memoized hash code: 0 = not yet computed.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLenAsVal()) {
        hash = (37 * hash) + LEN_AS_VAL_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getLenAsVal());  // hashBoolean: boolean-hash helper
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parsing convenience methods; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with the given message's fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.KeyOnlyFilter}
     *
     * <p>Builder for {@code KeyOnlyFilter} messages.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op here: this message has no sub-message fields needing eager field builders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets the builder to the message's default state and clears the presence bit.
      public Builder clear() {
        super.clear();
        lenAsVal_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance();
      }
      /**
       * Builds the message, throwing if the required len_as_val field is unset.
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      /**
       * Builds the message without the initialization check, copying the
       * builder's field values and presence bits into the new instance.
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.lenAsVal_ = lenAsVal_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Copies any set fields from {@code other} into this builder; a no-op for the default instance.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance()) return this;
        if (other.hasLenAsVal()) {
          setLenAsVal(other.getLenAsVal());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Initialized only once the required len_as_val field has been set.
      public final boolean isInitialized() {
        if (!hasLenAsVal()) {
          
          return false;
        }
        return true;
      }
      // Parses from a stream; on parse failure the partially-parsed message is still merged (see finally).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for this builder's fields (bit 0 = len_as_val).
      private int bitField0_;

      // required bool len_as_val = 1;
      private boolean lenAsVal_ ;
      /**
       * <code>required bool len_as_val = 1;</code>
       */
      public boolean hasLenAsVal() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bool len_as_val = 1;</code>
       */
      public boolean getLenAsVal() {
        return lenAsVal_;
      }
      /**
       * <code>required bool len_as_val = 1;</code>
       */
      public Builder setLenAsVal(boolean value) {
        bitField0_ |= 0x00000001;
        lenAsVal_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bool len_as_val = 1;</code>
       */
      public Builder clearLenAsVal() {
        bitField0_ = (bitField0_ & ~0x00000001);
        lenAsVal_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.KeyOnlyFilter)
    }

    // Eagerly creates the shared default instance at class-load time.
    static {
      defaultInstance = new KeyOnlyFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.KeyOnlyFilter)
  }
  /**
   * Accessor interface implemented by both {@code MultipleColumnPrefixFilter} and its Builder.
   */
  public interface MultipleColumnPrefixFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated bytes sorted_prefixes = 1;
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    java.util.List<com.google.protobuf.ByteString> getSortedPrefixesList();
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    int getSortedPrefixesCount();
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    com.google.protobuf.ByteString getSortedPrefixes(int index);
  }
/**
* Protobuf type {@code hbase.pb.MultipleColumnPrefixFilter}
*/
public static final class MultipleColumnPrefixFilter extends
com.google.protobuf.GeneratedMessage
implements MultipleColumnPrefixFilterOrBuilder {
    // Use MultipleColumnPrefixFilter.newBuilder() to construct.
    private MultipleColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the default instance; fields are left at Java defaults.
    private MultipleColumnPrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final MultipleColumnPrefixFilter defaultInstance;
    public static MultipleColumnPrefixFilter getDefaultInstance() {
      return defaultInstance;
    }

    public MultipleColumnPrefixFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor used by {@link #PARSER}; accumulates the
     * repeated sorted_prefixes entries into a mutable list that is frozen
     * (made unmodifiable) in the finally block.
     */
    private MultipleColumnPrefixFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Lazily allocate the backing list on the first element.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                sortedPrefixes_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
                mutable_bitField0_ |= 0x00000001;
              }
              sortedPrefixes_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          sortedPrefixes_ = java.util.Collections.unmodifiableList(sortedPrefixes_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class);
    }
    // Parser singleton; delegates to the stream-parsing constructor above.
    public static com.google.protobuf.Parser<MultipleColumnPrefixFilter> PARSER =
        new com.google.protobuf.AbstractParser<MultipleColumnPrefixFilter>() {
      public MultipleColumnPrefixFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MultipleColumnPrefixFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MultipleColumnPrefixFilter> getParserForType() {
      return PARSER;
    }

    // repeated bytes sorted_prefixes = 1;
    public static final int SORTED_PREFIXES_FIELD_NUMBER = 1;
    private java.util.List<com.google.protobuf.ByteString> sortedPrefixes_;
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    public java.util.List<com.google.protobuf.ByteString>
        getSortedPrefixesList() {
      return sortedPrefixes_;
    }
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    public int getSortedPrefixesCount() {
      return sortedPrefixes_.size();
    }
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    public com.google.protobuf.ByteString getSortedPrefixes(int index) {
      return sortedPrefixes_.get(index);
    }
    // Resets all fields to their proto default values; called from constructors.
    private void initFields() {
      sortedPrefixes_ = java.util.Collections.emptyList();
    }
    // Memoized initialization state: -1 = not yet computed, 0 = missing required fields, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    /**
     * Always initialized: the message's only field (sorted_prefixes) is repeated, not required.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      memoizedIsInitialized = 1;
      return true;
    }
    /**
     * Writes every sorted_prefixes element (and any unknown fields) to {@code output}.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure the serialized size is computed (and memoized) before writing
      for (int i = 0; i < sortedPrefixes_.size(); i++) {
        output.writeBytes(1, sortedPrefixes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
    // Memoized wire size of this message: -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < sortedPrefixes_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(sortedPrefixes_.get(i));
        }
        size += dataSize;
        size += 1 * getSortedPrefixesList().size();  // one tag byte per element
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Equality compares the full element list and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) obj;

      boolean result = true;
      result = result && getSortedPrefixesList()
          .equals(other.getSortedPrefixesList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    // Memoized hash code: 0 = not yet computed.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getSortedPrefixesCount() > 0) {
        hash = (37 * hash) + SORTED_PREFIXES_FIELD_NUMBER;
        hash = (53 * hash) + getSortedPrefixesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parsing convenience methods; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with the given message's fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.MultipleColumnPrefixFilter}
     *
     * Builder accumulating the single repeated {@code sorted_prefixes} field;
     * bit 0 of {@code bitField0_} tracks whether the backing list is a private
     * mutable copy (copy-on-write pattern used by generated builders).
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No nested-message fields here, so nothing to force-initialize.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      /** Resets the builder to the default (empty) state. */
      public Builder clear() {
        super.clear();
        sortedPrefixes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance();
      }
      /** Builds the message, throwing if required fields are unset (none here). */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      /** Builds without an initialization check; freezes the prefix list. */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter(this);
        int from_bitField0_ = bitField0_;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          // Hand the (now immutable) list to the message and clear the
          // "mutable copy" bit so further builder edits re-copy it.
          sortedPrefixes_ = java.util.Collections.unmodifiableList(sortedPrefixes_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.sortedPrefixes_ = sortedPrefixes_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      /** Appends {@code other}'s prefixes to this builder (repeated-field merge semantics). */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance()) return this;
        if (!other.sortedPrefixes_.isEmpty()) {
          if (sortedPrefixes_.isEmpty()) {
            // Adopt other's (immutable) list directly; bit 0 stays clear.
            sortedPrefixes_ = other.sortedPrefixes_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSortedPrefixesIsMutable();
            sortedPrefixes_.addAll(other.sortedPrefixes_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        // No required fields in this message.
        return true;
      }
      /** Parses from the stream and merges the result into this builder. */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      // repeated bytes sorted_prefixes = 1;
      private java.util.List<com.google.protobuf.ByteString> sortedPrefixes_ = java.util.Collections.emptyList();
      // Copy-on-write: replace the shared/immutable list with a private
      // ArrayList before the first mutation.
      private void ensureSortedPrefixesIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          sortedPrefixes_ = new java.util.ArrayList<com.google.protobuf.ByteString>(sortedPrefixes_);
          bitField0_ |= 0x00000001;
        }
      }
      /**
       * <code>repeated bytes sorted_prefixes = 1;</code>
       */
      public java.util.List<com.google.protobuf.ByteString>
          getSortedPrefixesList() {
        return java.util.Collections.unmodifiableList(sortedPrefixes_);
      }
      /**
       * <code>repeated bytes sorted_prefixes = 1;</code>
       */
      public int getSortedPrefixesCount() {
        return sortedPrefixes_.size();
      }
      /**
       * <code>repeated bytes sorted_prefixes = 1;</code>
       */
      public com.google.protobuf.ByteString getSortedPrefixes(int index) {
        return sortedPrefixes_.get(index);
      }
      /**
       * <code>repeated bytes sorted_prefixes = 1;</code>
       */
      public Builder setSortedPrefixes(
          int index, com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSortedPrefixesIsMutable();
        sortedPrefixes_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes sorted_prefixes = 1;</code>
       */
      public Builder addSortedPrefixes(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSortedPrefixesIsMutable();
        sortedPrefixes_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes sorted_prefixes = 1;</code>
       */
      public Builder addAllSortedPrefixes(
          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
        ensureSortedPrefixesIsMutable();
        super.addAll(values, sortedPrefixes_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes sorted_prefixes = 1;</code>
       */
      public Builder clearSortedPrefixes() {
        sortedPrefixes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.MultipleColumnPrefixFilter)
    }
    // Eagerly create the singleton returned by getDefaultInstance();
    // the no-init constructor path is completed by initFields().
    static {
      defaultInstance = new MultipleColumnPrefixFilter(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:hbase.pb.MultipleColumnPrefixFilter)
}
  /** Read-only accessor interface shared by {@code PageFilter} and its Builder. */
  public interface PageFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int64 page_size = 1;
    /**
     * <code>required int64 page_size = 1;</code>
     */
    boolean hasPageSize();
    /**
     * <code>required int64 page_size = 1;</code>
     */
    long getPageSize();
  }
  /**
   * Protobuf type {@code hbase.pb.PageFilter}
   *
   * Immutable message with a single required {@code int64 page_size} field;
   * presence is tracked in bit 0 of {@code bitField0_}.
   */
  public static final class PageFilter extends
      com.google.protobuf.GeneratedMessage
      implements PageFilterOrBuilder {
    // Use PageFilter.newBuilder() to construct.
    private PageFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private PageFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final PageFilter defaultInstance;
    public static PageFilter getDefaultInstance() {
      return defaultInstance;
    }
    public PageFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0),
    // storing unrecognized fields in unknownFields. Each switch arm breaks, so
    // the default arm appearing before case 8 has no fall-through effect.
    private PageFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // field 1, wire type 0 (varint): page_size
              bitField0_ |= 0x00000001;
              pageSize_ = input.readInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.Builder.class);
    }
    public static com.google.protobuf.Parser<PageFilter> PARSER =
        new com.google.protobuf.AbstractParser<PageFilter>() {
      public PageFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new PageFilter(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<PageFilter> getParserForType() {
      return PARSER;
    }
    private int bitField0_;
    // required int64 page_size = 1;
    public static final int PAGE_SIZE_FIELD_NUMBER = 1;
    private long pageSize_;
    /**
     * <code>required int64 page_size = 1;</code>
     */
    public boolean hasPageSize() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required int64 page_size = 1;</code>
     */
    public long getPageSize() {
      return pageSize_;
    }
    private void initFields() {
      pageSize_ = 0L;
    }
    // Cached tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      // page_size is a required field; absence makes the message invalid.
      if (!hasPageSize()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt64(1, pageSize_);
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(1, pageSize_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    /** Value equality over page_size presence/value and unknown fields. */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) obj;

      boolean result = true;
      result = result && (hasPageSize() == other.hasPageSize());
      if (hasPageSize()) {
        result = result && (getPageSize()
            == other.getPageSize());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasPageSize()) {
        hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getPageSize());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse helpers; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    /** Creates a new, empty builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated with the fields of {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.PageFilter}
     *
     * Builder for PageFilter; bit 0 of {@code bitField0_} tracks page_size presence.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No nested-message fields, so nothing to force-initialize.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      /** Resets the builder to the default (empty) state. */
      public Builder clear() {
        super.clear();
        pageSize_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance();
      }
      /** Builds the message; throws if required page_size is unset. */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      /** Builds without an initialization check, copying field + presence bit. */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.pageSize_ = pageSize_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance()) return this;
        if (other.hasPageSize()) {
          setPageSize(other.getPageSize());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        // page_size is required.
        if (!hasPageSize()) {
          
          return false;
        }
        return true;
      }
      /** Parses from the stream and merges the result into this builder. */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      // required int64 page_size = 1;
      private long pageSize_ ;
      /**
       * <code>required int64 page_size = 1;</code>
       */
      public boolean hasPageSize() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required int64 page_size = 1;</code>
       */
      public long getPageSize() {
        return pageSize_;
      }
      /**
       * <code>required int64 page_size = 1;</code>
       */
      public Builder setPageSize(long value) {
        bitField0_ |= 0x00000001;
        pageSize_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required int64 page_size = 1;</code>
       */
      public Builder clearPageSize() {
        bitField0_ = (bitField0_ & ~0x00000001);
        pageSize_ = 0L;
        onChanged();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.PageFilter)
    }
    // Eagerly create the singleton returned by getDefaultInstance().
    static {
      defaultInstance = new PageFilter(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:hbase.pb.PageFilter)
  }
  /** Read-only accessor interface shared by {@code PrefixFilter} and its Builder. */
  public interface PrefixFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bytes prefix = 1;
    /**
     * <code>optional bytes prefix = 1;</code>
     */
    boolean hasPrefix();
    /**
     * <code>optional bytes prefix = 1;</code>
     */
    com.google.protobuf.ByteString getPrefix();
  }
/**
* Protobuf type {@code hbase.pb.PrefixFilter}
*/
public static final class PrefixFilter extends
com.google.protobuf.GeneratedMessage
implements PrefixFilterOrBuilder {
    // Use PrefixFilter.newBuilder() to construct.
    private PrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // no-init path used only by the static initializer for the default instance
    private PrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final PrefixFilter defaultInstance;
    /** Returns the shared immutable default (empty) instance. */
    public static PrefixFilter getDefaultInstance() {
      return defaultInstance;
    }
    public PrefixFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0),
    // storing unrecognized fields in unknownFields. Each switch arm breaks, so
    // the default arm appearing before case 10 has no fall-through effect.
    private PrefixFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // field 1, wire type 2 (length-delimited): prefix
              bitField0_ |= 0x00000001;
              prefix_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.Builder.class);
    }
    // Parser singleton backing all the static parseFrom helpers.
    public static com.google.protobuf.Parser<PrefixFilter> PARSER =
        new com.google.protobuf.AbstractParser<PrefixFilter>() {
      public PrefixFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new PrefixFilter(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<PrefixFilter> getParserForType() {
      return PARSER;
    }
    private int bitField0_;
    // optional bytes prefix = 1; presence tracked in bit 0 of bitField0_.
    public static final int PREFIX_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString prefix_;
    /**
     * <code>optional bytes prefix = 1;</code>
     */
    public boolean hasPrefix() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bytes prefix = 1;</code>
     */
    public com.google.protobuf.ByteString getPrefix() {
      return prefix_;
    }
    private void initFields() {
      prefix_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Cached tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      // No required fields: always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
    /** Serializes set fields (and unknown fields) to the output stream. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, prefix_);
      }
      getUnknownFields().writeTo(output);
    }
    // Cached size; -1 means not yet computed (safe: message is immutable).
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, prefix_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    /** Value equality over prefix presence/value and unknown fields. */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) obj;

      boolean result = true;
      result = result && (hasPrefix() == other.hasPrefix());
      if (hasPrefix()) {
        result = result && getPrefix()
            .equals(other.getPrefix());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    // Cached hash; 0 means not yet computed (safe: message is immutable).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasPrefix()) {
        hash = (37 * hash) + PREFIX_FIELD_NUMBER;
        hash = (53 * hash) + getPrefix().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse helpers; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    /** Parses a length-delimited message (varint size prefix) from the stream. */
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    /** Creates a new, empty builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated with the fields of {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a builder carrying this message's current field values. */
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.PrefixFilter}
     *
     * Builder for PrefixFilter; bit 0 of {@code bitField0_} tracks prefix presence.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No nested-message fields, so nothing to force-initialize.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      /** Resets the builder to the default (empty) state. */
      public Builder clear() {
        super.clear();
        prefix_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance();
      }
      /** Builds the message, throwing if required fields are unset (none here). */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      /** Builds without an initialization check, copying field + presence bit. */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.prefix_ = prefix_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance()) return this;
        if (other.hasPrefix()) {
          setPrefix(other.getPrefix());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        // No required fields in this message.
        return true;
      }
      /** Parses from the stream and merges the result into this builder. */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      // optional bytes prefix = 1;
      private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes prefix = 1;</code>
       */
      public boolean hasPrefix() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bytes prefix = 1;</code>
       */
      public com.google.protobuf.ByteString getPrefix() {
        return prefix_;
      }
      /**
       * <code>optional bytes prefix = 1;</code>
       */
      public Builder setPrefix(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        prefix_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes prefix = 1;</code>
       */
      public Builder clearPrefix() {
        bitField0_ = (bitField0_ & ~0x00000001);
        prefix_ = getDefaultInstance().getPrefix();
        onChanged();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.PrefixFilter)
    }
static {
defaultInstance = new PrefixFilter(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.PrefixFilter)
}
  // Read-only accessor contract for hbase.pb.QualifierFilter, implemented by
  // both the immutable QualifierFilter message and its mutable Builder.
  public interface QualifierFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // required .hbase.pb.CompareFilter compare_filter = 1;
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     * @return whether the required compare_filter field has been set
     */
    boolean hasCompareFilter();
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     * @return the compare_filter message
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     * @return a read-only view of compare_filter (message or live builder)
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
  }
  /**
   * Protobuf type {@code hbase.pb.QualifierFilter}
   *
   * <p>protoc-generated immutable message with a single required
   * {@code CompareFilter} field; construct via {@link #newBuilder()}.
   * Generated code (see the {@code @@protoc_insertion_point} markers) —
   * do not hand-edit.
   */
  public static final class QualifierFilter extends
      com.google.protobuf.GeneratedMessage
      implements QualifierFilterOrBuilder {
    // Use QualifierFilter.newBuilder() to construct.
    private QualifierFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path: used only by the static default-instance initializer below.
    private QualifierFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final QualifierFilter defaultInstance;
    public static QualifierFilter getDefaultInstance() {
      return defaultInstance;
    }
    public QualifierFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
    // Fields that arrived on the wire but are not declared in the schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format constructor: consumes tags until end of input (tag 0),
    // routing tag 10 (field 1, length-delimited) into compareFilter_ and
    // anything unrecognized into unknownFields.
    private QualifierFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field already seen: merge the new payload into the old value.
                subBuilder = compareFilter_.toBuilder();
              }
              compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(compareFilter_);
                compareFilter_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach whatever was accumulated, even when parsing failed.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.Builder.class);
    }
    public static com.google.protobuf.Parser<QualifierFilter> PARSER =
        new com.google.protobuf.AbstractParser<QualifierFilter>() {
      public QualifierFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new QualifierFilter(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<QualifierFilter> getParserForType() {
      return PARSER;
    }
    // Presence bitmap; bit 0x00000001 tracks compare_filter.
    private int bitField0_;
    // required .hbase.pb.CompareFilter compare_filter = 1;
    public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     * @return whether compare_filter was explicitly set
     */
    public boolean hasCompareFilter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     * @return the compare_filter message (default instance when unset)
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
      return compareFilter_;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     * @return read-only view of compare_filter
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
      return compareFilter_;
    }
    private void initFields() {
      compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
    }
    // Tri-state cache: -1 = not yet computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      // compare_filter is required and must itself be fully initialized.
      if (!hasCompareFilter()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getCompareFilter().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, compareFilter_);
      }
      getUnknownFields().writeTo(output);
    }
    // Cached wire size; -1 until first computed by getSerializedSize().
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, compareFilter_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) obj;
      boolean result = true;
      result = result && (hasCompareFilter() == other.hasCompareFilter());
      if (hasCompareFilter()) {
        result = result && getCompareFilter()
            .equals(other.getCompareFilter());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    // Cached hash; 0 means "not yet computed" (recomputed if hash happens to be 0).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasCompareFilter()) {
        hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
        hash = (53 * hash) + getCompareFilter().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.QualifierFilter}
     *
     * <p>Mutable builder; exactly one of {@code compareFilter_} or
     * {@code compareFilterBuilder_} holds the field at any time.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-field builder when the runtime requests it.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getCompareFilterFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets compare_filter to its default and clears its presence bit.
      public Builder clear() {
        super.clear();
        if (compareFilterBuilder_ == null) {
          compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
        } else {
          compareFilterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance();
      }
      // build() enforces required-field initialization; buildPartial() does not.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (compareFilterBuilder_ == null) {
          result.compareFilter_ = compareFilter_;
        } else {
          result.compareFilter_ = compareFilterBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance()) return this;
        if (other.hasCompareFilter()) {
          mergeCompareFilter(other.getCompareFilter());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Builder variant is not memoized (state is mutable).
      public final boolean isInitialized() {
        if (!hasCompareFilter()) {
          return false;
        }
        if (!getCompareFilter().isInitialized()) {
          return false;
        }
        return true;
      }
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmap for the builder; bit 0x00000001 tracks compare_filter.
      private int bitField0_;
      // required .hbase.pb.CompareFilter compare_filter = 1;
      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       */
      public boolean hasCompareFilter() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
        if (compareFilterBuilder_ == null) {
          return compareFilter_;
        } else {
          return compareFilterBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       */
      public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
        if (compareFilterBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          compareFilter_ = value;
          onChanged();
        } else {
          compareFilterBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       */
      public Builder setCompareFilter(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
        if (compareFilterBuilder_ == null) {
          compareFilter_ = builderForValue.build();
          onChanged();
        } else {
          compareFilterBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       */
      public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
        if (compareFilterBuilder_ == null) {
          // Merge only when a non-default value is already present;
          // otherwise adopt the incoming value wholesale.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) {
            compareFilter_ =
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial();
          } else {
            compareFilter_ = value;
          }
          onChanged();
        } else {
          compareFilterBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       */
      public Builder clearCompareFilter() {
        if (compareFilterBuilder_ == null) {
          compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
          onChanged();
        } else {
          compareFilterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       * Marks the field present and hands back a live nested builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getCompareFilterFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
        if (compareFilterBuilder_ != null) {
          return compareFilterBuilder_.getMessageOrBuilder();
        } else {
          return compareFilter_;
        }
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       * Lazily creates the SingleFieldBuilder; after creation compareFilter_
       * is nulled and the builder becomes the sole owner of the value.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> 
          getCompareFilterFieldBuilder() {
        if (compareFilterBuilder_ == null) {
          compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
                  compareFilter_,
                  getParentForChildren(),
                  isClean());
          compareFilter_ = null;
        }
        return compareFilterBuilder_;
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.QualifierFilter)
    }
    static {
      defaultInstance = new QualifierFilter(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:hbase.pb.QualifierFilter)
  }
  // Read-only accessor contract for hbase.pb.RandomRowFilter, implemented by
  // both the immutable RandomRowFilter message and its Builder.
  public interface RandomRowFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // required float chance = 1;
    /**
     * <code>required float chance = 1;</code>
     * @return whether the required chance field has been set
     */
    boolean hasChance();
    /**
     * <code>required float chance = 1;</code>
     * @return the chance field value
     */
    float getChance();
  }
/**
* Protobuf type {@code hbase.pb.RandomRowFilter}
*/
public static final class RandomRowFilter extends
com.google.protobuf.GeneratedMessage
implements RandomRowFilterOrBuilder {
// Use RandomRowFilter.newBuilder() to construct.
private RandomRowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private RandomRowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final RandomRowFilter defaultInstance;
public static RandomRowFilter getDefaultInstance() {
return defaultInstance;
}
public RandomRowFilter getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private RandomRowFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 13: {
bitField0_ |= 0x00000001;
chance_ = input.readFloat();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class);
}
public static com.google.protobuf.Parser<RandomRowFilter> PARSER =
new com.google.protobuf.AbstractParser<RandomRowFilter>() {
public RandomRowFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RandomRowFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RandomRowFilter> getParserForType() {
return PARSER;
}
private int bitField0_;
// required float chance = 1;
public static final int CHANCE_FIELD_NUMBER = 1;
private float chance_;
/**
* <code>required float chance = 1;</code>
*/
public boolean hasChance() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required float chance = 1;</code>
*/
public float getChance() {
return chance_;
}
private void initFields() {
chance_ = 0F;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasChance()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeFloat(1, chance_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeFloatSize(1, chance_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) obj;
boolean result = true;
result = result && (hasChance() == other.hasChance());
if (hasChance()) {
result = result && (Float.floatToIntBits(getChance()) == Float.floatToIntBits(other.getChance()));
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasChance()) {
hash = (37 * hash) + CHANCE_FIELD_NUMBER;
hash = (53 * hash) + Float.floatToIntBits(
getChance());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hbase.pb.RandomRowFilter}
 *
 * <p>protoc-generated builder: accumulates the single required float field
 * {@code chance} and tracks its presence via bit 0 of {@code bitField0_}.
 * Code is byte-identical to the generator output; only comments were added.
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessage.Builder<Builder>
   implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilterOrBuilder {
  // Descriptor for hbase.pb.RandomRowFilter (shared with the message class).
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class);
  }

  // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // No nested-message fields in this message, so there is nothing to
  // eagerly initialize; the empty body is intentional generator output.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
    }
  }

  private static Builder create() {
    return new Builder();
  }

  // Resets 'chance' to its default (0) and clears its presence bit.
  public Builder clear() {
    super.clear();
    chance_ = 0F;
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
  }

  public Builder clone() {
    return create().mergeFrom(buildPartial());
  }

  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_descriptor;
  }

  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter getDefaultInstanceForType() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance();
  }

  // Builds and validates; throws if the required 'chance' field is unset.
  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter build() {
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  // Builds without validating required fields.
  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter buildPartial() {
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
      to_bitField0_ |= 0x00000001;
    }
    result.chance_ = chance_;
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }

  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) {
      return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other) {
    if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance()) return this;
    if (other.hasChance()) {
      setChance(other.getChance());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    return this;
  }

  // 'chance' is a required proto2 field; absence means uninitialized.
  public final boolean isInitialized() {
    if (!hasChance()) {
      return false;
    }
    return true;
  }

  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever was parsed before the failure, then rethrow.
      parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) e.getUnfinishedMessage();
      throw e;
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }

  private int bitField0_;

  // required float chance = 1;
  private float chance_ ;
  /**
   * <code>required float chance = 1;</code>
   */
  public boolean hasChance() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required float chance = 1;</code>
   */
  public float getChance() {
    return chance_;
  }
  /**
   * <code>required float chance = 1;</code>
   */
  public Builder setChance(float value) {
    bitField0_ |= 0x00000001;
    chance_ = value;
    onChanged();
    return this;
  }
  /**
   * <code>required float chance = 1;</code>
   */
  public Builder clearChance() {
    bitField0_ = (bitField0_ & ~0x00000001);
    chance_ = 0F;
    onChanged();
    return this;
  }

  // @@protoc_insertion_point(builder_scope:hbase.pb.RandomRowFilter)
}
// Eagerly creates the shared default instance returned by getDefaultInstance().
static {
  defaultInstance = new RandomRowFilter(true);
  defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.RandomRowFilter)
}
/**
 * Accessor contract shared by the {@code RowFilter} message and its Builder.
 */
public interface RowFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required .hbase.pb.CompareFilter compare_filter = 1;
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  boolean hasCompareFilter();
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
}
/**
 * Protobuf type {@code hbase.pb.RowFilter}
 *
 * <p>Wraps a single required nested {@code CompareFilter} (field 1).
 * protoc-generated; code is byte-identical to the generator output,
 * only comments were added.
 */
public static final class RowFilter extends
    com.google.protobuf.GeneratedMessage
    implements RowFilterOrBuilder {
  // Use RowFilter.newBuilder() to construct.
  private RowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // Constructor for the shared default instance; leaves fields at defaults.
  private RowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

  private static final RowFilter defaultInstance;
  public static RowFilter getDefaultInstance() {
    return defaultInstance;
  }

  public RowFilter getDefaultInstanceForType() {
    return defaultInstance;
  }

  // Fields that arrived on the wire but are not in this message's schema.
  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tag/value pairs until end of
  // input (tag 0), storing field 1 and preserving unknown fields.
  private RowFilter(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // Field 1 (length-delimited): compare_filter.
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
            if (((bitField0_ & 0x00000001) == 0x00000001)) {
              subBuilder = compareFilter_.toBuilder();
            }
            compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
            if (subBuilder != null) {
              // Merge a repeated occurrence into the previously-read value.
              subBuilder.mergeFrom(compareFilter_);
              compareFilter_ = subBuilder.buildPartial();
            }
            bitField0_ |= 0x00000001;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.Builder.class);
  }

  // Stateless stream parser; delegates to the parsing constructor above.
  public static com.google.protobuf.Parser<RowFilter> PARSER =
      new com.google.protobuf.AbstractParser<RowFilter>() {
    public RowFilter parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new RowFilter(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<RowFilter> getParserForType() {
    return PARSER;
  }

  private int bitField0_;
  // required .hbase.pb.CompareFilter compare_filter = 1;
  public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
  private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  public boolean hasCompareFilter() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
    return compareFilter_;
  }
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
    return compareFilter_;
  }

  // Resets field 1 to its default instance (presence bit stays clear).
  private void initFields() {
    compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;
    if (!hasCompareFilter()) {
      memoizedIsInitialized = 0;
      return false;
    }
    if (!getCompareFilter().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeMessage(1, compareFilter_);
    }
    getUnknownFields().writeTo(output);
  }

  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, compareFilter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  // Java serialization is routed through protobuf's serialized-bytes proxy.
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) obj;

    boolean result = true;
    result = result && (hasCompareFilter() == other.hasCompareFilter());
    if (hasCompareFilter()) {
      result = result && getCompareFilter()
          .equals(other.getCompareFilter());
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  // Hash memoized; 0 doubles as the "not yet computed" sentinel.
  private int memoizedHashCode = 0;
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasCompareFilter()) {
      hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
      hash = (53 * hash) + getCompareFilter().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Static parse entry points; all delegate to PARSER.
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  // Builder pre-populated from an existing message.
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.RowFilter}
   *
   * <p>Builder for {@code RowFilter}; manages the required nested
   * {@code compare_filter} message either inline ({@code compareFilter_})
   * or through a lazily-created {@code SingleFieldBuilder}.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilterOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates the nested-message field builder when the runtime
    // requests it (alwaysUseFieldBuilders).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        getCompareFilterFieldBuilder();
      }
    }

    private static Builder create() {
      return new Builder();
    }

    // Resets compare_filter to its default and clears its presence bit.
    public Builder clear() {
      super.clear();
      if (compareFilterBuilder_ == null) {
        compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
      } else {
        compareFilterBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_descriptor;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance();
    }

    // Builds and validates; throws if required fields are unset.
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter build() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds without validating required fields.
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      if (compareFilterBuilder_ == null) {
        result.compareFilter_ = compareFilter_;
      } else {
        result.compareFilter_ = compareFilterBuilder_.build();
      }
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance()) return this;
      if (other.hasCompareFilter()) {
        mergeCompareFilter(other.getCompareFilter());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    // Required compare_filter must be present and itself initialized.
    public final boolean isInitialized() {
      if (!hasCompareFilter()) {
        return false;
      }
      if (!getCompareFilter().isInitialized()) {
        return false;
      }
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow.
        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private int bitField0_;

    // required .hbase.pb.CompareFilter compare_filter = 1;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
    // Lazily-created helper that tracks nested-builder state for field 1;
    // while non-null it is authoritative over compareFilter_.
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public boolean hasCompareFilter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
      if (compareFilterBuilder_ == null) {
        return compareFilter_;
      } else {
        return compareFilterBuilder_.getMessage();
      }
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
      if (compareFilterBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        compareFilter_ = value;
        onChanged();
      } else {
        compareFilterBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder setCompareFilter(
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
      if (compareFilterBuilder_ == null) {
        compareFilter_ = builderForValue.build();
        onChanged();
      } else {
        compareFilterBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
      if (compareFilterBuilder_ == null) {
        // Merge into an existing non-default value; otherwise replace.
        if (((bitField0_ & 0x00000001) == 0x00000001) &&
            compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) {
          compareFilter_ =
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial();
        } else {
          compareFilter_ = value;
        }
        onChanged();
      } else {
        compareFilterBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder clearCompareFilter() {
      if (compareFilterBuilder_ == null) {
        compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
        onChanged();
      } else {
        compareFilterBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getCompareFilterFieldBuilder().getBuilder();
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
      if (compareFilterBuilder_ != null) {
        return compareFilterBuilder_.getMessageOrBuilder();
      } else {
        return compareFilter_;
      }
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>
        getCompareFilterFieldBuilder() {
      if (compareFilterBuilder_ == null) {
        compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
                compareFilter_,
                getParentForChildren(),
                isClean());
        // Field builder is now authoritative; drop the inline copy.
        compareFilter_ = null;
      }
      return compareFilterBuilder_;
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.RowFilter)
  }

  // Eagerly creates the shared default instance used by getDefaultInstance().
  static {
    defaultInstance = new RowFilter(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.RowFilter)
}
/**
 * Accessor contract shared by the {@code SingleColumnValueExcludeFilter}
 * message and its Builder.
 */
public interface SingleColumnValueExcludeFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;
  /**
   * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
   */
  boolean hasSingleColumnValueFilter();
  /**
   * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter();
  /**
   * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.SingleColumnValueExcludeFilter}
*/
public static final class SingleColumnValueExcludeFilter extends
com.google.protobuf.GeneratedMessage
implements SingleColumnValueExcludeFilterOrBuilder {
// Use SingleColumnValueExcludeFilter.newBuilder() to construct.
private SingleColumnValueExcludeFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
// Constructor for the shared default instance; leaves fields at defaults.
private SingleColumnValueExcludeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

private static final SingleColumnValueExcludeFilter defaultInstance;
public static SingleColumnValueExcludeFilter getDefaultInstance() {
  return defaultInstance;
}

public SingleColumnValueExcludeFilter getDefaultInstanceForType() {
  return defaultInstance;
}

// Fields that arrived on the wire but are not in this message's schema.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of
// input (tag 0), storing field 1 and preserving unknown fields.
private SingleColumnValueExcludeFilter(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          // Field 1 (length-delimited): single_column_value_filter.
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder subBuilder = null;
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            subBuilder = singleColumnValueFilter_.toBuilder();
          }
          singleColumnValueFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.PARSER, extensionRegistry);
          if (subBuilder != null) {
            // Merge a repeated occurrence into the previously-read value.
            subBuilder.mergeFrom(singleColumnValueFilter_);
            singleColumnValueFilter_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000001;
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class);
}

// Stateless stream parser; delegates to the parsing constructor above.
public static com.google.protobuf.Parser<SingleColumnValueExcludeFilter> PARSER =
    new com.google.protobuf.AbstractParser<SingleColumnValueExcludeFilter>() {
  public SingleColumnValueExcludeFilter parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new SingleColumnValueExcludeFilter(input, extensionRegistry);
  }
};

@java.lang.Override
public com.google.protobuf.Parser<SingleColumnValueExcludeFilter> getParserForType() {
  return PARSER;
}
private int bitField0_;
// required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;
public static final int SINGLE_COLUMN_VALUE_FILTER_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_;
/**
 * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
 */
public boolean hasSingleColumnValueFilter() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() {
  return singleColumnValueFilter_;
}
/**
 * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() {
  return singleColumnValueFilter_;
}

// Resets field 1 to its default instance (presence bit stays clear).
private void initFields() {
  singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance();
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;
  // Required nested message must be present and itself initialized.
  if (!hasSingleColumnValueFilter()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!getSingleColumnValueFilter().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
// Serializes the single present field followed by any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(1, singleColumnValueFilter_);
  }
  getUnknownFields().writeTo(output);
}

private int memoizedSerializedSize = -1;
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(1, singleColumnValueFilter_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
private static final long serialVersionUID = 0L;
// Java serialization is routed through protobuf's serialized-bytes proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) obj;

  boolean result = true;
  result = result && (hasSingleColumnValueFilter() == other.hasSingleColumnValueFilter());
  if (hasSingleColumnValueFilter()) {
    result = result && getSingleColumnValueFilter()
        .equals(other.getSingleColumnValueFilter());
  }
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}

// Hash memoized; 0 doubles as the "not yet computed" sentinel.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasSingleColumnValueFilter()) {
    hash = (37 * hash) + SINGLE_COLUMN_VALUE_FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getSingleColumnValueFilter().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse entry points; all delegate to PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
    /**
     * Protobuf type {@code hbase.pb.SingleColumnValueExcludeFilter}
     *
     * NOTE(review): generated builder. The message has exactly one field, the
     * required {@code single_column_value_filter} sub-message; its presence is
     * tracked in bit 0x00000001 of {@code bitField0_}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getSingleColumnValueFilterFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets the sub-message (or its nested builder, if one has been created)
      // and clears the presence bit.
      public Builder clear() {
        super.clear();
        if (singleColumnValueFilterBuilder_ == null) {
          singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance();
        } else {
          singleColumnValueFilterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance();
      }
      // build() enforces required-field initialization; buildPartial() does not.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (singleColumnValueFilterBuilder_ == null) {
          result.singleColumnValueFilter_ = singleColumnValueFilter_;
        } else {
          result.singleColumnValueFilter_ = singleColumnValueFilterBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance()) return this;
        if (other.hasSingleColumnValueFilter()) {
          mergeSingleColumnValueFilter(other.getSingleColumnValueFilter());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        if (!hasSingleColumnValueFilter()) {
          return false;
        }
        if (!getSingleColumnValueFilter().isInitialized()) {
          return false;
        }
        return true;
      }
      // NOTE: on a parse failure, whatever was successfully read is still merged
      // into this builder (finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      // required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;
      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> singleColumnValueFilterBuilder_;
      /**
       * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
       */
      public boolean hasSingleColumnValueFilter() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() {
        if (singleColumnValueFilterBuilder_ == null) {
          return singleColumnValueFilter_;
        } else {
          return singleColumnValueFilterBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
       */
      public Builder setSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) {
        if (singleColumnValueFilterBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          singleColumnValueFilter_ = value;
          onChanged();
        } else {
          singleColumnValueFilterBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
       */
      public Builder setSingleColumnValueFilter(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder builderForValue) {
        if (singleColumnValueFilterBuilder_ == null) {
          singleColumnValueFilter_ = builderForValue.build();
          onChanged();
        } else {
          singleColumnValueFilterBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
       *
       * Merge semantics: if the field is already present and not the default
       * instance, field-wise merge {@code value} into it; otherwise replace it.
       */
      public Builder mergeSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) {
        if (singleColumnValueFilterBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              singleColumnValueFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) {
            singleColumnValueFilter_ =
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder(singleColumnValueFilter_).mergeFrom(value).buildPartial();
          } else {
            singleColumnValueFilter_ = value;
          }
          onChanged();
        } else {
          singleColumnValueFilterBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
       */
      public Builder clearSingleColumnValueFilter() {
        if (singleColumnValueFilterBuilder_ == null) {
          singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance();
          onChanged();
        } else {
          singleColumnValueFilterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder getSingleColumnValueFilterBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getSingleColumnValueFilterFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() {
        if (singleColumnValueFilterBuilder_ != null) {
          return singleColumnValueFilterBuilder_.getMessageOrBuilder();
        } else {
          return singleColumnValueFilter_;
        }
      }
      /**
       * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
       *
       * Lazily creates the nested field builder; once created, the plain
       * {@code singleColumnValueFilter_} field is nulled and the builder
       * becomes the source of truth for the field's value.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder>
          getSingleColumnValueFilterFieldBuilder() {
        if (singleColumnValueFilterBuilder_ == null) {
          singleColumnValueFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder>(
                  singleColumnValueFilter_,
                  getParentForChildren(),
                  isClean());
          singleColumnValueFilter_ = null;
        }
        return singleColumnValueFilterBuilder_;
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.SingleColumnValueExcludeFilter)
    }
    static {
      defaultInstance = new SingleColumnValueExcludeFilter(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:hbase.pb.SingleColumnValueExcludeFilter)
  }
  /**
   * Generated accessor contract shared by {@code hbase.pb.SingleColumnValueFilter}
   * messages and their builders: one has-presence check plus one getter per field.
   */
  public interface SingleColumnValueFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // optional bytes column_family = 1;
    /**
     * <code>optional bytes column_family = 1;</code>
     */
    boolean hasColumnFamily();
    /**
     * <code>optional bytes column_family = 1;</code>
     */
    com.google.protobuf.ByteString getColumnFamily();
    // optional bytes column_qualifier = 2;
    /**
     * <code>optional bytes column_qualifier = 2;</code>
     */
    boolean hasColumnQualifier();
    /**
     * <code>optional bytes column_qualifier = 2;</code>
     */
    com.google.protobuf.ByteString getColumnQualifier();
    // required .hbase.pb.CompareType compare_op = 3;
    /**
     * <code>required .hbase.pb.CompareType compare_op = 3;</code>
     */
    boolean hasCompareOp();
    /**
     * <code>required .hbase.pb.CompareType compare_op = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp();
    // required .hbase.pb.Comparator comparator = 4;
    /**
     * <code>required .hbase.pb.Comparator comparator = 4;</code>
     */
    boolean hasComparator();
    /**
     * <code>required .hbase.pb.Comparator comparator = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator();
    /**
     * <code>required .hbase.pb.Comparator comparator = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder();
    // optional bool filter_if_missing = 5;
    /**
     * <code>optional bool filter_if_missing = 5;</code>
     */
    boolean hasFilterIfMissing();
    /**
     * <code>optional bool filter_if_missing = 5;</code>
     */
    boolean getFilterIfMissing();
    // optional bool latest_version_only = 6;
    /**
     * <code>optional bool latest_version_only = 6;</code>
     */
    boolean hasLatestVersionOnly();
    /**
     * <code>optional bool latest_version_only = 6;</code>
     */
    boolean getLatestVersionOnly();
  }
  /**
   * Protobuf type {@code hbase.pb.SingleColumnValueFilter}
   *
   * NOTE(review): machine-generated (protoc, protobuf-java 2.5 style); edit the
   * .proto and regenerate rather than changing this file.
   */
  public static final class SingleColumnValueFilter extends
      com.google.protobuf.GeneratedMessage
      implements SingleColumnValueFilterOrBuilder {
    // Use SingleColumnValueFilter.newBuilder() to construct.
    private SingleColumnValueFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the static defaultInstance singleton.
    private SingleColumnValueFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final SingleColumnValueFilter defaultInstance;
    public static SingleColumnValueFilter getDefaultInstance() {
      return defaultInstance;
    }
    public SingleColumnValueFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0)
     * or an unparseable field. Unrecognized fields and out-of-range enum values
     * are preserved in unknownFields rather than dropped. Note the generated
     * switch lists `default` before the numbered cases; each case ends in
     * `break`, so the ordering has no fall-through effect.
     */
    private SingleColumnValueFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              columnFamily_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              columnQualifier_ = input.readBytes();
              break;
            }
            case 24: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue);
              if (value == null) {
                // Unknown enum number: keep it as an unknown varint field.
                unknownFields.mergeVarintField(3, rawValue);
              } else {
                bitField0_ |= 0x00000004;
                compareOp_ = value;
              }
              break;
            }
            case 34: {
              // If comparator was already seen, merge the new occurrence into
              // the previous value (standard last-message-merge semantics).
              org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) == 0x00000008)) {
                subBuilder = comparator_.toBuilder();
              }
              comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(comparator_);
                comparator_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              filterIfMissing_ = input.readBool();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              latestVersionOnly_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class);
    }
    // NOTE(review): PARSER is public static and non-final -- an artifact of
    // protobuf-java 2.5 codegen. Treat it as a constant; do not reassign.
    public static com.google.protobuf.Parser<SingleColumnValueFilter> PARSER =
        new com.google.protobuf.AbstractParser<SingleColumnValueFilter>() {
      public SingleColumnValueFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SingleColumnValueFilter(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<SingleColumnValueFilter> getParserForType() {
      return PARSER;
    }
    // Field storage. Presence of each optional/required field is tracked as a
    // bit in bitField0_ (bit n corresponds to the masks used in the hazzers
    // below); getters return the raw stored value (defaults set by initFields).
    private int bitField0_;
    // optional bytes column_family = 1;
    public static final int COLUMN_FAMILY_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString columnFamily_;
    /**
     * <code>optional bytes column_family = 1;</code>
     */
    public boolean hasColumnFamily() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bytes column_family = 1;</code>
     */
    public com.google.protobuf.ByteString getColumnFamily() {
      return columnFamily_;
    }
    // optional bytes column_qualifier = 2;
    public static final int COLUMN_QUALIFIER_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString columnQualifier_;
    /**
     * <code>optional bytes column_qualifier = 2;</code>
     */
    public boolean hasColumnQualifier() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes column_qualifier = 2;</code>
     */
    public com.google.protobuf.ByteString getColumnQualifier() {
      return columnQualifier_;
    }
    // required .hbase.pb.CompareType compare_op = 3;
    public static final int COMPARE_OP_FIELD_NUMBER = 3;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_;
    /**
     * <code>required .hbase.pb.CompareType compare_op = 3;</code>
     */
    public boolean hasCompareOp() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required .hbase.pb.CompareType compare_op = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() {
      return compareOp_;
    }
    // required .hbase.pb.Comparator comparator = 4;
    public static final int COMPARATOR_FIELD_NUMBER = 4;
    private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_;
    /**
     * <code>required .hbase.pb.Comparator comparator = 4;</code>
     */
    public boolean hasComparator() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>required .hbase.pb.Comparator comparator = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
      return comparator_;
    }
    /**
     * <code>required .hbase.pb.Comparator comparator = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
      return comparator_;
    }
    // optional bool filter_if_missing = 5;
    public static final int FILTER_IF_MISSING_FIELD_NUMBER = 5;
    private boolean filterIfMissing_;
    /**
     * <code>optional bool filter_if_missing = 5;</code>
     */
    public boolean hasFilterIfMissing() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional bool filter_if_missing = 5;</code>
     */
    public boolean getFilterIfMissing() {
      return filterIfMissing_;
    }
    // optional bool latest_version_only = 6;
    public static final int LATEST_VERSION_ONLY_FIELD_NUMBER = 6;
    private boolean latestVersionOnly_;
    /**
     * <code>optional bool latest_version_only = 6;</code>
     */
    public boolean hasLatestVersionOnly() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional bool latest_version_only = 6;</code>
     */
    public boolean getLatestVersionOnly() {
      return latestVersionOnly_;
    }
    // Assigns proto default values to every field; called before parsing and
    // by the defaultInstance static initializer.
    private void initFields() {
      columnFamily_ = com.google.protobuf.ByteString.EMPTY;
      columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
      compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
      comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
      filterIfMissing_ = false;
      latestVersionOnly_ = false;
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    // The unsynchronized caching is the standard generated pattern (the
    // computation is deterministic and idempotent).
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      if (!hasCompareOp()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasComparator()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getComparator().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes only the fields whose presence bit is set, in field-number
    // order, then any unknown fields. getSerializedSize() is called first to
    // populate nested messages' memoized sizes.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, columnFamily_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, columnQualifier_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeEnum(3, compareOp_.getNumber());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(4, comparator_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBool(5, filterIfMissing_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBool(6, latestVersionOnly_);
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, columnFamily_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, columnQualifier_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(3, compareOp_.getNumber());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, comparator_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(5, filterIfMissing_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(6, latestVersionOnly_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Field-wise equality: two messages are equal when each field's presence
    // matches and, for present fields, the values match; unknown fields must
    // match too.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) obj;
      boolean result = true;
      result = result && (hasColumnFamily() == other.hasColumnFamily());
      if (hasColumnFamily()) {
        result = result && getColumnFamily()
            .equals(other.getColumnFamily());
      }
      result = result && (hasColumnQualifier() == other.hasColumnQualifier());
      if (hasColumnQualifier()) {
        result = result && getColumnQualifier()
            .equals(other.getColumnQualifier());
      }
      result = result && (hasCompareOp() == other.hasCompareOp());
      if (hasCompareOp()) {
        result = result &&
            (getCompareOp() == other.getCompareOp());
      }
      result = result && (hasComparator() == other.hasComparator());
      if (hasComparator()) {
        result = result && getComparator()
            .equals(other.getComparator());
      }
      result = result && (hasFilterIfMissing() == other.hasFilterIfMissing());
      if (hasFilterIfMissing()) {
        result = result && (getFilterIfMissing()
            == other.getFilterIfMissing());
      }
      result = result && (hasLatestVersionOnly() == other.hasLatestVersionOnly());
      if (hasLatestVersionOnly()) {
        result = result && (getLatestVersionOnly()
            == other.getLatestVersionOnly());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    // Memoized hash over descriptor, present fields (mixing in each field
    // number), and unknown fields; consistent with equals() above.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasColumnFamily()) {
        hash = (37 * hash) + COLUMN_FAMILY_FIELD_NUMBER;
        hash = (53 * hash) + getColumnFamily().hashCode();
      }
      if (hasColumnQualifier()) {
        hash = (37 * hash) + COLUMN_QUALIFIER_FIELD_NUMBER;
        hash = (53 * hash) + getColumnQualifier().hashCode();
      }
      if (hasCompareOp()) {
        hash = (37 * hash) + COMPARE_OP_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getCompareOp());
      }
      if (hasComparator()) {
        hash = (37 * hash) + COMPARATOR_FIELD_NUMBER;
        hash = (53 * hash) + getComparator().hashCode();
      }
      if (hasFilterIfMissing()) {
        hash = (37 * hash) + FILTER_IF_MISSING_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getFilterIfMissing());
      }
      if (hasLatestVersionOnly()) {
        hash = (37 * hash) + LATEST_VERSION_ONLY_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getLatestVersionOnly());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse helpers; all overloads delegate to PARSER (generated code).
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // "Delimited" variants read a varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Builder factories; newBuilder(prototype) seeds the builder via mergeFrom.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
/**
* Protobuf type {@code hbase.pb.SingleColumnValueFilter}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getComparatorFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
columnFamily_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
bitField0_ = (bitField0_ & ~0x00000004);
if (comparatorBuilder_ == null) {
comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
} else {
comparatorBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
filterIfMissing_ = false;
bitField0_ = (bitField0_ & ~0x00000010);
latestVersionOnly_ = false;
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message WITHOUT required-field validation. Copies each field
// value into the result and translates the Builder's has-bits
// (from_bitField0_) into the message's has-bits (to_bitField0_) one flag at
// a time; field values are copied unconditionally, the has-bit alone decides
// presence.
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.columnFamily_ = columnFamily_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.columnQualifier_ = columnQualifier_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.compareOp_ = compareOp_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
// comparator may live inline or in the sub-builder; take whichever is active.
if (comparatorBuilder_ == null) {
result.comparator_ = comparator_;
} else {
result.comparator_ = comparatorBuilder_.build();
}
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.filterIfMissing_ = filterIfMissing_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
result.latestVersionOnly_ = latestVersionOnly_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge entry point: dispatches to the type-specific overload when
// possible, otherwise falls back to reflection-based merging in the base class.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Type-specific merge: for every field present in `other`, overwrites (scalar
// fields) or recursively merges (the comparator sub-message) into this
// Builder; fields absent in `other` are left untouched.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other) {
// Merging the default instance is a no-op by definition.
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) return this;
if (other.hasColumnFamily()) {
setColumnFamily(other.getColumnFamily());
}
if (other.hasColumnQualifier()) {
setColumnQualifier(other.getColumnQualifier());
}
if (other.hasCompareOp()) {
setCompareOp(other.getCompareOp());
}
if (other.hasComparator()) {
mergeComparator(other.getComparator());
}
if (other.hasFilterIfMissing()) {
setFilterIfMissing(other.getFilterIfMissing());
}
if (other.hasLatestVersionOnly()) {
setLatestVersionOnly(other.getLatestVersionOnly());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// A SingleColumnValueFilter is initialized only when both required fields
// (compare_op and comparator) are set, and the comparator sub-message is
// itself fully initialized.
public final boolean isInitialized() {
if (!hasCompareOp()) {
return false;
}
if (!hasComparator()) {
return false;
}
if (!getComparator().isInitialized()) {
return false;
}
return true;
}
// Parses a SingleColumnValueFilter from the stream and merges it into this
// Builder. On a parse failure the partially decoded message (attached to the
// exception) is still merged in the finally block before rethrowing, matching
// standard protobuf merge semantics.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Has-bits for this Builder: one bit per field, assigned in field order
// (0x01 column_family ... 0x20 latest_version_only).
private int bitField0_;
// optional bytes column_family = 1;
private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY;
/**
 * Returns true when column_family has been explicitly set on this Builder.
 *
 * <code>optional bytes column_family = 1;</code>
 */
public boolean hasColumnFamily() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * Returns the current value (ByteString.EMPTY when unset).
 *
 * <code>optional bytes column_family = 1;</code>
 */
public com.google.protobuf.ByteString getColumnFamily() {
return columnFamily_;
}
/**
 * Sets column_family; null is rejected (proto fields are never null).
 *
 * <code>optional bytes column_family = 1;</code>
 */
public Builder setColumnFamily(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
columnFamily_ = value;
onChanged();
return this;
}
/**
 * Clears column_family back to the default instance's value and drops its
 * has-bit.
 *
 * <code>optional bytes column_family = 1;</code>
 */
public Builder clearColumnFamily() {
bitField0_ = (bitField0_ & ~0x00000001);
columnFamily_ = getDefaultInstance().getColumnFamily();
onChanged();
return this;
}
// optional bytes column_qualifier = 2;
private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
/**
 * Returns true when column_qualifier has been explicitly set on this Builder.
 *
 * <code>optional bytes column_qualifier = 2;</code>
 */
public boolean hasColumnQualifier() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * Returns the current value (ByteString.EMPTY when unset).
 *
 * <code>optional bytes column_qualifier = 2;</code>
 */
public com.google.protobuf.ByteString getColumnQualifier() {
return columnQualifier_;
}
/**
 * Sets column_qualifier; null is rejected.
 *
 * <code>optional bytes column_qualifier = 2;</code>
 */
public Builder setColumnQualifier(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
columnQualifier_ = value;
onChanged();
return this;
}
/**
 * Clears column_qualifier to the default instance's value and drops its
 * has-bit.
 *
 * <code>optional bytes column_qualifier = 2;</code>
 */
public Builder clearColumnQualifier() {
bitField0_ = (bitField0_ & ~0x00000002);
columnQualifier_ = getDefaultInstance().getColumnQualifier();
onChanged();
return this;
}
// required .hbase.pb.CompareType compare_op = 3;
// Enum default is the first declared value, CompareType.LESS.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
/**
 * Returns true when compare_op has been explicitly set on this Builder.
 *
 * <code>required .hbase.pb.CompareType compare_op = 3;</code>
 */
public boolean hasCompareOp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * Returns the current value (LESS when unset).
 *
 * <code>required .hbase.pb.CompareType compare_op = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() {
return compareOp_;
}
/**
 * Sets compare_op; null is rejected.
 *
 * <code>required .hbase.pb.CompareType compare_op = 3;</code>
 */
public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
compareOp_ = value;
onChanged();
return this;
}
/**
 * Resets compare_op to its enum default (LESS) and drops its has-bit.
 *
 * <code>required .hbase.pb.CompareType compare_op = 3;</code>
 */
public Builder clearCompareOp() {
bitField0_ = (bitField0_ & ~0x00000004);
compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
onChanged();
return this;
}
// required .hbase.pb.Comparator comparator = 4;
// This sub-message field has dual storage: it is held inline in comparator_
// until getComparatorFieldBuilder() is first called, after which ownership
// moves to comparatorBuilder_ and comparator_ is nulled. Every accessor below
// branches on which representation is active.
private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_;
/**
 * Returns true when comparator has been explicitly set on this Builder.
 *
 * <code>required .hbase.pb.Comparator comparator = 4;</code>
 */
public boolean hasComparator() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * Returns the comparator from whichever storage is active.
 *
 * <code>required .hbase.pb.Comparator comparator = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
if (comparatorBuilder_ == null) {
return comparator_;
} else {
return comparatorBuilder_.getMessage();
}
}
/**
 * Replaces the comparator with the given message; null is rejected.
 *
 * <code>required .hbase.pb.Comparator comparator = 4;</code>
 */
public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
if (comparatorBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
comparator_ = value;
onChanged();
} else {
comparatorBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * Replaces the comparator with the result of building the given sub-builder.
 *
 * <code>required .hbase.pb.Comparator comparator = 4;</code>
 */
public Builder setComparator(
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) {
if (comparatorBuilder_ == null) {
comparator_ = builderForValue.build();
onChanged();
} else {
comparatorBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * Merges `value` into the existing comparator when one is already set and is
 * not the shared default instance (reference comparison); otherwise simply
 * adopts `value`.
 *
 * <code>required .hbase.pb.Comparator comparator = 4;</code>
 */
public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
if (comparatorBuilder_ == null) {
if (((bitField0_ & 0x00000008) == 0x00000008) &&
comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) {
comparator_ =
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial();
} else {
comparator_ = value;
}
onChanged();
} else {
comparatorBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * Clears the comparator to its default instance and drops its has-bit.
 *
 * <code>required .hbase.pb.Comparator comparator = 4;</code>
 */
public Builder clearComparator() {
if (comparatorBuilder_ == null) {
comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
onChanged();
} else {
comparatorBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
/**
 * Returns a mutable sub-builder for in-place editing; marks the field as set.
 *
 * <code>required .hbase.pb.Comparator comparator = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() {
bitField0_ |= 0x00000008;
onChanged();
return getComparatorFieldBuilder().getBuilder();
}
/**
 * Read-only view of the comparator without forcing sub-builder creation.
 *
 * <code>required .hbase.pb.Comparator comparator = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
if (comparatorBuilder_ != null) {
return comparatorBuilder_.getMessageOrBuilder();
} else {
return comparator_;
}
}
/**
 * Lazily creates the SingleFieldBuilder, seeding it with the inline value and
 * then nulling comparator_ so the builder becomes the single source of truth.
 *
 * <code>required .hbase.pb.Comparator comparator = 4;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>
getComparatorFieldBuilder() {
if (comparatorBuilder_ == null) {
comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>(
comparator_,
getParentForChildren(),
isClean());
comparator_ = null;
}
return comparatorBuilder_;
}
// optional bool filter_if_missing = 5;
private boolean filterIfMissing_ ;
/**
 * Returns true when filter_if_missing has been explicitly set.
 *
 * <code>optional bool filter_if_missing = 5;</code>
 */
public boolean hasFilterIfMissing() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * Returns the current value (false when unset).
 *
 * <code>optional bool filter_if_missing = 5;</code>
 */
public boolean getFilterIfMissing() {
return filterIfMissing_;
}
/**
 * Sets filter_if_missing and its has-bit.
 *
 * <code>optional bool filter_if_missing = 5;</code>
 */
public Builder setFilterIfMissing(boolean value) {
bitField0_ |= 0x00000010;
filterIfMissing_ = value;
onChanged();
return this;
}
/**
 * Resets filter_if_missing to false and drops its has-bit.
 *
 * <code>optional bool filter_if_missing = 5;</code>
 */
public Builder clearFilterIfMissing() {
bitField0_ = (bitField0_ & ~0x00000010);
filterIfMissing_ = false;
onChanged();
return this;
}
// optional bool latest_version_only = 6;
private boolean latestVersionOnly_ ;
/**
 * Returns true when latest_version_only has been explicitly set.
 *
 * <code>optional bool latest_version_only = 6;</code>
 */
public boolean hasLatestVersionOnly() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * Returns the current value (false when unset).
 *
 * <code>optional bool latest_version_only = 6;</code>
 */
public boolean getLatestVersionOnly() {
return latestVersionOnly_;
}
/**
 * Sets latest_version_only and its has-bit.
 *
 * <code>optional bool latest_version_only = 6;</code>
 */
public Builder setLatestVersionOnly(boolean value) {
bitField0_ |= 0x00000020;
latestVersionOnly_ = value;
onChanged();
return this;
}
/**
 * Resets latest_version_only to false and drops its has-bit.
 *
 * <code>optional bool latest_version_only = 6;</code>
 */
public Builder clearLatestVersionOnly() {
bitField0_ = (bitField0_ & ~0x00000020);
latestVersionOnly_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.SingleColumnValueFilter)
}
// Class initializer: creates and initializes the shared default instance
// returned by getDefaultInstance().
static {
defaultInstance = new SingleColumnValueFilter(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.SingleColumnValueFilter)
}
// Read-only accessor interface implemented by both SkipFilter and its Builder.
public interface SkipFilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .hbase.pb.Filter filter = 1;
/**
 * Returns true when the wrapped filter has been set.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
boolean hasFilter();
/**
 * Returns the wrapped filter message.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
/**
 * Returns the wrapped filter as a message-or-builder view.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.SkipFilter}
*/
/**
 * Protobuf type {@code hbase.pb.SkipFilter}
 *
 * Generated message with a single required sub-message field
 * (.hbase.pb.Filter filter = 1). Immutable once built; use newBuilder()
 * to construct instances.
 */
public static final class SkipFilter extends
com.google.protobuf.GeneratedMessage
implements SkipFilterOrBuilder {
// Use SkipFilter.newBuilder() to construct.
private SkipFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Minimal constructor used only for the static default instance.
private SkipFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SkipFilter defaultInstance;
// Shared immutable default instance (all fields unset).
public static SkipFilter getDefaultInstance() {
return defaultInstance;
}
public SkipFilter getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are not known to this schema version.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0).
// Unknown tags are preserved in unknownFields; tag 10 (field 1, wire type 2)
// is the filter sub-message, merged into any previously seen value.
private SkipFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
// If filter was already seen, merge the new occurrence into it
// (last-wins field-by-field, per proto2 merge semantics).
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = filter_.toBuilder();
}
filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(filter_);
filter_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.Builder.class);
}
// Parser used by all parseFrom overloads; delegates to the parsing ctor.
public static com.google.protobuf.Parser<SkipFilter> PARSER =
new com.google.protobuf.AbstractParser<SkipFilter>() {
public SkipFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SkipFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SkipFilter> getParserForType() {
return PARSER;
}
// Has-bits for this message (bit 0x01 = filter).
private int bitField0_;
// required .hbase.pb.Filter filter = 1;
public static final int FILTER_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
/**
 * Returns true when the wrapped filter was present on the wire / set.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
public boolean hasFilter() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * Returns the wrapped filter (default instance when unset).
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
return filter_;
}
/**
 * Same value as getFilter(); messages are their own OrBuilder view.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
return filter_;
}
// Sets all fields to their proto defaults; called before parsing.
private void initFields() {
filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// Initialized iff the required filter field is set and itself initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasFilter()) {
memoizedIsInitialized = 0;
return false;
}
if (!getFilter().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields plus unknown fields to the output stream.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, filter_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the serialized byte size of this message.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, filter_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Structural equality: same presence and value for filter, plus equal
// unknown-field sets.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) obj;
boolean result = true;
result = result && (hasFilter() == other.hasFilter());
if (hasFilter()) {
result = result && getFilter()
.equals(other.getFilter());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
// Hash consistent with equals(): folds in the descriptor, each set field
// (tagged with its field number), and unknown fields; memoized after first use.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasFilter()) {
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom entry points; all delegate to PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder creation entry points.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hbase.pb.SkipFilter}
 *
 * Mutable builder for SkipFilter. The filter sub-message uses dual storage:
 * inline in filter_ until getFilterFieldBuilder() is first called, after
 * which the SingleFieldBuilder owns the value.
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the filter sub-builder when the runtime always uses
// field builders; otherwise a no-op.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getFilterFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets the builder to all-defaults and clears the has-bit.
public Builder clear() {
super.clear();
if (filterBuilder_ == null) {
filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
} else {
filterBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
// Deep copy via buildPartial() (no required-field checks during cloning).
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance();
}
// Builds with required-field enforcement; throws if filter is missing.
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without required-field enforcement, copying value and has-bit.
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (filterBuilder_ == null) {
result.filter_ = filter_;
} else {
result.filter_ = filterBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge: dispatches to the typed overload when possible.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: recursively merges the filter field when present in `other`.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance()) return this;
if (other.hasFilter()) {
mergeFilter(other.getFilter());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Initialized iff the required filter is set and itself initialized.
public final boolean isInitialized() {
if (!hasFilter()) {
return false;
}
if (!getFilter().isInitialized()) {
return false;
}
return true;
}
// Parses from the stream and merges; a partially parsed message is still
// merged in the finally block before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Has-bits for this Builder (bit 0x01 = filter).
private int bitField0_;
// required .hbase.pb.Filter filter = 1;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
/**
 * Returns true when filter has been explicitly set on this Builder.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
public boolean hasFilter() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * Returns the filter from whichever storage (inline or sub-builder) is active.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
if (filterBuilder_ == null) {
return filter_;
} else {
return filterBuilder_.getMessage();
}
}
/**
 * Replaces the filter with the given message; null is rejected.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
if (filterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
onChanged();
} else {
filterBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * Replaces the filter with the result of building the given sub-builder.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
public Builder setFilter(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
if (filterBuilder_ == null) {
filter_ = builderForValue.build();
onChanged();
} else {
filterBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * Merges `value` into any existing non-default filter (reference comparison
 * against the shared default instance); otherwise adopts `value` directly.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
if (filterBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
filter_ =
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
} else {
filter_ = value;
}
onChanged();
} else {
filterBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * Clears the filter to its default instance and drops its has-bit.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
public Builder clearFilter() {
if (filterBuilder_ == null) {
filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
onChanged();
} else {
filterBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
 * Returns a mutable sub-builder for in-place editing; marks the field set.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getFilterFieldBuilder().getBuilder();
}
/**
 * Read-only view without forcing sub-builder creation.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
if (filterBuilder_ != null) {
return filterBuilder_.getMessageOrBuilder();
} else {
return filter_;
}
}
/**
 * Lazily creates the SingleFieldBuilder, seeding it with the inline value
 * and nulling filter_ so the builder becomes the single source of truth.
 *
 * <code>required .hbase.pb.Filter filter = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
getFilterFieldBuilder() {
if (filterBuilder_ == null) {
filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
filter_,
getParentForChildren(),
isClean());
filter_ = null;
}
return filterBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.SkipFilter)
}
// Class initializer for the shared default instance.
static {
defaultInstance = new SkipFilter(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.SkipFilter)
}
// Read-only accessor interface implemented by both TimestampsFilter and its
// Builder.
public interface TimestampsFilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated int64 timestamps = 1 [packed = true];
/**
 * Returns the full list of timestamps.
 *
 * <code>repeated int64 timestamps = 1 [packed = true];</code>
 */
java.util.List<java.lang.Long> getTimestampsList();
/**
 * Returns the number of timestamps.
 *
 * <code>repeated int64 timestamps = 1 [packed = true];</code>
 */
int getTimestampsCount();
/**
 * Returns the timestamp at the given index.
 *
 * <code>repeated int64 timestamps = 1 [packed = true];</code>
 */
long getTimestamps(int index);
// optional bool can_hint = 2;
/**
 * Returns true when can_hint has been explicitly set.
 *
 * <code>optional bool can_hint = 2;</code>
 */
boolean hasCanHint();
/**
 * Returns the can_hint value (false when unset).
 *
 * <code>optional bool can_hint = 2;</code>
 */
boolean getCanHint();
}
/**
* Protobuf type {@code hbase.pb.TimestampsFilter}
*/
public static final class TimestampsFilter extends
com.google.protobuf.GeneratedMessage
implements TimestampsFilterOrBuilder {
// Use TimestampsFilter.newBuilder() to construct.
private TimestampsFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Minimal constructor used only for the static default instance.
private TimestampsFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final TimestampsFilter defaultInstance;
// Shared immutable default instance (all fields unset).
public static TimestampsFilter getDefaultInstance() {
return defaultInstance;
}
public TimestampsFilter getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are not known to this schema version.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Accepts the timestamps list in both
// encodings: tag 8 (field 1, varint) for unpacked single values, and tag 10
// (field 1, length-delimited) for the packed form declared in the proto.
// Tag 16 (field 2, varint) is can_hint. The repeated list is allocated
// lazily on first element and made unmodifiable in the finally block.
private TimestampsFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
// Unpacked encoding: one varint per occurrence of field 1.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
timestamps_ = new java.util.ArrayList<java.lang.Long>();
mutable_bitField0_ |= 0x00000001;
}
timestamps_.add(input.readInt64());
break;
}
case 10: {
// Packed encoding: a length-delimited run of varints for field 1.
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
timestamps_ = new java.util.ArrayList<java.lang.Long>();
mutable_bitField0_ |= 0x00000001;
}
while (input.getBytesUntilLimit() > 0) {
timestamps_.add(input.readInt64());
}
input.popLimit(limit);
break;
}
case 16: {
bitField0_ |= 0x00000001;
canHint_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
timestamps_ = java.util.Collections.unmodifiableList(timestamps_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class);
}
public static com.google.protobuf.Parser<TimestampsFilter> PARSER =
new com.google.protobuf.AbstractParser<TimestampsFilter>() {
public TimestampsFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TimestampsFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<TimestampsFilter> getParserForType() {
return PARSER;
}
private int bitField0_;
// repeated int64 timestamps = 1 [packed = true];
public static final int TIMESTAMPS_FIELD_NUMBER = 1;
private java.util.List<java.lang.Long> timestamps_;
/**
* <code>repeated int64 timestamps = 1 [packed = true];</code>
*/
public java.util.List<java.lang.Long>
getTimestampsList() {
return timestamps_;
}
/**
* <code>repeated int64 timestamps = 1 [packed = true];</code>
*/
public int getTimestampsCount() {
return timestamps_.size();
}
/**
* <code>repeated int64 timestamps = 1 [packed = true];</code>
*/
public long getTimestamps(int index) {
return timestamps_.get(index);
}
private int timestampsMemoizedSerializedSize = -1;
// optional bool can_hint = 2;
public static final int CAN_HINT_FIELD_NUMBER = 2;
private boolean canHint_;
/**
* <code>optional bool can_hint = 2;</code>
*/
public boolean hasCanHint() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bool can_hint = 2;</code>
*/
public boolean getCanHint() {
return canHint_;
}
private void initFields() {
timestamps_ = java.util.Collections.emptyList();
canHint_ = false;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (getTimestampsList().size() > 0) {
output.writeRawVarint32(10);
output.writeRawVarint32(timestampsMemoizedSerializedSize);
}
for (int i = 0; i < timestamps_.size(); i++) {
output.writeInt64NoTag(timestamps_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBool(2, canHint_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < timestamps_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeInt64SizeNoTag(timestamps_.get(i));
}
size += dataSize;
if (!getTimestampsList().isEmpty()) {
size += 1;
size += com.google.protobuf.CodedOutputStream
.computeInt32SizeNoTag(dataSize);
}
timestampsMemoizedSerializedSize = dataSize;
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(2, canHint_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) obj;
boolean result = true;
result = result && getTimestampsList()
.equals(other.getTimestampsList());
result = result && (hasCanHint() == other.hasCanHint());
if (hasCanHint()) {
result = result && (getCanHint()
== other.getCanHint());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getTimestampsCount() > 0) {
hash = (37 * hash) + TIMESTAMPS_FIELD_NUMBER;
hash = (53 * hash) + getTimestampsList().hashCode();
}
if (hasCanHint()) {
hash = (37 * hash) + CAN_HINT_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getCanHint());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.TimestampsFilter}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
timestamps_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
canHint_ = false;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
timestamps_ = java.util.Collections.unmodifiableList(timestamps_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.timestamps_ = timestamps_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000001;
}
result.canHint_ = canHint_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance()) return this;
if (!other.timestamps_.isEmpty()) {
if (timestamps_.isEmpty()) {
timestamps_ = other.timestamps_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTimestampsIsMutable();
timestamps_.addAll(other.timestamps_);
}
onChanged();
}
if (other.hasCanHint()) {
setCanHint(other.getCanHint());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated int64 timestamps = 1 [packed = true];
private java.util.List<java.lang.Long> timestamps_ = java.util.Collections.emptyList();
private void ensureTimestampsIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
timestamps_ = new java.util.ArrayList<java.lang.Long>(timestamps_);
bitField0_ |= 0x00000001;
}
}
/**
* <code>repeated int64 timestamps = 1 [packed = true];</code>
*/
public java.util.List<java.lang.Long>
getTimestampsList() {
return java.util.Collections.unmodifiableList(timestamps_);
}
/**
* <code>repeated int64 timestamps = 1 [packed = true];</code>
*/
public int getTimestampsCount() {
return timestamps_.size();
}
/**
* <code>repeated int64 timestamps = 1 [packed = true];</code>
*/
public long getTimestamps(int index) {
return timestamps_.get(index);
}
/**
* <code>repeated int64 timestamps = 1 [packed = true];</code>
*/
public Builder setTimestamps(
int index, long value) {
ensureTimestampsIsMutable();
timestamps_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated int64 timestamps = 1 [packed = true];</code>
*/
public Builder addTimestamps(long value) {
ensureTimestampsIsMutable();
timestamps_.add(value);
onChanged();
return this;
}
/**
* <code>repeated int64 timestamps = 1 [packed = true];</code>
*/
public Builder addAllTimestamps(
java.lang.Iterable<? extends java.lang.Long> values) {
ensureTimestampsIsMutable();
super.addAll(values, timestamps_);
onChanged();
return this;
}
/**
* <code>repeated int64 timestamps = 1 [packed = true];</code>
*/
public Builder clearTimestamps() {
timestamps_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
// optional bool can_hint = 2;
private boolean canHint_ ;
/**
* <code>optional bool can_hint = 2;</code>
*/
public boolean hasCanHint() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bool can_hint = 2;</code>
*/
public boolean getCanHint() {
return canHint_;
}
/**
* <code>optional bool can_hint = 2;</code>
*/
public Builder setCanHint(boolean value) {
bitField0_ |= 0x00000002;
canHint_ = value;
onChanged();
return this;
}
/**
* <code>optional bool can_hint = 2;</code>
*/
public Builder clearCanHint() {
bitField0_ = (bitField0_ & ~0x00000002);
canHint_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.TimestampsFilter)
}
static {
defaultInstance = new TimestampsFilter(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.TimestampsFilter)
}
/**
 * Read-only accessor interface for {@code hbase.pb.ValueFilter} messages,
 * implemented by both the message class and its builder.
 *
 * <p>NOTE(review): protoc-generated; do not hand-edit. The single field is
 * a required nested {@code CompareFilter} message.
 */
public interface ValueFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required .hbase.pb.CompareFilter compare_filter = 1;
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  boolean hasCompareFilter();
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.ValueFilter}
*/
public static final class ValueFilter extends
com.google.protobuf.GeneratedMessage
implements ValueFilterOrBuilder {
// Use ValueFilter.newBuilder() to construct.
private ValueFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ValueFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ValueFilter defaultInstance;
public static ValueFilter getDefaultInstance() {
return defaultInstance;
}
public ValueFilter getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ValueFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = compareFilter_.toBuilder();
}
compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(compareFilter_);
compareFilter_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder.class);
}
public static com.google.protobuf.Parser<ValueFilter> PARSER =
new com.google.protobuf.AbstractParser<ValueFilter>() {
public ValueFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ValueFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ValueFilter> getParserForType() {
return PARSER;
}
private int bitField0_;
// required .hbase.pb.CompareFilter compare_filter = 1;
public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public boolean hasCompareFilter() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
return compareFilter_;
}
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
return compareFilter_;
}
private void initFields() {
compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasCompareFilter()) {
memoizedIsInitialized = 0;
return false;
}
if (!getCompareFilter().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, compareFilter_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, compareFilter_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) obj;
boolean result = true;
result = result && (hasCompareFilter() == other.hasCompareFilter());
if (hasCompareFilter()) {
result = result && getCompareFilter()
.equals(other.getCompareFilter());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasCompareFilter()) {
hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
hash = (53 * hash) + getCompareFilter().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.ValueFilter}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getCompareFilterFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
if (compareFilterBuilder_ == null) {
compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
} else {
compareFilterBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (compareFilterBuilder_ == null) {
result.compareFilter_ = compareFilter_;
} else {
result.compareFilter_ = compareFilterBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance()) return this;
if (other.hasCompareFilter()) {
mergeCompareFilter(other.getCompareFilter());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasCompareFilter()) {
return false;
}
if (!getCompareFilter().isInitialized()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required .hbase.pb.CompareFilter compare_filter = 1;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public boolean hasCompareFilter() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
if (compareFilterBuilder_ == null) {
return compareFilter_;
} else {
return compareFilterBuilder_.getMessage();
}
}
/**
* <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
*/
public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
if (compareFilterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
compareFilter_ = value;
onChanged();
} else {
compareFilterBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       *
       * Builds the given sub-builder and stores the result as compare_filter.
       *
       * @param builderForValue builder whose {@code build()} result becomes the field value
       * @return this builder, for chaining
       */
      public Builder setCompareFilter(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
        if (compareFilterBuilder_ == null) {
          compareFilter_ = builderForValue.build();
          onChanged();
        } else {
          compareFilterBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       *
       * Merges {@code value} into the existing compare_filter: if the field is
       * already present and not the default instance, the two messages are
       * merged field-by-field; otherwise {@code value} simply replaces it.
       *
       * @param value the message to merge in
       * @return this builder, for chaining
       */
      public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
        if (compareFilterBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) {
            compareFilter_ =
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial();
          } else {
            compareFilter_ = value;
          }
          onChanged();
        } else {
          compareFilterBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       *
       * Resets compare_filter to its default instance and clears its presence bit.
       *
       * @return this builder, for chaining
       */
      public Builder clearCompareFilter() {
        if (compareFilterBuilder_ == null) {
          compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
          onChanged();
        } else {
          compareFilterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       *
       * Returns a mutable sub-builder for compare_filter, creating the nested
       * field builder on first use. Marks the field present, since the caller
       * is assumed to be about to populate it.
       *
       * @return the nested builder for in-place editing of compare_filter
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getCompareFilterFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       *
       * @return a read-only view of compare_filter; the live builder view when
       *         a nested field builder exists, otherwise the stored message
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
        if (compareFilterBuilder_ != null) {
          return compareFilterBuilder_.getMessageOrBuilder();
        } else {
          return compareFilter_;
        }
      }
      /**
       * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
       *
       * Lazily creates the SingleFieldBuilder for compare_filter. After
       * creation the builder owns the current value, so the plain field is
       * nulled out to avoid two sources of truth.
       *
       * @return the (possibly just-created) nested field builder
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> 
          getCompareFilterFieldBuilder() {
        if (compareFilterBuilder_ == null) {
          compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
                  compareFilter_,
                  getParentForChildren(),
                  isClean());
          compareFilter_ = null;
        }
        return compareFilterBuilder_;
      }
// @@protoc_insertion_point(builder_scope:hbase.pb.ValueFilter)
}
    // Eagerly create the shared default instance at class-load time; the
    // no-serialization constructor is used, then fields get their defaults.
    static {
      defaultInstance = new ValueFilter(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:hbase.pb.ValueFilter)
}
  /**
   * Read-only accessor interface for {@code hbase.pb.WhileMatchFilter},
   * implemented by both the immutable message and its Builder.
   */
  public interface WhileMatchFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.Filter filter = 1;
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     *
     * @return true if the wrapped filter field has been set
     */
    boolean hasFilter();
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     *
     * @return the wrapped filter message (default instance if unset)
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     *
     * @return a read-only view of the wrapped filter
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
  }
  /**
   * Protobuf type {@code hbase.pb.WhileMatchFilter}
   *
   * Immutable message carrying a single required nested {@code hbase.pb.Filter}
   * field (the wrapped filter). Generated-code structure: wire parsing in the
   * private stream constructor, presence tracked in {@code bitField0_} bit
   * 0x00000001, lazily memoized isInitialized / serialized-size / hashCode.
   */
  public static final class WhileMatchFilter extends
      com.google.protobuf.GeneratedMessage
      implements WhileMatchFilterOrBuilder {
    // Use WhileMatchFilter.newBuilder() to construct.
    private WhileMatchFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the shared default instance.
    private WhileMatchFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final WhileMatchFilter defaultInstance;
    public static WhileMatchFilter getDefaultInstance() {
      return defaultInstance;
    }

    public WhileMatchFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not defined in the .proto are preserved here for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tags off the stream until tag 0
     * (end of input). Tag 10 is field 1 / wire type 2 (length-delimited), i.e.
     * the nested filter message; repeated occurrences of this required field
     * are merged into one. Unknown tags are collected into unknownFields.
     */
    private WhileMatchFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field seen before: merge the new occurrence into the old one.
                subBuilder = filter_.toBuilder();
              }
              filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(filter_);
                filter_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error paths.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class);
    }

    // Shared parser; delegates to the wire-format constructor above.
    public static com.google.protobuf.Parser<WhileMatchFilter> PARSER =
        new com.google.protobuf.AbstractParser<WhileMatchFilter>() {
      public WhileMatchFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new WhileMatchFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<WhileMatchFilter> getParserForType() {
      return PARSER;
    }

    // Presence bitmask; bit 0x00000001 tracks the filter field.
    private int bitField0_;
    // required .hbase.pb.Filter filter = 1;
    public static final int FILTER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     *
     * @return true if the wrapped filter field was set
     */
    public boolean hasFilter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     *
     * @return the wrapped filter (default instance if unset)
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
      return filter_;
    }
    /**
     * <code>required .hbase.pb.Filter filter = 1;</code>
     *
     * @return read-only view of the wrapped filter
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
      return filter_;
    }

    private void initFields() {
      filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The required filter field must be present and itself initialized.
      if (!hasFilter()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getFilter().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, filter_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, filter_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Value equality over the filter field (when present) and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) obj;

      boolean result = true;
      result = result && (hasFilter() == other.hasFilter());
      if (hasFilter()) {
        result = result && getFilter()
            .equals(other.getFilter());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash, consistent with equals(); 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasFilter()) {
        hash = (37 * hash) + FILTER_FIELD_NUMBER;
        hash = (53 * hash) + getFilter().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.WhileMatchFilter}
     *
     * Mutable builder for WhileMatchFilter; mirrors the message's single
     * required filter field with the same presence-bit layout.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          // Eagerly create nested field builders when requested globally.
          getFilterFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (filterBuilder_ == null) {
          filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
        } else {
          filterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance();
      }

      /** Builds the message, throwing if the required filter field is missing. */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /** Builds without enforcing required fields; copies presence bits across. */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (filterBuilder_ == null) {
          result.filter_ = filter_;
        } else {
          result.filter_ = filterBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance()) return this;
        if (other.hasFilter()) {
          mergeFilter(other.getFilter());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasFilter()) {
          
          return false;
        }
        if (!getFilter().isInitialized()) {
          
          return false;
        }
        return true;
      }

      /**
       * Parses from a stream and merges into this builder. On parse failure
       * the partially-parsed message (if any) is still merged in the finally
       * block before the exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmask; bit 0x00000001 tracks the filter field.
      private int bitField0_;

      // required .hbase.pb.Filter filter = 1;
      // Plain storage until filterBuilder_ is created; then the builder owns the value.
      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       *
       * @return true if filter has been set on this builder
       */
      public boolean hasFilter() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       *
       * @return the current filter value (from field or nested builder)
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
        if (filterBuilder_ == null) {
          return filter_;
        } else {
          return filterBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       *
       * Replaces the filter value; rejects null.
       */
      public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filterBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          filter_ = value;
          onChanged();
        } else {
          filterBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       *
       * Builds the given sub-builder and stores the result as the filter value.
       */
      public Builder setFilter(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
        if (filterBuilder_ == null) {
          filter_ = builderForValue.build();
          onChanged();
        } else {
          filterBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       *
       * Merges {@code value} into the existing filter if one is present and
       * non-default; otherwise replaces it outright.
       */
      public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filterBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
            filter_ =
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
          } else {
            filter_ = value;
          }
          onChanged();
        } else {
          filterBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       *
       * Resets the filter to its default instance and clears its presence bit.
       */
      public Builder clearFilter() {
        if (filterBuilder_ == null) {
          filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
          onChanged();
        } else {
          filterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       *
       * @return a mutable sub-builder for in-place editing; marks the field present
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getFilterFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       *
       * @return read-only view: the live builder view when present, else the stored message
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
        if (filterBuilder_ != null) {
          return filterBuilder_.getMessageOrBuilder();
        } else {
          return filter_;
        }
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       *
       * Lazily creates the SingleFieldBuilder; afterwards the plain field is
       * nulled so the builder is the single source of truth.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> 
          getFilterFieldBuilder() {
        if (filterBuilder_ == null) {
          filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
                  filter_,
                  getParentForChildren(),
                  isClean());
          filter_ = null;
        }
        return filterBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.WhileMatchFilter)
    }

    // Eagerly create the shared default instance at class-load time.
    static {
      defaultInstance = new WhileMatchFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.WhileMatchFilter)
  }
  /**
   * Read-only accessor interface for {@code hbase.pb.FilterAllFilter}.
   * The message declares no fields, so no accessors are generated.
   */
  public interface FilterAllFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
/**
* Protobuf type {@code hbase.pb.FilterAllFilter}
*/
public static final class FilterAllFilter extends
com.google.protobuf.GeneratedMessage
implements FilterAllFilterOrBuilder {
// Use FilterAllFilter.newBuilder() to construct.
private FilterAllFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private FilterAllFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final FilterAllFilter defaultInstance;
public static FilterAllFilter getDefaultInstance() {
return defaultInstance;
}
public FilterAllFilter getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private FilterAllFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.Builder.class);
}
public static com.google.protobuf.Parser<FilterAllFilter> PARSER =
new com.google.protobuf.AbstractParser<FilterAllFilter>() {
public FilterAllFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new FilterAllFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<FilterAllFilter> getParserForType() {
return PARSER;
}
private void initFields() {
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter) obj;
boolean result = true;
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.FilterAllFilter}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter(this);
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
      // Parses a FilterAllFilter from the stream and merges it into this
      // builder. If parsing fails part-way, the successfully-read portion
      // (exposed via the exception's unfinished message) is still merged in
      // the finally block before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
// @@protoc_insertion_point(builder_scope:hbase.pb.FilterAllFilter)
}
static {
defaultInstance = new FilterAllFilter(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.FilterAllFilter)
}
  /**
   * Accessor contract implemented by both {@code RowRange} and
   * {@code RowRange.Builder}: a has-presence check plus a getter for each
   * of the four optional fields of {@code hbase.pb.RowRange}.
   */
  public interface RowRangeOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // optional bytes start_row = 1;
    /**
     * <code>optional bytes start_row = 1;</code>
     */
    boolean hasStartRow();
    /**
     * <code>optional bytes start_row = 1;</code>
     */
    com.google.protobuf.ByteString getStartRow();
    // optional bool start_row_inclusive = 2;
    /**
     * <code>optional bool start_row_inclusive = 2;</code>
     */
    boolean hasStartRowInclusive();
    /**
     * <code>optional bool start_row_inclusive = 2;</code>
     */
    boolean getStartRowInclusive();
    // optional bytes stop_row = 3;
    /**
     * <code>optional bytes stop_row = 3;</code>
     */
    boolean hasStopRow();
    /**
     * <code>optional bytes stop_row = 3;</code>
     */
    com.google.protobuf.ByteString getStopRow();
    // optional bool stop_row_inclusive = 4;
    /**
     * <code>optional bool stop_row_inclusive = 4;</code>
     */
    boolean hasStopRowInclusive();
    /**
     * <code>optional bool stop_row_inclusive = 4;</code>
     */
    boolean getStopRowInclusive();
  }
  /**
   * Protobuf type {@code hbase.pb.RowRange}
   *
   * <p>Generated, immutable message describing one row interval: optional
   * start/stop row keys (bytes) plus an inclusivity flag for each endpoint.
   * Field presence is tracked in {@code bitField0_} (one bit per field).
   * Generated code — do not hand-edit; regenerate from the .proto instead.
   */
  public static final class RowRange extends
      com.google.protobuf.GeneratedMessage
      implements RowRangeOrBuilder {
    // Use RowRange.newBuilder() to construct.
    private RowRange(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static initializer below to create the shared default instance.
    private RowRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final RowRange defaultInstance;
    public static RowRange getDefaultInstance() {
      return defaultInstance;
    }
    public RowRange getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until end of
    // message (tag 0) or an unparseable unknown field; unrecognized fields
    // are preserved in unknownFields rather than dropped.
    private RowRange(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            // Tags are (field_number << 3) | wire_type: 10/26 = bytes, 16/32 = bool.
            case 10: {
              bitField0_ |= 0x00000001;
              startRow_ = input.readBytes();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              startRowInclusive_ = input.readBool();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              stopRow_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              stopRowInclusive_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always publish whatever unknown fields were collected, even on failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder.class);
    }
    // Stateless parser delegating to the parsing constructor above.
    public static com.google.protobuf.Parser<RowRange> PARSER =
        new com.google.protobuf.AbstractParser<RowRange>() {
      public RowRange parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RowRange(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<RowRange> getParserForType() {
      return PARSER;
    }
    // Presence bits: 0x1 start_row, 0x2 start_row_inclusive,
    // 0x4 stop_row, 0x8 stop_row_inclusive.
    private int bitField0_;
    // optional bytes start_row = 1;
    public static final int START_ROW_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString startRow_;
    /**
     * <code>optional bytes start_row = 1;</code>
     */
    public boolean hasStartRow() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bytes start_row = 1;</code>
     */
    public com.google.protobuf.ByteString getStartRow() {
      return startRow_;
    }
    // optional bool start_row_inclusive = 2;
    public static final int START_ROW_INCLUSIVE_FIELD_NUMBER = 2;
    private boolean startRowInclusive_;
    /**
     * <code>optional bool start_row_inclusive = 2;</code>
     */
    public boolean hasStartRowInclusive() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool start_row_inclusive = 2;</code>
     */
    public boolean getStartRowInclusive() {
      return startRowInclusive_;
    }
    // optional bytes stop_row = 3;
    public static final int STOP_ROW_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString stopRow_;
    /**
     * <code>optional bytes stop_row = 3;</code>
     */
    public boolean hasStopRow() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes stop_row = 3;</code>
     */
    public com.google.protobuf.ByteString getStopRow() {
      return stopRow_;
    }
    // optional bool stop_row_inclusive = 4;
    public static final int STOP_ROW_INCLUSIVE_FIELD_NUMBER = 4;
    private boolean stopRowInclusive_;
    /**
     * <code>optional bool stop_row_inclusive = 4;</code>
     */
    public boolean hasStopRowInclusive() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional bool stop_row_inclusive = 4;</code>
     */
    public boolean getStopRowInclusive() {
      return stopRowInclusive_;
    }
    // Resets all fields to their proto defaults (empty bytes / false).
    private void initFields() {
      startRow_ = com.google.protobuf.ByteString.EMPTY;
      startRowInclusive_ = false;
      stopRow_ = com.google.protobuf.ByteString.EMPTY;
      stopRowInclusive_ = false;
    }
    // Memoized: -1 = not computed, 1 = initialized. No required fields, so
    // every instance is initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes only the fields whose presence bit is set, then any
    // unknown fields carried over from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, startRow_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, startRowInclusive_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, stopRow_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBool(4, stopRowInclusive_);
      }
      getUnknownFields().writeTo(output);
    }
    // Memoized wire size; safe because the message is immutable.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, startRow_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, startRowInclusive_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, stopRow_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(4, stopRowInclusive_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Field-by-field equality: presence bits must match, and values are
    // compared only when present. Unknown fields participate as well.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange) obj;
      boolean result = true;
      result = result && (hasStartRow() == other.hasStartRow());
      if (hasStartRow()) {
        result = result && getStartRow()
            .equals(other.getStartRow());
      }
      result = result && (hasStartRowInclusive() == other.hasStartRowInclusive());
      if (hasStartRowInclusive()) {
        result = result && (getStartRowInclusive()
            == other.getStartRowInclusive());
      }
      result = result && (hasStopRow() == other.hasStopRow());
      if (hasStopRow()) {
        result = result && getStopRow()
            .equals(other.getStopRow());
      }
      result = result && (hasStopRowInclusive() == other.hasStopRowInclusive());
      if (hasStopRowInclusive()) {
        result = result && (getStopRowInclusive()
            == other.getStopRowInclusive());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    // Memoized hash consistent with equals(); mixes each present field
    // keyed by its field number.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasStartRow()) {
        hash = (37 * hash) + START_ROW_FIELD_NUMBER;
        hash = (53 * hash) + getStartRow().hashCode();
      }
      if (hasStartRowInclusive()) {
        hash = (37 * hash) + START_ROW_INCLUSIVE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getStartRowInclusive());
      }
      if (hasStopRow()) {
        hash = (37 * hash) + STOP_ROW_FIELD_NUMBER;
        hash = (53 * hash) + getStopRow().hashCode();
      }
      if (hasStopRowInclusive()) {
        hash = (37 * hash) + STOP_ROW_INCLUSIVE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getStopRowInclusive());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Convenience parse entry points, all delegating to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.RowRange}
     *
     * <p>Mutable companion of {@link RowRange}. Tracks field presence in its
     * own {@code bitField0_} and copies state into the immutable message in
     * {@link #buildPartial()}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // RowRange has only scalar fields, so no nested builders to initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets every field to its default and clears its presence bit.
      public Builder clear() {
        super.clear();
        startRow_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        startRowInclusive_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        stopRow_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        stopRowInclusive_ = false;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_descriptor;
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance();
      }
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies values unconditionally but presence bits only when set in
      // the builder, so unset fields read as defaults without presence.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.startRow_ = startRow_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.startRowInclusive_ = startRowInclusive_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.stopRow_ = stopRow_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.stopRowInclusive_ = stopRowInclusive_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Typed merge: only fields present in `other` overwrite this builder.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance()) return this;
        if (other.hasStartRow()) {
          setStartRow(other.getStartRow());
        }
        if (other.hasStartRowInclusive()) {
          setStartRowInclusive(other.getStartRowInclusive());
        }
        if (other.hasStopRow()) {
          setStopRow(other.getStopRow());
        }
        if (other.hasStopRowInclusive()) {
          setStopRowInclusive(other.getStopRowInclusive());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Always initialized: no required fields.
      public final boolean isInitialized() {
        return true;
      }
      // Parse-and-merge; a partially parsed message is still merged in the
      // finally block before the parse exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      // optional bytes start_row = 1;
      private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes start_row = 1;</code>
       */
      public boolean hasStartRow() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bytes start_row = 1;</code>
       */
      public com.google.protobuf.ByteString getStartRow() {
        return startRow_;
      }
      /**
       * <code>optional bytes start_row = 1;</code>
       */
      public Builder setStartRow(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        startRow_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes start_row = 1;</code>
       */
      public Builder clearStartRow() {
        bitField0_ = (bitField0_ & ~0x00000001);
        startRow_ = getDefaultInstance().getStartRow();
        onChanged();
        return this;
      }
      // optional bool start_row_inclusive = 2;
      private boolean startRowInclusive_ ;
      /**
       * <code>optional bool start_row_inclusive = 2;</code>
       */
      public boolean hasStartRowInclusive() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool start_row_inclusive = 2;</code>
       */
      public boolean getStartRowInclusive() {
        return startRowInclusive_;
      }
      /**
       * <code>optional bool start_row_inclusive = 2;</code>
       */
      public Builder setStartRowInclusive(boolean value) {
        bitField0_ |= 0x00000002;
        startRowInclusive_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool start_row_inclusive = 2;</code>
       */
      public Builder clearStartRowInclusive() {
        bitField0_ = (bitField0_ & ~0x00000002);
        startRowInclusive_ = false;
        onChanged();
        return this;
      }
      // optional bytes stop_row = 3;
      private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes stop_row = 3;</code>
       */
      public boolean hasStopRow() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bytes stop_row = 3;</code>
       */
      public com.google.protobuf.ByteString getStopRow() {
        return stopRow_;
      }
      /**
       * <code>optional bytes stop_row = 3;</code>
       */
      public Builder setStopRow(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        stopRow_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes stop_row = 3;</code>
       */
      public Builder clearStopRow() {
        bitField0_ = (bitField0_ & ~0x00000004);
        stopRow_ = getDefaultInstance().getStopRow();
        onChanged();
        return this;
      }
      // optional bool stop_row_inclusive = 4;
      private boolean stopRowInclusive_ ;
      /**
       * <code>optional bool stop_row_inclusive = 4;</code>
       */
      public boolean hasStopRowInclusive() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional bool stop_row_inclusive = 4;</code>
       */
      public boolean getStopRowInclusive() {
        return stopRowInclusive_;
      }
      /**
       * <code>optional bool stop_row_inclusive = 4;</code>
       */
      public Builder setStopRowInclusive(boolean value) {
        bitField0_ |= 0x00000008;
        stopRowInclusive_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool stop_row_inclusive = 4;</code>
       */
      public Builder clearStopRowInclusive() {
        bitField0_ = (bitField0_ & ~0x00000008);
        stopRowInclusive_ = false;
        onChanged();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.RowRange)
    }
    static {
      defaultInstance = new RowRange(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:hbase.pb.RowRange)
  }
  /**
   * Accessor contract implemented by both {@code MultiRowRangeFilter} and its
   * Builder: list, count, index, and or-builder views of the repeated
   * {@code row_range_list} field.
   */
  public interface MultiRowRangeFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // repeated .hbase.pb.RowRange row_range_list = 1;
    /**
     * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> 
        getRowRangeListList();
    /**
     * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getRowRangeList(int index);
    /**
     * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
     */
    int getRowRangeListCount();
    /**
     * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder> 
        getRowRangeListOrBuilderList();
    /**
     * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder getRowRangeListOrBuilder(
        int index);
  }
/**
* Protobuf type {@code hbase.pb.MultiRowRangeFilter}
*/
public static final class MultiRowRangeFilter extends
com.google.protobuf.GeneratedMessage
implements MultiRowRangeFilterOrBuilder {
// Use MultiRowRangeFilter.newBuilder() to construct.
private MultiRowRangeFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private MultiRowRangeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MultiRowRangeFilter defaultInstance;
public static MultiRowRangeFilter getDefaultInstance() {
return defaultInstance;
}
public MultiRowRangeFilter getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MultiRowRangeFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
rowRangeList_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange>();
mutable_bitField0_ |= 0x00000001;
}
rowRangeList_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
rowRangeList_ = java.util.Collections.unmodifiableList(rowRangeList_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.Builder.class);
}
public static com.google.protobuf.Parser<MultiRowRangeFilter> PARSER =
new com.google.protobuf.AbstractParser<MultiRowRangeFilter>() {
public MultiRowRangeFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiRowRangeFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiRowRangeFilter> getParserForType() {
return PARSER;
}
// repeated .hbase.pb.RowRange row_range_list = 1;
public static final int ROW_RANGE_LIST_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> rowRangeList_;
/**
* <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> getRowRangeListList() {
return rowRangeList_;
}
/**
* <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>
getRowRangeListOrBuilderList() {
return rowRangeList_;
}
/**
* <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
*/
public int getRowRangeListCount() {
return rowRangeList_.size();
}
/**
* <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getRowRangeList(int index) {
return rowRangeList_.get(index);
}
/**
* <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder getRowRangeListOrBuilder(
int index) {
return rowRangeList_.get(index);
}
private void initFields() {
rowRangeList_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < rowRangeList_.size(); i++) {
output.writeMessage(1, rowRangeList_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < rowRangeList_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, rowRangeList_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter) obj;
boolean result = true;
result = result && getRowRangeListList()
.equals(other.getRowRangeListList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getRowRangeListCount() > 0) {
hash = (37 * hash) + ROW_RANGE_LIST_FIELD_NUMBER;
hash = (53 * hash) + getRowRangeListList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Framework hook: creates a child builder that reports changes to 'parent'
// (used by the protobuf runtime for nested-builder change propagation).
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hbase.pb.MultiRowRangeFilter}
 */
// NOTE(review): protoc-generated builder — comments added for readability only;
// every statement below is left byte-for-byte as generated.
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Pre-creates the repeated-field builder only when the runtime is configured
// to always use field builders (i.e. when change notifications are needed).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRowRangeListFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets the builder to the default (empty) message state.
public Builder clear() {
super.clear();
if (rowRangeListBuilder_ == null) {
rowRangeList_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
rowRangeListBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.getDefaultInstance();
}
// Builds the message, throwing if the result reports itself uninitialized.
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the initialization check; wraps rowRangeList_ as unmodifiable
// and clears the mutable bit so the built message can share the list safely.
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter(this);
int from_bitField0_ = bitField0_;
if (rowRangeListBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
rowRangeList_ = java.util.Collections.unmodifiableList(rowRangeList_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.rowRangeList_ = rowRangeList_;
} else {
result.rowRangeList_ = rowRangeListBuilder_.build();
}
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Repeated-field merge: appends other's row ranges to this builder's list.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.getDefaultInstance()) return this;
if (rowRangeListBuilder_ == null) {
if (!other.rowRangeList_.isEmpty()) {
if (rowRangeList_.isEmpty()) {
rowRangeList_ = other.rowRangeList_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureRowRangeListIsMutable();
rowRangeList_.addAll(other.rowRangeList_);
}
onChanged();
}
} else {
if (!other.rowRangeList_.isEmpty()) {
if (rowRangeListBuilder_.isEmpty()) {
rowRangeListBuilder_.dispose();
rowRangeListBuilder_ = null;
rowRangeList_ = other.rowRangeList_;
bitField0_ = (bitField0_ & ~0x00000001);
rowRangeListBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getRowRangeListFieldBuilder() : null;
} else {
rowRangeListBuilder_.addAllMessages(other.rowRangeList_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
// Parses from the wire; on failure, whatever was successfully read before the
// error is still merged in (finally block) before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x00000001 records whether rowRangeList_ is a private mutable copy.
private int bitField0_;
// repeated .hbase.pb.RowRange row_range_list = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> rowRangeList_ =
java.util.Collections.emptyList();
// Copy-on-write: replaces the (possibly shared/immutable) list with a private
// ArrayList the first time a mutating accessor touches it.
private void ensureRowRangeListIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
rowRangeList_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange>(rowRangeList_);
bitField0_ |= 0x00000001;
}
}
// Lazily-created nested builder; once non-null, it owns the field and
// rowRangeList_ is nulled out (see getRowRangeListFieldBuilder()).
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder> rowRangeListBuilder_;
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> getRowRangeListList() {
if (rowRangeListBuilder_ == null) {
return java.util.Collections.unmodifiableList(rowRangeList_);
} else {
return rowRangeListBuilder_.getMessageList();
}
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public int getRowRangeListCount() {
if (rowRangeListBuilder_ == null) {
return rowRangeList_.size();
} else {
return rowRangeListBuilder_.getCount();
}
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getRowRangeList(int index) {
if (rowRangeListBuilder_ == null) {
return rowRangeList_.get(index);
} else {
return rowRangeListBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public Builder setRowRangeList(
int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange value) {
if (rowRangeListBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRowRangeListIsMutable();
rowRangeList_.set(index, value);
onChanged();
} else {
rowRangeListBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public Builder setRowRangeList(
int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder builderForValue) {
if (rowRangeListBuilder_ == null) {
ensureRowRangeListIsMutable();
rowRangeList_.set(index, builderForValue.build());
onChanged();
} else {
rowRangeListBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public Builder addRowRangeList(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange value) {
if (rowRangeListBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRowRangeListIsMutable();
rowRangeList_.add(value);
onChanged();
} else {
rowRangeListBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public Builder addRowRangeList(
int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange value) {
if (rowRangeListBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRowRangeListIsMutable();
rowRangeList_.add(index, value);
onChanged();
} else {
rowRangeListBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public Builder addRowRangeList(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder builderForValue) {
if (rowRangeListBuilder_ == null) {
ensureRowRangeListIsMutable();
rowRangeList_.add(builderForValue.build());
onChanged();
} else {
rowRangeListBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public Builder addRowRangeList(
int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder builderForValue) {
if (rowRangeListBuilder_ == null) {
ensureRowRangeListIsMutable();
rowRangeList_.add(index, builderForValue.build());
onChanged();
} else {
rowRangeListBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public Builder addAllRowRangeList(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> values) {
if (rowRangeListBuilder_ == null) {
ensureRowRangeListIsMutable();
super.addAll(values, rowRangeList_);
onChanged();
} else {
rowRangeListBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public Builder clearRowRangeList() {
if (rowRangeListBuilder_ == null) {
rowRangeList_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
rowRangeListBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public Builder removeRowRangeList(int index) {
if (rowRangeListBuilder_ == null) {
ensureRowRangeListIsMutable();
rowRangeList_.remove(index);
onChanged();
} else {
rowRangeListBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder getRowRangeListBuilder(
int index) {
return getRowRangeListFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder getRowRangeListOrBuilder(
int index) {
if (rowRangeListBuilder_ == null) {
return rowRangeList_.get(index); } else {
return rowRangeListBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>
getRowRangeListOrBuilderList() {
if (rowRangeListBuilder_ != null) {
return rowRangeListBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(rowRangeList_);
}
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder addRowRangeListBuilder() {
return getRowRangeListFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder addRowRangeListBuilder(
int index) {
return getRowRangeListFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder>
getRowRangeListBuilderList() {
return getRowRangeListFieldBuilder().getBuilderList();
}
// Lazy switch from the plain backing list to a RepeatedFieldBuilder; after
// this runs, rowRangeListBuilder_ owns the data and rowRangeList_ is nulled.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>
getRowRangeListFieldBuilder() {
if (rowRangeListBuilder_ == null) {
rowRangeListBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>(
rowRangeList_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
rowRangeList_ = null;
}
return rowRangeListBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.MultiRowRangeFilter)
}
// Eagerly creates the type's shared default (empty) instance at class-load time.
static {
defaultInstance = new MultiRowRangeFilter(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.MultiRowRangeFilter)
}
// Per-message descriptor / reflective field-accessor pairs. All are populated
// exactly once by the static descriptor initializer at the bottom of this file.
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_Filter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_Filter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_ColumnCountGetFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_ColumnPaginationFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_ColumnPrefixFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_ColumnRangeFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_CompareFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_CompareFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_DependentColumnFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_FamilyFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_FamilyFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_FilterList_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_FilterList_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_FilterWrapper_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_FilterWrapper_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_FuzzyRowFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_InclusiveStopFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_KeyOnlyFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_PageFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_PageFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_PrefixFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_PrefixFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_QualifierFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_QualifierFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_RandomRowFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_RowFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_RowFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_SingleColumnValueFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_SkipFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_SkipFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_TimestampsFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_ValueFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_ValueFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_WhileMatchFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_FilterAllFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_RowRange_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_RowRange_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_MultiRowRangeFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable;
/** Returns the file-level descriptor for {@code Filter.proto}, built by the static block below. */
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
// Backing field for getDescriptor(); assigned exactly once by the static block below.
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
// Descriptor bootstrap. descriptorData is the serialized FileDescriptorProto of
// Filter.proto, string-encoded by protoc (octal escapes are binary bytes — do
// not edit by hand).
static {
java.lang.String[] descriptorData = {
"\n\014Filter.proto\022\010hbase.pb\032\013HBase.proto\032\020C" +
"omparator.proto\"1\n\006Filter\022\014\n\004name\030\001 \002(\t\022" +
"\031\n\021serialized_filter\030\002 \001(\014\"%\n\024ColumnCoun" +
"tGetFilter\022\r\n\005limit\030\001 \002(\005\"N\n\026ColumnPagin" +
"ationFilter\022\r\n\005limit\030\001 \002(\005\022\016\n\006offset\030\002 \001" +
"(\005\022\025\n\rcolumn_offset\030\003 \001(\014\"$\n\022ColumnPrefi" +
"xFilter\022\016\n\006prefix\030\001 \002(\014\"w\n\021ColumnRangeFi" +
"lter\022\022\n\nmin_column\030\001 \001(\014\022\034\n\024min_column_i" +
"nclusive\030\002 \001(\010\022\022\n\nmax_column\030\003 \001(\014\022\034\n\024ma" +
"x_column_inclusive\030\004 \001(\010\"d\n\rCompareFilte",
"r\022)\n\ncompare_op\030\001 \002(\0162\025.hbase.pb.Compare" +
"Type\022(\n\ncomparator\030\002 \001(\0132\024.hbase.pb.Comp" +
"arator\"\230\001\n\025DependentColumnFilter\022/\n\016comp" +
"are_filter\030\001 \002(\0132\027.hbase.pb.CompareFilte" +
"r\022\025\n\rcolumn_family\030\002 \001(\014\022\030\n\020column_quali" +
"fier\030\003 \001(\014\022\035\n\025drop_dependent_column\030\004 \001(" +
"\010\"?\n\014FamilyFilter\022/\n\016compare_filter\030\001 \002(" +
"\0132\027.hbase.pb.CompareFilter\"\222\001\n\nFilterLis" +
"t\022/\n\010operator\030\001 \002(\0162\035.hbase.pb.FilterLis" +
"t.Operator\022!\n\007filters\030\002 \003(\0132\020.hbase.pb.F",
"ilter\"0\n\010Operator\022\021\n\rMUST_PASS_ALL\020\001\022\021\n\r" +
"MUST_PASS_ONE\020\002\"1\n\rFilterWrapper\022 \n\006filt" +
"er\030\001 \002(\0132\020.hbase.pb.Filter\"\024\n\022FirstKeyOn" +
"lyFilter\";\n%FirstKeyValueMatchingQualifi" +
"ersFilter\022\022\n\nqualifiers\030\001 \003(\014\"C\n\016FuzzyRo" +
"wFilter\0221\n\017fuzzy_keys_data\030\001 \003(\0132\030.hbase" +
".pb.BytesBytesPair\"+\n\023InclusiveStopFilte" +
"r\022\024\n\014stop_row_key\030\001 \001(\014\"#\n\rKeyOnlyFilter" +
"\022\022\n\nlen_as_val\030\001 \002(\010\"5\n\032MultipleColumnPr" +
"efixFilter\022\027\n\017sorted_prefixes\030\001 \003(\014\"\037\n\nP",
"ageFilter\022\021\n\tpage_size\030\001 \002(\003\"\036\n\014PrefixFi" +
"lter\022\016\n\006prefix\030\001 \001(\014\"B\n\017QualifierFilter\022" +
"/\n\016compare_filter\030\001 \002(\0132\027.hbase.pb.Compa" +
"reFilter\"!\n\017RandomRowFilter\022\016\n\006chance\030\001 " +
"\002(\002\"<\n\tRowFilter\022/\n\016compare_filter\030\001 \002(\013" +
"2\027.hbase.pb.CompareFilter\"g\n\036SingleColum" +
"nValueExcludeFilter\022E\n\032single_column_val" +
"ue_filter\030\001 \002(\0132!.hbase.pb.SingleColumnV" +
"alueFilter\"\327\001\n\027SingleColumnValueFilter\022\025" +
"\n\rcolumn_family\030\001 \001(\014\022\030\n\020column_qualifie",
"r\030\002 \001(\014\022)\n\ncompare_op\030\003 \002(\0162\025.hbase.pb.C" +
"ompareType\022(\n\ncomparator\030\004 \002(\0132\024.hbase.p" +
"b.Comparator\022\031\n\021filter_if_missing\030\005 \001(\010\022" +
"\033\n\023latest_version_only\030\006 \001(\010\".\n\nSkipFilt" +
"er\022 \n\006filter\030\001 \002(\0132\020.hbase.pb.Filter\"<\n\020" +
"TimestampsFilter\022\026\n\ntimestamps\030\001 \003(\003B\002\020\001" +
"\022\020\n\010can_hint\030\002 \001(\010\">\n\013ValueFilter\022/\n\016com" +
"pare_filter\030\001 \002(\0132\027.hbase.pb.CompareFilt" +
"er\"4\n\020WhileMatchFilter\022 \n\006filter\030\001 \002(\0132\020" +
".hbase.pb.Filter\"\021\n\017FilterAllFilter\"h\n\010R",
"owRange\022\021\n\tstart_row\030\001 \001(\014\022\033\n\023start_row_" +
"inclusive\030\002 \001(\010\022\020\n\010stop_row\030\003 \001(\014\022\032\n\022sto" +
"p_row_inclusive\030\004 \001(\010\"A\n\023MultiRowRangeFi" +
"lter\022*\n\016row_range_list\030\001 \003(\0132\022.hbase.pb." +
"RowRangeBB\n*org.apache.hadoop.hbase.prot" +
"obuf.generatedB\014FilterProtosH\001\210\001\001\240\001\001"
};
// Once the file descriptor is built, resolve each message's descriptor (by its
// index in the .proto file) and wire up its reflective field-accessor table.
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_hbase_pb_Filter_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hbase_pb_Filter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_Filter_descriptor,
new java.lang.String[] { "Name", "SerializedFilter", });
internal_static_hbase_pb_ColumnCountGetFilter_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_ColumnCountGetFilter_descriptor,
new java.lang.String[] { "Limit", });
internal_static_hbase_pb_ColumnPaginationFilter_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_ColumnPaginationFilter_descriptor,
new java.lang.String[] { "Limit", "Offset", "ColumnOffset", });
internal_static_hbase_pb_ColumnPrefixFilter_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_ColumnPrefixFilter_descriptor,
new java.lang.String[] { "Prefix", });
internal_static_hbase_pb_ColumnRangeFilter_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_ColumnRangeFilter_descriptor,
new java.lang.String[] { "MinColumn", "MinColumnInclusive", "MaxColumn", "MaxColumnInclusive", });
internal_static_hbase_pb_CompareFilter_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_hbase_pb_CompareFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_CompareFilter_descriptor,
new java.lang.String[] { "CompareOp", "Comparator", });
internal_static_hbase_pb_DependentColumnFilter_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_DependentColumnFilter_descriptor,
new java.lang.String[] { "CompareFilter", "ColumnFamily", "ColumnQualifier", "DropDependentColumn", });
internal_static_hbase_pb_FamilyFilter_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_hbase_pb_FamilyFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_FamilyFilter_descriptor,
new java.lang.String[] { "CompareFilter", });
internal_static_hbase_pb_FilterList_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_hbase_pb_FilterList_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_FilterList_descriptor,
new java.lang.String[] { "Operator", "Filters", });
internal_static_hbase_pb_FilterWrapper_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_hbase_pb_FilterWrapper_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_FilterWrapper_descriptor,
new java.lang.String[] { "Filter", });
internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor,
new java.lang.String[] { });
internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor,
new java.lang.String[] { "Qualifiers", });
internal_static_hbase_pb_FuzzyRowFilter_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_FuzzyRowFilter_descriptor,
new java.lang.String[] { "FuzzyKeysData", });
internal_static_hbase_pb_InclusiveStopFilter_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_InclusiveStopFilter_descriptor,
new java.lang.String[] { "StopRowKey", });
internal_static_hbase_pb_KeyOnlyFilter_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_KeyOnlyFilter_descriptor,
new java.lang.String[] { "LenAsVal", });
internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor,
new java.lang.String[] { "SortedPrefixes", });
internal_static_hbase_pb_PageFilter_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_hbase_pb_PageFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_PageFilter_descriptor,
new java.lang.String[] { "PageSize", });
internal_static_hbase_pb_PrefixFilter_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_hbase_pb_PrefixFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_PrefixFilter_descriptor,
new java.lang.String[] { "Prefix", });
internal_static_hbase_pb_QualifierFilter_descriptor =
getDescriptor().getMessageTypes().get(18);
internal_static_hbase_pb_QualifierFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_QualifierFilter_descriptor,
new java.lang.String[] { "CompareFilter", });
internal_static_hbase_pb_RandomRowFilter_descriptor =
getDescriptor().getMessageTypes().get(19);
internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_RandomRowFilter_descriptor,
new java.lang.String[] { "Chance", });
internal_static_hbase_pb_RowFilter_descriptor =
getDescriptor().getMessageTypes().get(20);
internal_static_hbase_pb_RowFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_RowFilter_descriptor,
new java.lang.String[] { "CompareFilter", });
internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor =
getDescriptor().getMessageTypes().get(21);
internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor,
new java.lang.String[] { "SingleColumnValueFilter", });
internal_static_hbase_pb_SingleColumnValueFilter_descriptor =
getDescriptor().getMessageTypes().get(22);
internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_SingleColumnValueFilter_descriptor,
new java.lang.String[] { "ColumnFamily", "ColumnQualifier", "CompareOp", "Comparator", "FilterIfMissing", "LatestVersionOnly", });
internal_static_hbase_pb_SkipFilter_descriptor =
getDescriptor().getMessageTypes().get(23);
internal_static_hbase_pb_SkipFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_SkipFilter_descriptor,
new java.lang.String[] { "Filter", });
internal_static_hbase_pb_TimestampsFilter_descriptor =
getDescriptor().getMessageTypes().get(24);
internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_TimestampsFilter_descriptor,
new java.lang.String[] { "Timestamps", "CanHint", });
internal_static_hbase_pb_ValueFilter_descriptor =
getDescriptor().getMessageTypes().get(25);
internal_static_hbase_pb_ValueFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_ValueFilter_descriptor,
new java.lang.String[] { "CompareFilter", });
internal_static_hbase_pb_WhileMatchFilter_descriptor =
getDescriptor().getMessageTypes().get(26);
internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_WhileMatchFilter_descriptor,
new java.lang.String[] { "Filter", });
internal_static_hbase_pb_FilterAllFilter_descriptor =
getDescriptor().getMessageTypes().get(27);
internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_FilterAllFilter_descriptor,
new java.lang.String[] { });
internal_static_hbase_pb_RowRange_descriptor =
getDescriptor().getMessageTypes().get(28);
internal_static_hbase_pb_RowRange_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_RowRange_descriptor,
new java.lang.String[] { "StartRow", "StartRowInclusive", "StopRow", "StopRowInclusive", });
internal_static_hbase_pb_MultiRowRangeFilter_descriptor =
getDescriptor().getMessageTypes().get(29);
internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MultiRowRangeFilter_descriptor,
new java.lang.String[] { "RowRangeList", });
return null;
}
};
// Build this file's descriptor against the descriptors of the two .proto files
// it imports (HBase.proto and Comparator.proto).
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(),
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
| apache-2.0 |
nroduit/XChart | xchart-demo/src/main/java/org/knowm/xchart/demo/charts/date/DateChart09.java | 2383 | package org.knowm.xchart.demo.charts.date;
import java.time.LocalDateTime;
import java.time.Month;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.knowm.xchart.SwingWrapper;
import org.knowm.xchart.XYChart;
import org.knowm.xchart.XYChartBuilder;
import org.knowm.xchart.demo.charts.ExampleChart;
/**
 * Year scale
 *
 * <p>Demonstrates the following:
 *
 * <ul>
 *   <li>Rotated 90 degrees X-Axis labels
 *   <li>Setting custom X-Axis tick labels
 *   <li>Setting custom cursor tool tip text
 * </ul>
 */
public class DateChart09 implements ExampleChart<XYChart> {

  public static void main(String[] args) {
    ExampleChart<XYChart> demo = new DateChart09();
    XYChart xyChart = demo.getChart();
    new SwingWrapper<>(xyChart).displayChart();
  }

  @Override
  public XYChart getChart() {

    // Build the chart shell, titled after this class.
    XYChart chart =
        new XYChartBuilder().width(800).height(600).title(getClass().getSimpleName()).build();

    // Styling: no legend, X-axis tick labels turned on their side.
    chart.getStyler().setLegendVisible(false);
    chart.getStyler().setXAxisLabelRotation(90);

    // One data point per day of a (non-leap) year, with random Y values.
    List<Integer> dayIndices =
        IntStream.rangeClosed(0, 364).boxed().collect(Collectors.toList());
    Random rng = new Random();
    List<Double> randomValues =
        dayIndices.stream().map(day -> rng.nextDouble()).collect(Collectors.toList());
    chart.addSeries("blah", dayIndices, randomValues);

    // Render each integer tick as the month abbreviation of that day, counted from
    // January 1st 2001.
    LocalDateTime firstDay = LocalDateTime.of(2001, Month.JANUARY, 1, 0, 0, 0);
    DateTimeFormatter monthOnly = DateTimeFormatter.ofPattern("LLL");
    chart
        .getStyler()
        .setxAxisTickLabelsFormattingFunction(
            x -> firstDay.plusDays(x.longValue()).format(monthOnly));

    // Cursor tool tips show month and day-of-month (no year).
    chart.getStyler().setCursorEnabled(true);
    DateTimeFormatter monthAndDay = DateTimeFormatter.ofPattern("LLL dd");
    chart
        .getStyler()
        .setCustomCursorXDataFormattingFunction(
            x -> firstDay.plusDays(x.longValue()).format(monthAndDay));

    return chart;
  }

  @Override
  public String getExampleChartName() {
    return getClass().getSimpleName() + " - Custom Date Formatter Without Years";
  }
}
| apache-2.0 |
welterde/ewok | com/planet_ink/coffee_mud/core/intermud/packets/ChannelUserReply.java | 2969 | package com.planet_ink.coffee_mud.core.intermud.packets;
import com.planet_ink.coffee_mud.core.intermud.server.*;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.Vector;
/**
* Copyright (c) 2008-2010 Bo Zimmerman
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
@SuppressWarnings("unchecked")
public class ChannelUserReply extends Packet {
	/** Name of the user that was asked about. */
	public String userRequested=null;
	/** Display name reported for that user. */
	public String userVisibleName=null;
	/** Gender flag: 'M', 'F' or 'N' (neutral, the default). */
	public char gender = 'N';

	/** Creates an empty channel-user reply packet. */
	public ChannelUserReply()
	{
		super();
		type = Packet.CHAN_USER_REP;
	}

	/** Parses a reply out of the raw intermud message vector. */
	public ChannelUserReply(Vector v) throws InvalidPacketException {
		super(v);
		try {
			type = Packet.CHAN_USER_REP;
			try{
				userRequested = (String)v.elementAt(6);
				userVisibleName = (String)v.elementAt(7);
				// Wire encoding: 0 = male, 1 = female, 2 = neutral.
				final int gend = CMath.s_int(v.elementAt(8).toString());
				final char[] codes = {'M','F','N'};
				if((gend >= 0) && (gend <= 2))
					gender = codes[gend];
			}catch(Exception e){}
		}
		catch( ClassCastException e ) {
			throw new InvalidPacketException();
		}
	}

	public void send() throws InvalidPacketException {
		// Both user fields are mandatory on the wire.
		if( userRequested == null || userVisibleName == null )
			throw new InvalidPacketException();
		super.send();
	}

	public String toString() {
		// Map the gender char back to its wire code; unknown chars fall back to 0.
		int genderCode = "MFN".indexOf(gender);
		if(genderCode < 0)
			genderCode = 0;
		return "({\"chan-user-req\",5,\"" + Server.getMudName() +
			"\",0,\"" + target_mud + "\",0,\"" + userRequested
			+ "\",\"" + userVisibleName
			+ "\"," + genderCode + ",})";
	}
}
| apache-2.0 |
basepom/duplicate-finder-maven-plugin | src/it/setup-it/class-jars/second-class-jar/src/main/java/demo/package-info.java | 610 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@Deprecated
package demo;
/**
* Package docs.
*/
| apache-2.0 |
berndhopp/guava | android/guava/src/com/google/common/collect/ImmutableListMultimap.java | 13676 | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.concurrent.LazyInit;
import com.google.j2objc.annotations.RetainedWith;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Collection;
import java.util.Comparator;
import java.util.Map;
import java.util.Map.Entry;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
/**
* A {@link ListMultimap} whose contents will never change, with many other important properties
* detailed at {@link ImmutableCollection}.
*
* <p>See the Guava User Guide article on <a href=
* "https://github.com/google/guava/wiki/ImmutableCollectionsExplained"> immutable collections</a>.
*
* @author Jared Levy
* @since 2.0
*/
@GwtCompatible(serializable = true, emulated = true)
public class ImmutableListMultimap<K, V> extends ImmutableMultimap<K, V>
    implements ListMultimap<K, V> {

  /** Returns the empty multimap. */
  // Casting is safe because the multimap will never hold any elements.
  @SuppressWarnings("unchecked")
  public static <K, V> ImmutableListMultimap<K, V> of() {
    return (ImmutableListMultimap<K, V>) EmptyImmutableListMultimap.INSTANCE;
  }

  /** Returns an immutable multimap containing a single entry. */
  public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    return builder.build();
  }

  /** Returns an immutable multimap containing the given entries, in order. */
  public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1, K k2, V v2) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    return builder.build();
  }

  /** Returns an immutable multimap containing the given entries, in order. */
  public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    return builder.build();
  }

  /** Returns an immutable multimap containing the given entries, in order. */
  public static <K, V> ImmutableListMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    builder.put(k4, v4);
    return builder.build();
  }

  /** Returns an immutable multimap containing the given entries, in order. */
  public static <K, V> ImmutableListMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    builder.put(k4, v4);
    builder.put(k5, v5);
    return builder.build();
  }

  // looking for of() with > 5 entries? Use the builder instead.

  /**
   * Returns a new builder. The generated builder is equivalent to the builder created by the {@link
   * Builder} constructor.
   */
  public static <K, V> Builder<K, V> builder() {
    return new Builder<>();
  }

  /**
   * A builder for creating immutable {@code ListMultimap} instances, especially {@code public
   * static final} multimaps ("constant multimaps"). Example:
   *
   * <pre>{@code
   * static final Multimap<String, Integer> STRING_TO_INTEGER_MULTIMAP =
   *     new ImmutableListMultimap.Builder<String, Integer>()
   *         .put("one", 1)
   *         .putAll("several", 1, 2, 3)
   *         .putAll("many", 1, 2, 3, 4, 5)
   *         .build();
   * }</pre>
   *
   * <p>Builder instances can be reused; it is safe to call {@link #build} multiple times to build
   * multiple multimaps in series. Each multimap contains the key-value mappings in the previously
   * created multimaps.
   *
   * @since 2.0
   */
  public static final class Builder<K, V> extends ImmutableMultimap.Builder<K, V> {
    /**
     * Creates a new builder. The returned builder is equivalent to the builder generated by {@link
     * ImmutableListMultimap#builder}.
     */
    public Builder() {}

    // All overrides below simply delegate to the superclass and narrow the return
    // type to this Builder so that calls can be chained fluently.
    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> put(K key, V value) {
      super.put(key, value);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 11.0
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> put(Entry<? extends K, ? extends V> entry) {
      super.put(entry);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 19.0
     */
    @CanIgnoreReturnValue
    @Beta
    @Override
    public Builder<K, V> putAll(Iterable<? extends Entry<? extends K, ? extends V>> entries) {
      super.putAll(entries);
      return this;
    }

    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> putAll(K key, Iterable<? extends V> values) {
      super.putAll(key, values);
      return this;
    }

    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> putAll(K key, V... values) {
      super.putAll(key, values);
      return this;
    }

    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> putAll(Multimap<? extends K, ? extends V> multimap) {
      super.putAll(multimap);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 8.0
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> orderKeysBy(Comparator<? super K> keyComparator) {
      super.orderKeysBy(keyComparator);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 8.0
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> orderValuesBy(Comparator<? super V> valueComparator) {
      super.orderValuesBy(valueComparator);
      return this;
    }

    /** Returns a newly-created immutable list multimap. */
    @Override
    public ImmutableListMultimap<K, V> build() {
      // Safe: the superclass build() constructs an ImmutableListMultimap for
      // list-multimap builders.
      return (ImmutableListMultimap<K, V>) super.build();
    }
  }

  /**
   * Returns an immutable multimap containing the same mappings as {@code multimap}. The generated
   * multimap's key and value orderings correspond to the iteration ordering of the {@code
   * multimap.asMap()} view.
   *
   * <p>Despite the method name, this method attempts to avoid actually copying the data when it is
   * safe to do so. The exact circumstances under which a copy will or will not be performed are
   * undocumented and subject to change.
   *
   * @throws NullPointerException if any key or value in {@code multimap} is null
   */
  public static <K, V> ImmutableListMultimap<K, V> copyOf(
      Multimap<? extends K, ? extends V> multimap) {
    if (multimap.isEmpty()) {
      return of();
    }

    // Short-circuit: an ImmutableListMultimap that is not a partial view can be
    // returned as-is instead of being copied.
    // TODO(lowasser): copy ImmutableSetMultimap by using asList() on the sets
    if (multimap instanceof ImmutableListMultimap) {
      @SuppressWarnings("unchecked") // safe since multimap is not writable
      ImmutableListMultimap<K, V> kvMultimap = (ImmutableListMultimap<K, V>) multimap;
      if (!kvMultimap.isPartialView()) {
        return kvMultimap;
      }
    }

    return fromMapEntries(multimap.asMap().entrySet(), null);
  }

  /** Creates an ImmutableListMultimap from an asMap.entrySet. */
  static <K, V> ImmutableListMultimap<K, V> fromMapEntries(
      Collection<? extends Map.Entry<? extends K, ? extends Collection<? extends V>>> mapEntries,
      @NullableDecl Comparator<? super V> valueComparator) {
    if (mapEntries.isEmpty()) {
      return of();
    }
    ImmutableMap.Builder<K, ImmutableList<V>> builder =
        new ImmutableMap.Builder<>(mapEntries.size());
    int size = 0;

    // Snapshot each value collection into an ImmutableList (sorted when a
    // comparator is supplied), skipping keys with no values, and accumulate the
    // total number of values for the multimap's size field.
    for (Entry<? extends K, ? extends Collection<? extends V>> entry : mapEntries) {
      K key = entry.getKey();
      Collection<? extends V> values = entry.getValue();
      ImmutableList<V> list =
          (valueComparator == null)
              ? ImmutableList.copyOf(values)
              : ImmutableList.sortedCopyOf(valueComparator, values);
      if (!list.isEmpty()) {
        builder.put(key, list);
        size += list.size();
      }
    }

    return new ImmutableListMultimap<>(builder.build(), size);
  }

  /**
   * Returns an immutable multimap containing the specified entries. The returned multimap iterates
   * over keys in the order they were first encountered in the input, and the values for each key
   * are iterated in the order they were encountered.
   *
   * @throws NullPointerException if any key, value, or entry is null
   * @since 19.0
   */
  @Beta
  public static <K, V> ImmutableListMultimap<K, V> copyOf(
      Iterable<? extends Entry<? extends K, ? extends V>> entries) {
    return new Builder<K, V>().putAll(entries).build();
  }

  // size: total number of values across all keys (precomputed by the callers).
  ImmutableListMultimap(ImmutableMap<K, ImmutableList<V>> map, int size) {
    super(map, size);
  }

  // views

  /**
   * Returns an immutable list of the values for the given key. If no mappings in the multimap have
   * the provided key, an empty immutable list is returned. The values are in the same order as the
   * parameters used to build this multimap.
   */
  @Override
  public ImmutableList<V> get(@NullableDecl K key) {
    // This cast is safe as its type is known in constructor.
    ImmutableList<V> list = (ImmutableList<V>) map.get(key);
    return (list == null) ? ImmutableList.<V>of() : list;
  }

  // Lazily computed, memoized inverse multimap; @RetainedWith keeps the
  // back-reference alive under J2ObjC's reference counting.
  @LazyInit @RetainedWith private transient ImmutableListMultimap<V, K> inverse;

  /**
   * {@inheritDoc}
   *
   * <p>Because an inverse of a list multimap can contain multiple pairs with the same key and
   * value, this method returns an {@code ImmutableListMultimap} rather than the {@code
   * ImmutableMultimap} specified in the {@code ImmutableMultimap} class.
   *
   * @since 11.0
   */
  @Override
  public ImmutableListMultimap<V, K> inverse() {
    ImmutableListMultimap<V, K> result = inverse;
    return (result == null) ? (inverse = invert()) : result;
  }

  // Builds the value->key multimap and links its inverse field back to this
  // instance so that inverse().inverse() returns this without recomputation.
  private ImmutableListMultimap<V, K> invert() {
    Builder<V, K> builder = builder();
    for (Entry<K, V> entry : entries()) {
      builder.put(entry.getValue(), entry.getKey());
    }
    ImmutableListMultimap<V, K> invertedMultimap = builder.build();
    invertedMultimap.inverse = this;
    return invertedMultimap;
  }

  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @CanIgnoreReturnValue
  @Deprecated
  @Override
  public ImmutableList<V> removeAll(Object key) {
    throw new UnsupportedOperationException();
  }

  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @CanIgnoreReturnValue
  @Deprecated
  @Override
  public ImmutableList<V> replaceValues(K key, Iterable<? extends V> values) {
    throw new UnsupportedOperationException();
  }

  /**
   * @serialData number of distinct keys, and then for each distinct key: the key, the number of
   *     values for that key, and the key's values
   */
  @GwtIncompatible // java.io.ObjectOutputStream
  private void writeObject(ObjectOutputStream stream) throws IOException {
    stream.defaultWriteObject();
    Serialization.writeMultimap(this, stream);
  }

  // Rebuilds the backing map and size from the custom serial form written above,
  // validating counts and converting any builder failure (e.g. duplicate keys)
  // into InvalidObjectException.
  @GwtIncompatible // java.io.ObjectInputStream
  private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
    stream.defaultReadObject();
    int keyCount = stream.readInt();
    if (keyCount < 0) {
      throw new InvalidObjectException("Invalid key count " + keyCount);
    }
    ImmutableMap.Builder<Object, ImmutableList<Object>> builder = ImmutableMap.builder();
    int tmpSize = 0;

    for (int i = 0; i < keyCount; i++) {
      Object key = stream.readObject();
      int valueCount = stream.readInt();
      if (valueCount <= 0) {
        throw new InvalidObjectException("Invalid value count " + valueCount);
      }

      ImmutableList.Builder<Object> valuesBuilder = ImmutableList.builder();
      for (int j = 0; j < valueCount; j++) {
        valuesBuilder.add(stream.readObject());
      }
      builder.put(key, valuesBuilder.build());
      tmpSize += valueCount;
    }

    ImmutableMap<Object, ImmutableList<Object>> tmpMap;
    try {
      tmpMap = builder.build();
    } catch (IllegalArgumentException e) {
      throw (InvalidObjectException) new InvalidObjectException(e.getMessage()).initCause(e);
    }

    // Fields are final; the serialization machinery writes them reflectively.
    FieldSettersHolder.MAP_FIELD_SETTER.set(this, tmpMap);
    FieldSettersHolder.SIZE_FIELD_SETTER.set(this, tmpSize);
  }

  @GwtIncompatible // Not needed in emulated source
  private static final long serialVersionUID = 0;
}
| apache-2.0 |
cmusatyalab/elijah-provisioning | android/android_fluid/src/edu/cmu/cs/cloudlet/android/application/graphics/GNetworkClientReceiver.java | 5437 | package edu.cmu.cs.cloudlet.android.application.graphics;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import org.apache.http.util.ByteArrayBuffer;
import org.json.JSONException;
import org.teleal.common.util.ByteArray;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
/**
 * Background thread that consumes simulation frames from the graphics server and
 * republishes the payload plus FPS/latency statistics to the UI via a Handler.
 */
public class GNetworkClientReceiver extends Thread {
	// UI-side sink for decoded frame payloads and status text.
	private Handler mHandler;
	// Server stream; closed by close(). Owned lifetime shared with the caller.
	private DataInputStream networkReader;
	// Loop guard; cleared by close() to end run().
	private boolean isThreadRun = true;
	// NOTE(review): never read or incremented in this class — appears unused.
	private int messageCounter = 0;

	// Reusable receive buffer, grown on demand in receiveMsg().
	protected byte[] recvByte = null;
	// NOTE(review): never populated in this class — appears unused here.
	ArrayList<Particle> particleList = new ArrayList<Particle>();

	// First and most recent frame ids seen; their difference drives the FPS figure.
	private int startFrameID = 0;
	private int currentFrameID = 0;
	// Sequence number ("ACC" id) of the message currently being processed.
	private int clientID = 0;

	// First-arrival timestamp per client id; repeats are tallied in duplicated_client_id.
	private TreeMap<Integer, Long> receiver_stamps = new TreeMap<Integer, Long>();
	// Arrival wall-clock time of every message, in receive order.
	private ArrayList<Long> reciver_time_list = new ArrayList<Long>();
	private int duplicated_client_id;

	// clientID -> send time recorded by the sender side (see recordSentTime),
	// consumed once per message to compute round latency.
	private HashMap<Integer, Long> latencyRecords = new HashMap<Integer, Long>();
	private long totalLatency = 0;

	public GNetworkClientReceiver(DataInputStream dataInputStream, Handler mHandler) {
		this.networkReader = dataInputStream;
		this.mHandler = mHandler;
	}

	public TreeMap<Integer, Long> getReceiverStamps(){
		return this.receiver_stamps;
	}

	public ArrayList<Long> getReceivedTimeList(){
		return this.reciver_time_list;
	}

	public int getDuplicatedAcc(){
		return this.duplicated_client_id;
	}

	@Override
	public void run() {
		// NOTE(review): isThreadRun is initialized to true, so this wait loop never
		// executes; it looks like a leftover start gate — confirm before removing.
		while(isThreadRun == false){
			try {
				Thread.sleep(1);
			} catch (InterruptedException e) {
			}
		}

		// Recv initial simulation information
		try {
			int containerWidth = networkReader.readInt();
			int containerHeight = networkReader.readInt();
			Log.d("krha", "container size : " + containerWidth + ", " + containerHeight);
			VisualizationStaticInfo.containerWidth = containerWidth;
			VisualizationStaticInfo.containerHeight = containerHeight;
		} catch (IOException e1) {
			e1.printStackTrace();
		}

		long startTime = System.currentTimeMillis();
		while(isThreadRun == true){
			int recvSize = 0;
			try {
				recvSize = this.receiveMsg(networkReader);
				long currentTime = System.currentTimeMillis();
				long duration = currentTime - startTime;
				// getSentTime() returns Long.MAX_VALUE when no send record exists,
				// making latency negative so it is ignored by the checks below.
				long latency = currentTime - this.getSentTime(this.clientID);
				if(latency > 0)
					totalLatency += latency;
				int totalFrameNumber = this.getLastFrameID()-this.startFrameID;
				if(totalFrameNumber > 0 && latency > 0){
					// FPS = frames/second since start; ACC = messages/second;
					// Latency = running average / latest sample (milliseconds).
					String message = "FPS: " + this.roundDigit(1000.0*totalFrameNumber/duration) +
							", ACC: " + this.roundDigit(1000.0*this.clientID/duration) +
							", Latency: " + this.roundDigit(1.0*totalLatency/totalFrameNumber) +
							" / " + latency;
					this.notifyStatus(GNetworkClient.PROGRESS_MESSAGE, message, recvByte);
				}
			} catch (IOException e) {
				Log.e("krha", e.toString());
				// this.notifyStatus(GNetworkClient.NETWORK_ERROR, e.toString(), null);
				break;
			}
		}
	}

	/**
	 * Reads one framed message: int clientID, int frameID, int payload length,
	 * then the payload bytes into {@link #recvByte}. Returns bytes actually read.
	 */
	private int receiveMsg(DataInputStream reader) throws IOException {
		this.clientID = reader.readInt();
		this.currentFrameID = reader.readInt();
		int retLength = reader.readInt();
		if(this.startFrameID == 0)
			this.startFrameID = this.currentFrameID;

		// Grow (never shrink) the shared receive buffer.
		if(recvByte == null || recvByte.length < retLength){
			recvByte = new byte[retLength];
		}
		// Loop until the whole payload has arrived; read() may return short counts.
		int readSize = 0;
		while(readSize < retLength){
			int ret = reader.read(this.recvByte, readSize, retLength-readSize);
			if(ret <= 0){
				break;
			}
			readSize += ret;
		}

		// Record the first arrival per client id; later arrivals count as duplicates.
		long currentTime = System.currentTimeMillis();
		if(this.receiver_stamps.get(this.clientID) == null){
			this.receiver_stamps.put(this.clientID, currentTime);
			// Log.d("krha", "Save Client ID : " + this.clientID);
		}else{
			duplicated_client_id++;
		}
		this.reciver_time_list.add(currentTime);

		return readSize;
	}

	/**
	 * Posts the payload and a status string to the UI handler.
	 * NOTE(review): will NPE if recvData is null (the commented-out error path in
	 * run() passed null) — confirm callers always supply a buffer.
	 */
	private void notifyStatus(int command, String string, byte[] recvData) {
		// Copy data with endian switching
		ByteBuffer buf = ByteBuffer.allocate(recvData.length);
		buf.order(ByteOrder.LITTLE_ENDIAN);
		buf.put(recvData);
		buf.flip();
		// NOTE(review): compact() right after flip() copies the data to the front and
		// leaves position at the end again; presumably the consumer rewinds before
		// reading — confirm this sequence is intentional.
		buf.compact();

		Message msg = Message.obtain();
		msg.what = command;
		msg.obj = buf;
		Bundle data = new Bundle();
		data.putString("message", string);
		msg.setData(data);
		this.mHandler.sendMessage(msg);
	}

	/** Stops the receive loop and closes the underlying stream. */
	public void close() {
		this.isThreadRun = false;
		try {
			if(this.networkReader != null)
				this.networkReader.close();
		} catch (IOException e) {
			Log.e("krha", e.toString());
		}
	}

	public int getLastFrameID() {
		return this.currentFrameID;
	}

	/**
	 * Called by the sender to timestamp an outgoing message.
	 * NOTE(review): the currentTimeMillis parameter is ignored; the map stores a
	 * fresh System.currentTimeMillis() instead — confirm which is intended.
	 */
	public void recordSentTime(int accIndex, long currentTimeMillis) {
		this.latencyRecords.put(accIndex, System.currentTimeMillis());
	}

	/**
	 * Consumes and returns the recorded send time for accID, or Long.MAX_VALUE as
	 * a "not found" sentinel (yields a negative latency that callers discard).
	 */
	public long getSentTime(int accID){
		if(this.latencyRecords.containsKey(accID) == false){
			return Long.MAX_VALUE;
		}else{
			long sentTime = this.latencyRecords.remove(accID);
			return sentTime;
		}
	}

	/** Formats a value with two decimal places for the status line. */
	public static String roundDigit(double paramFloat) {
		return String.format("%.2f", paramFloat);
	}
}
| apache-2.0 |
eemirtekin/Sakai-10.6-TR | kernel/kernel-impl/src/main/java/org/sakaiproject/content/impl/HtmlPageFilter.java | 5249 | /**********************************************************************************
* $URL: $
* $Id: $
***********************************************************************************
*
* Copyright (c) 2010, 2011, 2012, 2013, 2014 Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.content.impl;
import java.text.MessageFormat;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.content.api.ContentFilter;
import org.sakaiproject.content.api.ContentResource;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.EntityManager;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.util.Validator;
import org.apache.commons.lang.StringUtils;
/**
 * Simple filter that adds header and footer fragments to HTML pages. Whether the
 * fragments are added is controlled by the resource's {@code PROP_ADD_HTML}
 * property ("yes"/"no"/"auto"/absent).
 *
 * @author buckett
 */
public class HtmlPageFilter implements ContentFilter {

	private EntityManager entityManager;

	private ServerConfigurationService serverConfigurationService;

	/** If <code>false</false> then this filter is disabled. */
	private boolean enabled = true;

	// MessageFormat template: {0} = skin repo, {1} = site skin, {2} = escaped title.
	// Literal braces in the CSS are protected with MessageFormat single-quote escaping.
	private String headerTemplate =
"<html>\n" +
"  <head>\n" +
"    <meta http-equiv=\"Content-Style-Type\" content=\"text/css\" /> \n" +
"    <title>{2}</title>\n" +
"    <link href=\"{0}/tool_base.css\" type=\"text/css\" rel=\"stylesheet\" media=\"all\" />\n" +
"    <link href=\"{0}/{1}/tool.css\" type=\"text/css\" rel=\"stylesheet\" media=\"all\" />\n" +
"    <script type=\"text/javascript\" language=\"JavaScript\" src=\"/library/js/headscripts.js\"></script>\n" +
"    <style>body '{ padding: 5px !important; }'</style>\n" +
"  </head>\n" +
"  <body>\n";

	private String footerTemplate = "\n" +
"  </body>\n" +
"</html>\n";

	public void setEntityManager(EntityManager entityManager) {
		this.entityManager = entityManager;
	}

	public void setServerConfigurationService(ServerConfigurationService serverConfigurationService) {
		this.serverConfigurationService = serverConfigurationService;
	}

	public void setEnabled(boolean enabled) {
		this.enabled = enabled;
	}

	public void setHeaderTemplate(String headerTemplate) {
		this.headerTemplate = headerTemplate;
	}

	public void setFooterTemplate(String footerTemplate) {
		this.footerTemplate = footerTemplate;
	}

	/**
	 * Decides whether the resource should be wrapped: the filter must be enabled,
	 * the resource must be HTML, and it must not opt out with PROP_ADD_HTML="no".
	 */
	public boolean isFiltered(ContentResource resource) {
		String addHtml = resource.getProperties().getProperty(ResourceProperties.PROP_ADD_HTML);
		// Behaviour-equivalent simplification of the original expression, whose
		// "|| addHtml.equals(\"yes\")" clause was dead: any value other than "no"
		// (including null and "auto") lets the resource through.
		return enabled
				&& "text/html".equals(resource.getContentType())
				&& !"no".equals(addHtml);
	}

	/**
	 * Wraps the resource with the skinned header and footer. When PROP_ADD_HTML is
	 * absent or "auto", the wrapper detects whether the content already has HTML
	 * markup before adding the fragments.
	 */
	public ContentResource wrap(final ContentResource content) {
		if (!isFiltered(content)) {
			return content;
		}
		// Resolve the resource's context reference to a Site so we can pick its skin.
		Reference contentRef = entityManager.newReference(content.getReference());
		Reference siteRef = entityManager.newReference(contentRef.getContext());
		Entity entity = siteRef.getEntity();

		String addHtml = content.getProperties().getProperty(ResourceProperties.PROP_ADD_HTML);

		String skinRepo = getSkinRepo();
		String siteSkin = getSiteSkin(entity);

		// Only sniff for existing <html> markup when the property doesn't force a choice.
		final boolean detectHtml = addHtml == null || addHtml.equals("auto");
		String title = getTitle(content);
		final String header = MessageFormat.format(headerTemplate, skinRepo, siteSkin, title);
		final String footer = footerTemplate;

		return new WrappedContentResource(content, header, footer, detectHtml);
	}

	/** Returns the display name (falling back to the id), HTML-escaped for the &lt;title&gt; tag. */
	private String getTitle(final ContentResource content) {
		String title = content.getProperties().getProperty(ResourceProperties.PROP_DISPLAY_NAME);
		if (title == null) {
			title = content.getId();
		}
		return Validator.escapeHtml(title);
	}

	/** Returns the configured skin repository path (default "/library/skins"). */
	private String getSkinRepo() {
		final String skinRepo = serverConfigurationService.getString("skin.repo", "/library/skins");
		return skinRepo;
	}

	/**
	 * Returns the skin for the given entity: the site's own skin when set,
	 * otherwise the configured default, with the "neo-" prefix applied when the
	 * portal uses the neoskin templates.
	 */
	private String getSiteSkin(Entity entity) {
		String siteSkin = serverConfigurationService.getString("skin.default", "default");
		if (entity instanceof Site) {
			Site site =(Site)entity;
			if (site.getSkin() != null && site.getSkin().length() > 0) {
				siteSkin = site.getSkin();
			}
		}
		String neoPrefix = serverConfigurationService.getString("portal.neoprefix", "neo-");
		String portalTemplate = serverConfigurationService.getString("portal.templates", "neoskin");
		if (StringUtils.equals("neoskin", portalTemplate) && StringUtils.isNotBlank(neoPrefix) && !StringUtils.startsWith(siteSkin, neoPrefix)) {
			siteSkin = neoPrefix + siteSkin;
		}
		return siteSkin;
	}
}
| apache-2.0 |
srowhani/migrate-app | app/src/main/java/com/dankideacentral/dic/activities/AppCompatPreferenceActivity.java | 3008 | package com.dankideacentral.dic.activities;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceActivity;
import android.support.annotation.LayoutRes;
import android.support.annotation.Nullable;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatDelegate;
import android.support.v7.widget.Toolbar;
import android.view.MenuInflater;
import android.view.View;
import android.view.ViewGroup;
/**
 * A {@link android.preference.PreferenceActivity} which implements and proxies the necessary calls
 * to be used with AppCompat.
 *
 * <p>Every lifecycle and content-view call is forwarded to a lazily created
 * {@link AppCompatDelegate} so that AppCompat theming and the support ActionBar
 * work inside a framework PreferenceActivity.
 */
public abstract class AppCompatPreferenceActivity extends PreferenceActivity {

    // Lazily created in getDelegate(); all AppCompat behaviour is proxied through it.
    private AppCompatDelegate mDelegate;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // The delegate must install its LayoutInflater factory before any views inflate.
        getDelegate().installViewFactory();
        getDelegate().onCreate(savedInstanceState);
        super.onCreate(savedInstanceState);
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
        getDelegate().onPostCreate(savedInstanceState);
    }

    /** Returns the support ActionBar managed by the delegate (may be null). */
    public ActionBar getSupportActionBar() {
        return getDelegate().getSupportActionBar();
    }

    /** Sets a Toolbar to act as the support ActionBar, as in AppCompatActivity. */
    public void setSupportActionBar(@Nullable Toolbar toolbar) {
        getDelegate().setSupportActionBar(toolbar);
    }

    @Override
    public MenuInflater getMenuInflater() {
        return getDelegate().getMenuInflater();
    }

    // Content-view setters are routed through the delegate so AppCompat can wrap views.
    @Override
    public void setContentView(@LayoutRes int layoutResID) {
        getDelegate().setContentView(layoutResID);
    }

    @Override
    public void setContentView(View view) {
        getDelegate().setContentView(view);
    }

    @Override
    public void setContentView(View view, ViewGroup.LayoutParams params) {
        getDelegate().setContentView(view, params);
    }

    @Override
    public void addContentView(View view, ViewGroup.LayoutParams params) {
        getDelegate().addContentView(view, params);
    }

    @Override
    protected void onPostResume() {
        super.onPostResume();
        getDelegate().onPostResume();
    }

    @Override
    protected void onTitleChanged(CharSequence title, int color) {
        super.onTitleChanged(title, color);
        getDelegate().setTitle(title);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        getDelegate().onConfigurationChanged(newConfig);
    }

    @Override
    protected void onStop() {
        super.onStop();
        getDelegate().onStop();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        getDelegate().onDestroy();
    }

    public void invalidateOptionsMenu() {
        getDelegate().invalidateOptionsMenu();
    }

    /** Lazily creates the AppCompat delegate backing this activity. */
    private AppCompatDelegate getDelegate() {
        if (mDelegate == null) {
            mDelegate = AppCompatDelegate.create(this, null);
        }
        return mDelegate;
    }
}
| apache-2.0 |
dgestep/big-code-bang | src/main/resources/data-repository-test-user-templates/TestUserToken.java | 1331 | package ${topLevelDomain}.${companyName}.${productName}.model.repository.user;
import ${topLevelDomain}.${companyName}.${productName}.model.data.UserToken;
import ${topLevelDomain}.${companyName}.${productName}.model.repository.ApplicationTestCase;
import org.junit.Assert;
import org.junit.Test;
import javax.annotation.Resource;
import java.sql.Timestamp;
import java.util.UUID;
public class TestUserToken extends ApplicationTestCase {

    // Injected by the test context; field name must match the resource wiring.
    @Resource(name = "UserTokenRepository")
    private UserTokenRepository userTokenRepository;

    /**
     * Verifies that updateByEmail rewrites the e-mail address on an existing token row.
     */
    @Test
    public void testUpdateByEmail() {
        // Persist a token owned by the "from" address.
        UserToken token = new UserToken();
        token.setCreateTs(new Timestamp(System.currentTimeMillis()));
        token.setEmailAddress("from@gmail.com");
        token.setTokenUuid(UUID.randomUUID().toString());
        token.setUserUuid("user1");
        userTokenRepository.add(token);

        UserToken stored = userTokenRepository.retrieve(UserToken.class, token.getTokenUuid());
        Assert.assertNotNull(stored);

        // Rewrite the address and confirm exactly one row was touched.
        int updatedRows = userTokenRepository.updateByEmail("from@gmail.com", "to@gmail.com");
        Assert.assertEquals(1, updatedRows);

        // Re-fetch and confirm the new address is persisted.
        stored = userTokenRepository.retrieve(UserToken.class, token.getTokenUuid());
        Assert.assertNotNull(stored);
        Assert.assertEquals("to@gmail.com", stored.getEmailAddress());
    }
}
| apache-2.0 |
aldridged/gtg-gts-bhp | src/org/opengts/servers/icare/package-info.java | 1133 | // ----------------------------------------------------------------------------
// Copyright 2006-2010, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2010/04/25 Martin D. Flynn
// -Initial release
// ----------------------------------------------------------------------------
/**
*** Contains the ICare device communication server support.
**/
package org.opengts.servers.icare;
| apache-2.0 |
gusavila92/java-android-websocket-client | src/main/java/tech/gusavila92/websocketclient/exceptions/IllegalSchemeException.java | 301 | package tech.gusavila92.websocketclient.exceptions;
/**
 * Exception which indicates that the received URI scheme is invalid.
 *
 * @author Gustavo Avila
 *
 */
public class IllegalSchemeException extends IllegalArgumentException {

    // Fix: an explicit serialVersionUID pins the serialized form of this
    // Serializable exception instead of relying on the compiler-computed
    // default, which changes whenever the class is recompiled differently.
    private static final long serialVersionUID = 1L;

    /**
     * Creates the exception.
     *
     * @param message detail message describing the invalid scheme
     */
    public IllegalSchemeException(String message) {
        super(message);
    }
}
| apache-2.0 |
apache/santuario-java | src/main/java/org/apache/xml/security/stax/impl/processor/input/XMLEncryptedKeyInputHandler.java | 18812 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.xml.security.stax.impl.processor.input;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.spec.MGF1ParameterSpec;
import java.util.Base64;
import java.util.Deque;
import javax.crypto.Cipher;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.spec.OAEPParameterSpec;
import javax.crypto.spec.PSource;
import javax.crypto.spec.SecretKeySpec;
import javax.xml.bind.JAXBElement;
import org.apache.xml.security.binding.xmldsig.DigestMethodType;
import org.apache.xml.security.binding.xmldsig.KeyInfoType;
import org.apache.xml.security.binding.xmlenc.CipherValueType;
import org.apache.xml.security.binding.xmlenc.EncryptedKeyType;
import org.apache.xml.security.binding.xmlenc11.MGFType;
import org.apache.xml.security.binding.xop.Include;
import org.apache.xml.security.exceptions.XMLSecurityException;
import org.apache.xml.security.stax.config.JCEAlgorithmMapper;
import org.apache.xml.security.stax.ext.AbstractInputSecurityHeaderHandler;
import org.apache.xml.security.stax.ext.InboundSecurityContext;
import org.apache.xml.security.stax.ext.InputProcessorChain;
import org.apache.xml.security.stax.ext.XMLSecurityConstants;
import org.apache.xml.security.stax.ext.XMLSecurityProperties;
import org.apache.xml.security.stax.ext.XMLSecurityUtils;
import org.apache.xml.security.stax.ext.stax.XMLSecEvent;
import org.apache.xml.security.stax.impl.securityToken.AbstractInboundSecurityToken;
import org.apache.xml.security.stax.impl.util.IDGenerator;
import org.apache.xml.security.stax.securityEvent.AlgorithmSuiteSecurityEvent;
import org.apache.xml.security.stax.securityEvent.EncryptedKeyTokenSecurityEvent;
import org.apache.xml.security.stax.securityToken.InboundSecurityToken;
import org.apache.xml.security.stax.securityToken.SecurityTokenConstants;
import org.apache.xml.security.stax.securityToken.SecurityTokenFactory;
import org.apache.xml.security.stax.securityToken.SecurityTokenProvider;
import org.apache.xml.security.utils.XMLUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An input handler for the EncryptedKey XML Structure
*
*/
public class XMLEncryptedKeyInputHandler extends AbstractInputSecurityHeaderHandler {

    private static final transient Logger LOG = LoggerFactory.getLogger(XMLEncryptedKeyInputHandler.class);

    /**
     * Parses the xenc:EncryptedKey structure out of the buffered event queue and
     * delegates the real work to the four-argument overload below.
     */
    @Override
    public void handle(final InputProcessorChain inputProcessorChain, final XMLSecurityProperties securityProperties,
                       final Deque<XMLSecEvent> eventQueue, final Integer index) throws XMLSecurityException {
        @SuppressWarnings("unchecked")
        final EncryptedKeyType encryptedKeyType =
                ((JAXBElement<EncryptedKeyType>) parseStructure(eventQueue, index, securityProperties)).getValue();
        final XMLSecEvent responsibleXMLSecStartXMLEvent = getResponsibleStartXMLEvent(eventQueue, index);
        handle(inputProcessorChain, encryptedKeyType, responsibleXMLSecStartXMLEvent, securityProperties);
    }

    /**
     * Registers a lazily-decrypting security-token provider for the given
     * EncryptedKey, fires an EncryptedKeyTokenSecurityEvent, and — if the
     * structure carries a ReferenceList — hands it to {@link #handleReferenceList}.
     * The wrapped key itself is only unwrapped when a consumer first asks the
     * token for key material.
     */
    public void handle(final InputProcessorChain inputProcessorChain,
                       final EncryptedKeyType encryptedKeyType,
                       final XMLSecEvent responsibleXMLSecStartXMLEvent,
                       final XMLSecurityProperties securityProperties) throws XMLSecurityException {

        // The EncryptionMethod is mandatory: without it we cannot pick the unwrap cipher.
        if (encryptedKeyType.getEncryptionMethod() == null) {
            throw new XMLSecurityException("stax.encryption.noEncAlgo");
        }
        // An Id is needed so the token can be registered and referenced; generate one if absent.
        if (encryptedKeyType.getId() == null) {
            encryptedKeyType.setId(IDGenerator.generateID(null));
        }

        final InboundSecurityContext inboundSecurityContext = inputProcessorChain.getSecurityContext();

        final SecurityTokenProvider<InboundSecurityToken> securityTokenProvider =
                new SecurityTokenProvider<InboundSecurityToken>() {

            // Cached token; created on first request and reused afterwards.
            private AbstractInboundSecurityToken securityToken;

            @Override
            public InboundSecurityToken getSecurityToken() throws XMLSecurityException {
                if (this.securityToken != null) {
                    return this.securityToken;
                }
                this.securityToken = new AbstractInboundSecurityToken(
                        inboundSecurityContext, encryptedKeyType.getId(),
                        SecurityTokenConstants.KeyIdentifier_EncryptedKey, true) {

                    // Raw decrypted key bytes, cached after the first unwrap.
                    private byte[] decryptedKey;

                    /**
                     * Returns (and caches) the secret key for the requested symmetric
                     * algorithm, unwrapping the EncryptedKey on first use.
                     */
                    @Override
                    public Key getKey(String algorithmURI, XMLSecurityConstants.AlgorithmUsage algorithmUsage, String correlationID)
                            throws XMLSecurityException {
                        Key key = getSecretKey().get(algorithmURI);
                        if (key != null) {
                            return key;
                        }
                        String algoFamily = JCEAlgorithmMapper.getJCEKeyAlgorithmFromURI(algorithmURI);
                        key = new SecretKeySpec(getSecret(this, correlationID, algorithmURI), algoFamily);
                        setSecretKey(algorithmURI, key);
                        return key;
                    }

                    @Override
                    public InboundSecurityToken getKeyWrappingToken() throws XMLSecurityException {
                        return getWrappingSecurityToken(this);
                    }

                    @Override
                    public SecurityTokenConstants.TokenType getTokenType() {
                        return SecurityTokenConstants.EncryptedKeyToken;
                    }

                    // Token that holds the key-encryption key, resolved from ds:KeyInfo.
                    private InboundSecurityToken wrappingSecurityToken;

                    /**
                     * Lazily resolves the wrapping (key-encryption) token from the
                     * EncryptedKey's KeyInfo and links it to the wrapped token.
                     */
                    private InboundSecurityToken getWrappingSecurityToken(InboundSecurityToken wrappedSecurityToken)
                            throws XMLSecurityException {
                        if (wrappingSecurityToken != null) {
                            return this.wrappingSecurityToken;
                        }
                        KeyInfoType keyInfoType = encryptedKeyType.getKeyInfo();
                        this.wrappingSecurityToken = SecurityTokenFactory.getInstance().getSecurityToken(
                                keyInfoType,
                                SecurityTokenConstants.KeyUsage_Decryption,
                                securityProperties,
                                inboundSecurityContext
                        );
                        this.wrappingSecurityToken.addWrappedToken(wrappedSecurityToken);
                        return this.wrappingSecurityToken;
                    }

                    /**
                     * Unwraps the CipherValue with the wrapping token's key and returns
                     * the raw secret bytes. On unwrap failure a random key of the
                     * expected length is fabricated instead of failing fast, to
                     * mitigate Bleichenbacher-style timing/oracle attacks.
                     */
                    private byte[] getSecret(InboundSecurityToken wrappedSecurityToken, String correlationID,
                                             String symmetricAlgorithmURI) throws XMLSecurityException {

                        if (this.decryptedKey != null) {
                            return this.decryptedKey;
                        }

                        String algorithmURI = encryptedKeyType.getEncryptionMethod().getAlgorithm();
                        if (algorithmURI == null) {
                            throw new XMLSecurityException("stax.encryption.noEncAlgo");
                        }
                        String jceName = JCEAlgorithmMapper.translateURItoJCEID(algorithmURI);
                        String jceProvider = JCEAlgorithmMapper.getJCEProviderFromURI(algorithmURI);
                        if (jceName == null) {
                            throw new XMLSecurityException("algorithms.NoSuchMap",
                                    new Object[] {algorithmURI});
                        }

                        final InboundSecurityToken wrappingSecurityToken = getWrappingSecurityToken(wrappedSecurityToken);
                        Cipher cipher;
                        try {
                            XMLSecurityConstants.AlgorithmUsage algorithmUsage;
                            // Asymmetric wrapping token => RSA-style key transport; otherwise symmetric key wrap.
                            if (wrappingSecurityToken.isAsymmetric()) {
                                algorithmUsage = XMLSecurityConstants.Asym_Key_Wrap;
                            } else {
                                algorithmUsage = XMLSecurityConstants.Sym_Key_Wrap;
                            }
                            if (jceProvider == null) {
                                cipher = Cipher.getInstance(jceName);
                            } else {
                                cipher = Cipher.getInstance(jceName, jceProvider);
                            }
                            // RSA-OAEP variants need explicit OAEP parameters (digest, MGF, PSource).
                            if (XMLSecurityConstants.NS_XENC11_RSAOAEP.equals(algorithmURI) ||
                                    XMLSecurityConstants.NS_XENC_RSAOAEPMGF1P.equals(algorithmURI)) {
                                final DigestMethodType digestMethodType =
                                        XMLSecurityUtils.getQNameType(encryptedKeyType.getEncryptionMethod().getContent(), XMLSecurityConstants.TAG_dsig_DigestMethod);
                                // SHA-1 is the spec-mandated default when no DigestMethod child is present.
                                String jceDigestAlgorithm = "SHA-1";
                                if (digestMethodType != null) {
                                    // Report the digest algorithm so algorithm-suite policies can veto it.
                                    AlgorithmSuiteSecurityEvent algorithmSuiteSecurityEvent = new AlgorithmSuiteSecurityEvent();
                                    algorithmSuiteSecurityEvent.setAlgorithmURI(digestMethodType.getAlgorithm());
                                    algorithmSuiteSecurityEvent.setAlgorithmUsage(XMLSecurityConstants.EncDig);
                                    algorithmSuiteSecurityEvent.setCorrelationID(correlationID);
                                    inboundSecurityContext.registerSecurityEvent(algorithmSuiteSecurityEvent);
                                    jceDigestAlgorithm = JCEAlgorithmMapper.translateURItoJCEID(digestMethodType.getAlgorithm());
                                }
                                PSource.PSpecified pSource = PSource.PSpecified.DEFAULT;
                                final byte[] oaepParams =
                                        XMLSecurityUtils.getQNameType(encryptedKeyType.getEncryptionMethod().getContent(), XMLSecurityConstants.TAG_xenc_OAEPparams);
                                if (oaepParams != null) {
                                    pSource = new PSource.PSpecified(oaepParams);
                                }
                                // MGF1 with SHA-1 is the default mask generation function.
                                MGF1ParameterSpec mgfParameterSpec = new MGF1ParameterSpec("SHA-1");
                                final MGFType mgfType =
                                        XMLSecurityUtils.getQNameType(encryptedKeyType.getEncryptionMethod().getContent(), XMLSecurityConstants.TAG_xenc11_MGF);
                                if (mgfType != null) {
                                    String jceMGFAlgorithm = JCEAlgorithmMapper.translateURItoJCEID(mgfType.getAlgorithm());
                                    mgfParameterSpec = new MGF1ParameterSpec(jceMGFAlgorithm);
                                }
                                OAEPParameterSpec oaepParameterSpec = new OAEPParameterSpec(jceDigestAlgorithm, "MGF1", mgfParameterSpec, pSource);
                                cipher.init(Cipher.UNWRAP_MODE, wrappingSecurityToken.getSecretKey(algorithmURI, algorithmUsage, correlationID), oaepParameterSpec);
                            } else {
                                cipher.init(Cipher.UNWRAP_MODE, wrappingSecurityToken.getSecretKey(algorithmURI, algorithmUsage, correlationID));
                            }
                            // The CipherValue must exist and be non-empty before we attempt to decode it.
                            if (encryptedKeyType.getCipherData() == null
                                    || encryptedKeyType.getCipherData().getCipherValue() == null
                                    || encryptedKeyType.getCipherData().getCipherValue().getContent() == null
                                    || encryptedKeyType.getCipherData().getCipherValue().getContent().isEmpty()) {
                                throw new XMLSecurityException("stax.encryption.noCipherValue");
                            }
                        } catch (NoSuchPaddingException | NoSuchAlgorithmException | InvalidAlgorithmParameterException
                                | InvalidKeyException | NoSuchProviderException e) {
                            throw new XMLSecurityException(e);
                        }

                        byte[] encryptedBytes = getEncryptedBytes(encryptedKeyType.getCipherData().getCipherValue());

                        // SHA-1 over the ciphertext serves only as an identifier for this
                        // encrypted key (e.g. for later references), not as a security control.
                        byte[] sha1Bytes = generateDigest(encryptedBytes);
                        String sha1Identifier = XMLUtils.encodeToString(sha1Bytes);
                        super.setSha1Identifier(sha1Identifier);

                        try {
                            Key key = cipher.unwrap(encryptedBytes,
                                    jceName,
                                    Cipher.SECRET_KEY);
                            return this.decryptedKey = key.getEncoded();
                        } catch (IllegalStateException e) {
                            throw new XMLSecurityException(e);
                        } catch (Exception e) {
                            // Deliberate: do NOT fail fast on bad padding etc. — return a
                            // random key of the right length so attackers cannot use error
                            // behavior as a decryption oracle (timing-attack mitigation).
                            LOG.warn("Unwrapping of the encrypted key failed with error: " + e.getMessage() + ". " +
                                    "Generating a faked one to mitigate timing attacks.");
                            int keyLength = JCEAlgorithmMapper.getKeyLengthFromURI(symmetricAlgorithmURI);
                            this.decryptedKey = XMLSecurityConstants.generateBytes(keyLength / 8);
                            return this.decryptedKey;
                        }
                    }
                };
                this.securityToken.setElementPath(responsibleXMLSecStartXMLEvent.getElementPath());
                this.securityToken.setXMLSecEvent(responsibleXMLSecStartXMLEvent);
                return this.securityToken;
            }

            /**
             * Collects the ciphertext from the CipherValue content: inline text is
             * concatenated and Base64-decoded; an xop:Include child instead routes
             * to the (subclass-provided) attachment lookup.
             */
            private byte[] getEncryptedBytes(CipherValueType cipherValue) throws XMLSecurityException {
                StringBuilder sb = new StringBuilder();
                for (Object obj : cipherValue.getContent()) {
                    if (obj instanceof String) {
                        sb.append((String)obj);
                    } else if (obj instanceof JAXBElement<?>) {
                        JAXBElement<?> element = (JAXBElement<?>)obj;
                        if (XMLSecurityConstants.TAG_XOP_INCLUDE.equals(element.getName())) {
                            Include include = (Include)element.getValue();
                            if (include != null && include.getHref() != null && include.getHref().startsWith("cid:")) {
                                return getBytesFromAttachment(include.getHref(), securityProperties);
                            }
                        }
                    }
                }
                return Base64.getMimeDecoder().decode(sb.toString());
            }

            @Override
            public String getId() {
                return encryptedKeyType.getId();
            }
        };

        //register the key token for decryption:
        inboundSecurityContext.registerSecurityTokenProvider(encryptedKeyType.getId(), securityTokenProvider);

        //fire a tokenSecurityEvent
        EncryptedKeyTokenSecurityEvent tokenSecurityEvent = new EncryptedKeyTokenSecurityEvent();
        tokenSecurityEvent.setSecurityToken(securityTokenProvider.getSecurityToken());
        tokenSecurityEvent.setCorrelationID(encryptedKeyType.getId());
        inboundSecurityContext.registerSecurityEvent(tokenSecurityEvent);

        //if this EncryptedKey structure contains a reference list, delegate it to a subclass
        if (encryptedKeyType.getReferenceList() != null) {
            handleReferenceList(inputProcessorChain, encryptedKeyType, securityProperties);
        }
    }

    /**
     * SHA-1 digest used purely as a fingerprint/identifier of the encrypted
     * key bytes (see setSha1Identifier above) — not for integrity protection.
     */
    private byte[] generateDigest(byte[] inputBytes) throws XMLSecurityException {
        try {
            return MessageDigest.getInstance("SHA-1").digest(inputBytes);
        } catch (NoSuchAlgorithmException e) {
            throw new XMLSecurityException(e);
        }
    }

    /**
     * Hook for subclasses that need to process the xenc:ReferenceList; the
     * base implementation intentionally does nothing.
     */
    protected void handleReferenceList(final InputProcessorChain inputProcessorChain,
                                       final EncryptedKeyType encryptedKeyType,
                                       final XMLSecurityProperties securityProperties) throws XMLSecurityException {
        // do nothing
    }

    /**
     * Hook for subclasses that can resolve cid: attachment references; the
     * base implementation does not support attachments.
     */
    protected byte[] getBytesFromAttachment(String xopUri, final XMLSecurityProperties securityProperties) throws XMLSecurityException {
        throw new XMLSecurityException("errorMessages.NotYetImplementedException");
    }

    /*
    <xenc:EncryptedKey xmlns:xenc="http://www.w3.org/2001/04/xmlenc#" Id="EncKeyId-1483925398">
    <xenc:EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-1_5" />
    <ds:KeyInfo xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
    <wsse:SecurityTokenReference xmlns:wsse="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd">
    <wsse:KeyIdentifier EncodingType="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary"
    ValueType="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-x509-token-profile-1.0#X509SubjectKeyIdentifier">pHoiKNGY2YsLBKxwIV+jURt858M=</wsse:KeyIdentifier>
    </wsse:SecurityTokenReference>
    </ds:KeyInfo>
    <xenc:CipherData>
    <xenc:CipherValue>Khsa9SN3ALNXOgGDKOqihvfwGsXb9QN/q4Fpi9uuThgz+3D4oRSMkrGSPCqwG13vddvHywGAA/XNbWNT+5Xivz3lURCDCc2H/92YlXXo/crQNJnPlLrLZ81bGOzbNo7lnYQBLp/77K7b1bhldZAeV9ZfEW7DjbOMZ+k1dnDCu3A=</xenc:CipherValue>
    </xenc:CipherData>
    <xenc:ReferenceList>
    <xenc:DataReference URI="#EncDataId-1612925417" />
    </xenc:ReferenceList>
    </xenc:EncryptedKey>
     */
}
| apache-2.0 |
ecurtin/spark-bench | cli/src/main/java/com/ibm/sparktc/sparkbench/datageneration/mlgenerator/Random16.java | 18912 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.sparktc.sparkbench.datageneration.mlgenerator;
/**
* This file is copied from Hadoop package org.apache.hadoop.examples.terasort.
*/
/**
* This class implements a 128-bit linear congruential generator.
* Specifically, if X0 is the most recently issued 128-bit random
* number (or a seed of 0 if no random number has already been generated,
* the next number to be generated, X1, is equal to:
* X1 = (a * X0 + c) mod 2**128
* where a is 47026247687942121848144207491837523525
* or 0x2360ed051fc65da44385df649fccf645
* and c is 98910279301475397889117759788405497857
* or 0x4a696d47726179524950202020202001
* The coefficient "a" is suggested by:
* Pierre L'Ecuyer, "Tables of linear congruential generators of different
* sizes and good lattice structure", Mathematics of Computation, 68
* pp. 249 - 260 (1999)
* http://www.ams.org/mcom/1999-68-225/S0025-5718-99-00996-5/S0025-5718-99-00996-5.pdf
* The constant "c" meets the simple suggestion by the same reference that
* it be odd.
*
* There is also a facility for quickly advancing the state of the
* generator by a fixed number of steps - this facilitates parallel
* generation.
*
* This is based on 1.0 of rand16.c from Chris Nyberg
* <chris.nyberg@ordinal.com>.
*/
public class Random16 {
/**
* The "Gen" array contain powers of 2 of the linear congruential generator.
* The index 0 struct contain the "a" coefficient and "c" constant for the
* generator. That is, the generator is:
* f(x) = (Gen[0].a * x + Gen[0].c) mod 2**128
*
* All structs after the first contain an "a" and "c" that
* comprise the square of the previous function.
*
* f**2(x) = (Gen[1].a * x + Gen[1].c) mod 2**128
* f**4(x) = (Gen[2].a * x + Gen[2].c) mod 2**128
* f**8(x) = (Gen[3].a * x + Gen[3].c) mod 2**128
* ...
*/
/**
 * Immutable (a, c) coefficient pair for one squaring step of the
 * 128-bit linear congruential generator: f(x) = (a * x + c) mod 2**128.
 */
private static class RandomConstant {
    final Unsigned16 a;   // multiplier coefficient
    final Unsigned16 c;   // additive constant

    // Both arguments are 32-hex-digit strings parsed into 128-bit values.
    public RandomConstant(String left, String right) {
        a = new Unsigned16(left);
        c = new Unsigned16(right);
    }
}
private static final RandomConstant[] genArray = new RandomConstant[]{
/* [ 0] */ new RandomConstant("2360ed051fc65da44385df649fccf645",
"4a696d47726179524950202020202001"),
/* [ 1] */ new RandomConstant("17bce35bdf69743c529ed9eb20e0ae99",
"95e0e48262b3edfe04479485c755b646"),
/* [ 2] */ new RandomConstant("f4dd417327db7a9bd194dfbe42d45771",
"882a02c315362b60765f100068b33a1c"),
/* [ 3] */ new RandomConstant("6347af777a7898f6d1a2d6f33505ffe1",
"5efc4abfaca23e8ca8edb1f2dfbf6478"),
/* [ 4] */ new RandomConstant("b6a4239f3b315f84f6ef6d3d288c03c1",
"f25bd15439d16af594c1b1bafa6239f0"),
/* [ 5] */ new RandomConstant("2c82901ad1cb0cd182b631ba6b261781",
"89ca67c29c9397d59c612596145db7e0"),
/* [ 6] */ new RandomConstant("dab03f988288676ee49e66c4d2746f01",
"8b6ae036713bd578a8093c8eae5c7fc0"),
/* [ 7] */ new RandomConstant("602167331d86cf5684fe009a6d09de01",
"98a2542fd23d0dbdff3b886cdb1d3f80"),
/* [ 8] */ new RandomConstant("61ecb5c24d95b058f04c80a23697bc01",
"954db923fdb7933e947cd1edcecb7f00"),
/* [ 9] */ new RandomConstant("4a5c31e0654c28aa60474e83bf3f7801",
"00be4a36657c98cd204e8c8af7dafe00"),
/* [ 10] */ new RandomConstant("ae4f079d54fbece1478331d3c6bef001",
"991965329dccb28d581199ab18c5fc00"),
/* [ 11] */ new RandomConstant("101b8cb830c7cb927ff1ed50ae7de001",
"e1a8705b63ad5b8cd6c3d268d5cbf800"),
/* [ 12] */ new RandomConstant("f54a27fc056b00e7563f3505e0fbc001",
"2b657bbfd6ed9d632079e70c3c97f000"),
/* [ 13] */ new RandomConstant("df8a6fc1a833d201f98d719dd1f78001",
"59b60ee4c52fa49e9fe90682bd2fe000"),
/* [ 14] */ new RandomConstant("5480a5015f101a4ea7e3f183e3ef0001",
"cc099c88030679464fe86aae8a5fc000"),
/* [ 15] */ new RandomConstant("a498509e76e5d7925f539c28c7de0001",
"06b9abff9f9f33dd30362c0154bf8000"),
/* [ 16] */ new RandomConstant("0798a3d8b10dc72e60121cd58fbc0001",
"e296707121688d5a0260b293a97f0000"),
/* [ 17] */ new RandomConstant("1647d1e78ec02e665fafcbbb1f780001",
"189ffc4701ff23cb8f8acf6b52fe0000"),
/* [ 18] */ new RandomConstant("a7c982285e72bf8c0c8ddfb63ef00001",
"5141110ab208fb9d61fb47e6a5fc0000"),
/* [ 19] */ new RandomConstant("3eb78ee8fb8c56dbc5d4e06c7de00001",
"3c97caa62540f2948d8d340d4bf80000"),
/* [ 20] */ new RandomConstant("72d03b6f4681f2f9fe8e44d8fbc00001",
"1b25cb9cfe5a0c963174f91a97f00000"),
/* [ 21] */ new RandomConstant("ea85f81e4f502c9bc8ae99b1f7800001",
"0c644570b4a487103c5436352fe00000"),
/* [ 22] */ new RandomConstant("629c320db08b00c6bfa57363ef000001",
"3d0589c28869472bde517c6a5fc00000"),
/* [ 23] */ new RandomConstant("c5c4b9ce268d074a386be6c7de000001",
"bc95e5ab36477e65534738d4bf800000"),
/* [ 24] */ new RandomConstant("f30bbbbed1596187555bcd8fbc000001",
"ddb02ff72a031c01011f71a97f000000"),
/* [ 25] */ new RandomConstant("4a1000fb26c9eeda3cc79b1f78000001",
"2561426086d9acdb6c82e352fe000000"),
/* [ 26] */ new RandomConstant("89fb5307f6bf8ce2c1cf363ef0000001",
"64a788e3c118ed1c8215c6a5fc000000"),
/* [ 27] */ new RandomConstant("830b7b3358a5d67ea49e6c7de0000001",
"e65ea321908627cfa86b8d4bf8000000"),
/* [ 28] */ new RandomConstant("fd8a51da91a69fe1cd3cd8fbc0000001",
"53d27225604d85f9e1d71a97f0000000"),
/* [ 29] */ new RandomConstant("901a48b642b90b55aa79b1f780000001",
"ca5ec7a3ed1fe55e07ae352fe0000000"),
/* [ 30] */ new RandomConstant("118cdefdf32144f394f363ef00000001",
"4daebb2e085330651f5c6a5fc0000000"),
/* [ 31] */ new RandomConstant("0a88c0a91cff430829e6c7de00000001",
"9d6f1a00a8f3f76e7eb8d4bf80000000"),
/* [ 32] */ new RandomConstant("433bef4314f16a9453cd8fbc00000001",
"158c62f2b31e496dfd71a97f00000000"),
/* [ 33] */ new RandomConstant("c294b02995ae6738a79b1f7800000001",
"290e84a2eb15fd1ffae352fe00000000"),
/* [ 34] */ new RandomConstant("913575e0da8b16b14f363ef000000001",
"e3dc1bfbe991a34ff5c6a5fc00000000"),
/* [ 35] */ new RandomConstant("2f61b9f871cf4e629e6c7de000000001",
"ddf540d020b9eadfeb8d4bf800000000"),
/* [ 36] */ new RandomConstant("78d26ccbd68320c53cd8fbc000000001",
"8ee4950177ce66bfd71a97f000000000"),
/* [ 37] */ new RandomConstant("8b7ebd037898518a79b1f78000000001",
"39e0f787c907117fae352fe000000000"),
/* [ 38] */ new RandomConstant("0b5507b61f78e314f363ef0000000001",
"659d2522f7b732ff5c6a5fc000000000"),
/* [ 39] */ new RandomConstant("4f884628f812c629e6c7de0000000001",
"9e8722938612a5feb8d4bf8000000000"),
/* [ 40] */ new RandomConstant("be896744d4a98c53cd8fbc0000000001",
"e941a65d66b64bfd71a97f0000000000"),
/* [ 41] */ new RandomConstant("daf63a553b6318a79b1f780000000001",
"7b50d19437b097fae352fe0000000000"),
/* [ 42] */ new RandomConstant("2d7a23d8bf06314f363ef00000000001",
"59d7b68e18712ff5c6a5fc0000000000"),
/* [ 43] */ new RandomConstant("392b046a9f0c629e6c7de00000000001",
"4087bab2d5225feb8d4bf80000000000"),
/* [ 44] */ new RandomConstant("eb30fbb9c218c53cd8fbc00000000001",
"b470abc03b44bfd71a97f00000000000"),
/* [ 45] */ new RandomConstant("b9cdc30594318a79b1f7800000000001",
"366630eaba897fae352fe00000000000"),
/* [ 46] */ new RandomConstant("014ab453686314f363ef000000000001",
"a2dfc77e8512ff5c6a5fc00000000000"),
/* [ 47] */ new RandomConstant("395221c7d0c629e6c7de000000000001",
"1e0d25a14a25feb8d4bf800000000000"),
/* [ 48] */ new RandomConstant("4d972813a18c53cd8fbc000000000001",
"9d50a5d3944bfd71a97f000000000000"),
/* [ 49] */ new RandomConstant("06f9e2374318a79b1f78000000000001",
"bf7ab5eb2897fae352fe000000000000"),
/* [ 50] */ new RandomConstant("bd220cae86314f363ef0000000000001",
"925b14e6512ff5c6a5fc000000000000"),
/* [ 51] */ new RandomConstant("36fd3a5d0c629e6c7de0000000000001",
"724cce0ca25feb8d4bf8000000000000"),
/* [ 52] */ new RandomConstant("60def8ba18c53cd8fbc0000000000001",
"1af42d1944bfd71a97f0000000000000"),
/* [ 53] */ new RandomConstant("8d500174318a79b1f780000000000001",
"0f529e32897fae352fe0000000000000"),
/* [ 54] */ new RandomConstant("48e842e86314f363ef00000000000001",
"844e4c6512ff5c6a5fc0000000000000"),
/* [ 55] */ new RandomConstant("4af185d0c629e6c7de00000000000001",
"9f40d8ca25feb8d4bf80000000000000"),
/* [ 56] */ new RandomConstant("7a670ba18c53cd8fbc00000000000001",
"9912b1944bfd71a97f00000000000000"),
/* [ 57] */ new RandomConstant("86de174318a79b1f7800000000000001",
"9c69632897fae352fe00000000000000"),
/* [ 58] */ new RandomConstant("55fc2e86314f363ef000000000000001",
"e1e2c6512ff5c6a5fc00000000000000"),
/* [ 59] */ new RandomConstant("ccf85d0c629e6c7de000000000000001",
"68058ca25feb8d4bf800000000000000"),
/* [ 60] */ new RandomConstant("1df0ba18c53cd8fbc000000000000001",
"610b1944bfd71a97f000000000000000"),
/* [ 61] */ new RandomConstant("4be174318a79b1f78000000000000001",
"061632897fae352fe000000000000000"),
/* [ 62] */ new RandomConstant("d7c2e86314f363ef0000000000000001",
"1c2c6512ff5c6a5fc000000000000000"),
/* [ 63] */ new RandomConstant("af85d0c629e6c7de0000000000000001",
"7858ca25feb8d4bf8000000000000000"),
/* [ 64] */ new RandomConstant("5f0ba18c53cd8fbc0000000000000001",
"f0b1944bfd71a97f0000000000000000"),
/* [ 65] */ new RandomConstant("be174318a79b1f780000000000000001",
"e1632897fae352fe0000000000000000"),
/* [ 66] */ new RandomConstant("7c2e86314f363ef00000000000000001",
"c2c6512ff5c6a5fc0000000000000000"),
/* [ 67] */ new RandomConstant("f85d0c629e6c7de00000000000000001",
"858ca25feb8d4bf80000000000000000"),
/* [ 68] */ new RandomConstant("f0ba18c53cd8fbc00000000000000001",
"0b1944bfd71a97f00000000000000000"),
/* [ 69] */ new RandomConstant("e174318a79b1f7800000000000000001",
"1632897fae352fe00000000000000000"),
/* [ 70] */ new RandomConstant("c2e86314f363ef000000000000000001",
"2c6512ff5c6a5fc00000000000000000"),
/* [ 71] */ new RandomConstant("85d0c629e6c7de000000000000000001",
"58ca25feb8d4bf800000000000000000"),
/* [ 72] */ new RandomConstant("0ba18c53cd8fbc000000000000000001",
"b1944bfd71a97f000000000000000000"),
/* [ 73] */ new RandomConstant("174318a79b1f78000000000000000001",
"632897fae352fe000000000000000000"),
/* [ 74] */ new RandomConstant("2e86314f363ef0000000000000000001",
"c6512ff5c6a5fc000000000000000000"),
/* [ 75] */ new RandomConstant("5d0c629e6c7de0000000000000000001",
"8ca25feb8d4bf8000000000000000000"),
/* [ 76] */ new RandomConstant("ba18c53cd8fbc0000000000000000001",
"1944bfd71a97f0000000000000000000"),
/* [ 77] */ new RandomConstant("74318a79b1f780000000000000000001",
"32897fae352fe0000000000000000000"),
/* [ 78] */ new RandomConstant("e86314f363ef00000000000000000001",
"6512ff5c6a5fc0000000000000000000"),
/* [ 79] */ new RandomConstant("d0c629e6c7de00000000000000000001",
"ca25feb8d4bf80000000000000000000"),
/* [ 80] */ new RandomConstant("a18c53cd8fbc00000000000000000001",
"944bfd71a97f00000000000000000000"),
/* [ 81] */ new RandomConstant("4318a79b1f7800000000000000000001",
"2897fae352fe00000000000000000000"),
/* [ 82] */ new RandomConstant("86314f363ef000000000000000000001",
"512ff5c6a5fc00000000000000000000"),
/* [ 83] */ new RandomConstant("0c629e6c7de000000000000000000001",
"a25feb8d4bf800000000000000000000"),
/* [ 84] */ new RandomConstant("18c53cd8fbc000000000000000000001",
"44bfd71a97f000000000000000000000"),
/* [ 85] */ new RandomConstant("318a79b1f78000000000000000000001",
"897fae352fe000000000000000000000"),
/* [ 86] */ new RandomConstant("6314f363ef0000000000000000000001",
"12ff5c6a5fc000000000000000000000"),
/* [ 87] */ new RandomConstant("c629e6c7de0000000000000000000001",
"25feb8d4bf8000000000000000000000"),
/* [ 88] */ new RandomConstant("8c53cd8fbc0000000000000000000001",
"4bfd71a97f0000000000000000000000"),
/* [ 89] */ new RandomConstant("18a79b1f780000000000000000000001",
"97fae352fe0000000000000000000000"),
/* [ 90] */ new RandomConstant("314f363ef00000000000000000000001",
"2ff5c6a5fc0000000000000000000000"),
/* [ 91] */ new RandomConstant("629e6c7de00000000000000000000001",
"5feb8d4bf80000000000000000000000"),
/* [ 92] */ new RandomConstant("c53cd8fbc00000000000000000000001",
"bfd71a97f00000000000000000000000"),
/* [ 93] */ new RandomConstant("8a79b1f7800000000000000000000001",
"7fae352fe00000000000000000000000"),
/* [ 94] */ new RandomConstant("14f363ef000000000000000000000001",
"ff5c6a5fc00000000000000000000000"),
/* [ 95] */ new RandomConstant("29e6c7de000000000000000000000001",
"feb8d4bf800000000000000000000000"),
/* [ 96] */ new RandomConstant("53cd8fbc000000000000000000000001",
"fd71a97f000000000000000000000000"),
/* [ 97] */ new RandomConstant("a79b1f78000000000000000000000001",
"fae352fe000000000000000000000000"),
/* [ 98] */ new RandomConstant("4f363ef0000000000000000000000001",
"f5c6a5fc000000000000000000000000"),
/* [ 99] */ new RandomConstant("9e6c7de0000000000000000000000001",
"eb8d4bf8000000000000000000000000"),
/* [100] */ new RandomConstant("3cd8fbc0000000000000000000000001",
"d71a97f0000000000000000000000000"),
/* [101] */ new RandomConstant("79b1f780000000000000000000000001",
"ae352fe0000000000000000000000000"),
/* [102] */ new RandomConstant("f363ef00000000000000000000000001",
"5c6a5fc0000000000000000000000000"),
/* [103] */ new RandomConstant("e6c7de00000000000000000000000001",
"b8d4bf80000000000000000000000000"),
/* [104] */ new RandomConstant("cd8fbc00000000000000000000000001",
"71a97f00000000000000000000000000"),
/* [105] */ new RandomConstant("9b1f7800000000000000000000000001",
"e352fe00000000000000000000000000"),
/* [106] */ new RandomConstant("363ef000000000000000000000000001",
"c6a5fc00000000000000000000000000"),
/* [107] */ new RandomConstant("6c7de000000000000000000000000001",
"8d4bf800000000000000000000000000"),
/* [108] */ new RandomConstant("d8fbc000000000000000000000000001",
"1a97f000000000000000000000000000"),
/* [109] */ new RandomConstant("b1f78000000000000000000000000001",
"352fe000000000000000000000000000"),
/* [110] */ new RandomConstant("63ef0000000000000000000000000001",
"6a5fc000000000000000000000000000"),
/* [111] */ new RandomConstant("c7de0000000000000000000000000001",
"d4bf8000000000000000000000000000"),
/* [112] */ new RandomConstant("8fbc0000000000000000000000000001",
"a97f0000000000000000000000000000"),
/* [113] */ new RandomConstant("1f780000000000000000000000000001",
"52fe0000000000000000000000000000"),
/* [114] */ new RandomConstant("3ef00000000000000000000000000001",
"a5fc0000000000000000000000000000"),
/* [115] */ new RandomConstant("7de00000000000000000000000000001",
"4bf80000000000000000000000000000"),
/* [116] */ new RandomConstant("fbc00000000000000000000000000001",
"97f00000000000000000000000000000"),
/* [117] */ new RandomConstant("f7800000000000000000000000000001",
"2fe00000000000000000000000000000"),
/* [118] */ new RandomConstant("ef000000000000000000000000000001",
"5fc00000000000000000000000000000"),
/* [119] */ new RandomConstant("de000000000000000000000000000001",
"bf800000000000000000000000000000"),
/* [120] */ new RandomConstant("bc000000000000000000000000000001",
"7f000000000000000000000000000000"),
/* [121] */ new RandomConstant("78000000000000000000000000000001",
"fe000000000000000000000000000000"),
/* [122] */ new RandomConstant("f0000000000000000000000000000001",
"fc000000000000000000000000000000"),
/* [123] */ new RandomConstant("e0000000000000000000000000000001",
"f8000000000000000000000000000000"),
/* [124] */ new RandomConstant("c0000000000000000000000000000001",
"f0000000000000000000000000000000"),
/* [125] */ new RandomConstant("80000000000000000000000000000001",
"e0000000000000000000000000000000"),
/* [126] */ new RandomConstant("00000000000000000000000000000001",
"c0000000000000000000000000000000"),
/* [127] */ new RandomConstant("00000000000000000000000000000001",
"80000000000000000000000000000000")};
/**
 * Computes the random number that is {@code advance} steps ahead of an
 * initial state of 0, by composing the precomputed powers-of-two of the
 * linear congruential generator: for each set bit i of {@code advance},
 * the generator is advanced by f**(2**i) in a single multiply-and-add.
 *
 * @param advance number of generator steps to skip ahead
 * @return the generator state after {@code advance} steps from 0
 */
public static Unsigned16 skipAhead(Unsigned16 advance) {
    Unsigned16 result = new Unsigned16();
    // Low 64 bits select powers f**(2**0) .. f**(2**63),
    // high 64 bits select powers f**(2**64) .. f**(2**127).
    applyPowers(result, advance.getLow8(), 0);
    applyPowers(result, advance.getHigh8(), 64);
    return result;
}

/**
 * Advances {@code value} by f**(2**(offset + i)) for every set bit i of
 * {@code bitMap}, clearing bits as they are consumed so the loop can stop
 * early once the mask is exhausted.
 */
private static void applyPowers(Unsigned16 value, long bitMap, int offset) {
    for (int i = 0; bitMap != 0 && i < 64; i++) {
        if ((bitMap & (1L << i)) != 0) {
            value.multiply(genArray[offset + i].a);
            value.add(genArray[offset + i].c);
            bitMap &= ~(1L << i);
        }
    }
}
/**
 * Generate the next 16 byte random number.
 *
 * <p>Mutates {@code rand} in place by one generator step,
 * x' = a * x + c, using the single-step constants in {@code genArray[0]}
 * (entry i of genArray advances by 2**i steps, so entry 0 is one step).
 */
public static void nextRand(Unsigned16 rand) {
    /* advance the random number forward once using the linear congruential
     * generator, and then return the new random number
     */
    rand.multiply(genArray[0].a);
    rand.add(genArray[0].c);
}
}
| apache-2.0 |
hardfish/justTest | cloudsigma2/src/test/java/org/jclouds/cloudsigma2/compute/CloudSigma2ComputeServiceLiveTest.java | 2208 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.cloudsigma2.compute;
import com.google.inject.Module;
import org.jclouds.compute.domain.ExecResponse;
import org.jclouds.compute.domain.NodeMetadata;
import org.jclouds.compute.domain.Template;
import org.jclouds.compute.domain.TemplateBuilder;
import org.jclouds.compute.internal.BaseComputeServiceLiveTest;
import org.jclouds.sshj.config.SshjSshClientModule;
import org.testng.annotations.Test;
@Test(groups = "live", testName = "CloudSigma2ComputeServiceLiveTest")
public class CloudSigma2ComputeServiceLiveTest extends BaseComputeServiceLiveTest {
public CloudSigma2ComputeServiceLiveTest() {
provider = "cloudsigma2";
}
@Override
protected Module getSshModule() {
return new SshjSshClientModule();
}
// CloudSigma templates require manual interaction to change the password on the first login.
// The only way to automatically authenticate to a server is to use an image that supports Cloud Init
// and provide the public key
@Override
protected Template buildTemplate(TemplateBuilder templateBuilder) {
Template template = super.buildTemplate(templateBuilder);
template.getOptions().authorizePublicKey(keyPair.get("public"));
return template;
}
@Override
protected void checkResponseEqualsHostname(ExecResponse execResponse, NodeMetadata node1) {
// CloudSigma does not return the hostname
}
}
| apache-2.0 |
nikitamarchenko/open-kilda | services/src/atdd/src/test/java/org/openkilda/atdd/floodlight/KafkaBreakException.java | 262 | package org.openkilda.atdd.floodlight;
/**
 * Exception signaling a failure in the Kafka-related test tooling.
 */
public class KafkaBreakException extends Exception {
    // Exception is Serializable; pin the serial form explicitly so it does not
    // vary with compiler-generated defaults (Effective Java, Item 87).
    private static final long serialVersionUID = 1L;

    /**
     * @param s detail message describing the failure
     */
    public KafkaBreakException(String s) {
        super(s);
    }

    /**
     * @param s detail message describing the failure
     * @param throwable underlying cause, preserved for the stack trace
     */
    public KafkaBreakException(String s, Throwable throwable) {
        super(s, throwable);
    }
}
| apache-2.0 |
huihoo/olat | olat7.8/src/main/java/org/olat/presentation/framework/core/components/form/flexible/FormMultipartItem.java | 1157 | /**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) frentix GmbH<br>
* http://www.frentix.com<br>
* <p>
*/
package org.olat.presentation.framework.core.components.form.flexible;
/**
* <h3>Description:</h3>
* <p>
* This is a marker interface to tell the flexi form that this item needs a multipart request
* <p>
* Initial Date: 08.12.2008 <br>
*
* @author Florian Gnaegi, frentix GmbH, http://www.frentix.com
*/
public interface FormMultipartItem extends FormItem {
    /**
     * Maximum allowed upload size for this form item, in kilobytes.
     *
     * @return the upload size limit in KB
     */
    public int getMaxUploadSizeKB();
}
| apache-2.0 |
zhangwenzhuo/Rapid | plugins/springmvc/java_src/javacommon/base/BaseSpringController.java | 4053 | package javacommon.base;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.SimpleDateFormat;
import java.util.Date;
import javacommon.util.ConvertRegisterHelper;
import javacommon.util.PageRequestFactory;
import javax.servlet.http.HttpServletRequest;
import org.springframework.beans.propertyeditors.CustomDateEditor;
import org.springframework.beans.propertyeditors.CustomNumberEditor;
import org.springframework.ui.ModelMap;
import org.springframework.util.Assert;
import org.springframework.util.ReflectionUtils;
import org.springframework.web.bind.ServletRequestDataBinder;
import org.springframework.web.servlet.mvc.multiaction.MultiActionController;
import cn.org.rapid_framework.beanutils.BeanUtils;
import cn.org.rapid_framework.page.Page;
import cn.org.rapid_framework.page.PageRequest;
import com.sun.jmx.snmp.Timestamp;
/**
 * Base class for Spring MVC multi-action controllers. Registers the shared
 * type converters once, configures lenient property editors for request
 * binding, and offers helpers for bean copying and for exposing
 * Page/PageRequest data to the view layer (extremeTable).
 */
public class BaseSpringController extends MultiActionController{
    protected final static String CREATED_SUCCESS = "创建成功";
    protected final static String UPDATE_SUCCESS = "更新成功";
    protected final static String DELETE_SUCCESS = "删除成功";

    static {
        // Register the application-wide converters once per class load.
        ConvertRegisterHelper.registerConverters();
    }

    /** Copies all matching properties from {@code source} onto {@code target}. */
    public static void copyProperties(Object target,Object source) {
        BeanUtils.copyProperties(target, source);
    }

    /** Creates a new {@code destClass} instance populated from {@code orig}'s properties. */
    public static <T> T copyProperties(Class<T> destClass,Object orig) {
        return BeanUtils.copyProperties(destClass, orig);
    }

    /**
     * Binder initialization callback: registers null-tolerant editors for the
     * common numeric wrapper types and for dates in {@code yyyy-MM-dd} format.
     *
     * @see MultiActionController#createBinder(HttpServletRequest,Object)
     */
    protected void initBinder(HttpServletRequest request, ServletRequestDataBinder binder) {
        binder.registerCustomEditor(Short.class, new CustomNumberEditor(Short.class, true));
        binder.registerCustomEditor(Integer.class, new CustomNumberEditor(Integer.class, true));
        binder.registerCustomEditor(Long.class, new CustomNumberEditor(Long.class, true));
        binder.registerCustomEditor(Float.class, new CustomNumberEditor(Float.class, true));
        binder.registerCustomEditor(Double.class, new CustomNumberEditor(Double.class, true));
        binder.registerCustomEditor(BigDecimal.class, new CustomNumberEditor(BigDecimal.class, true));
        binder.registerCustomEditor(BigInteger.class, new CustomNumberEditor(BigInteger.class, true));
        binder.registerCustomEditor(java.util.Date.class, new CustomDateEditor(new SimpleDateFormat("yyyy-MM-dd"), true));
    }

    /** Builds a ModelMap for a page without a table-id prefix. */
    public static ModelMap toModelMap(Page page,PageRequest pageRequest) {
        return toModelMap("",page, pageRequest);
    }

    /** Builds a ModelMap whose attribute names are prefixed with {@code tableId}. */
    public static ModelMap toModelMap(String tableId,Page page,PageRequest pageRequest) {
        ModelMap model = new ModelMap();
        saveIntoModelMap(tableId,page,pageRequest,model);
        return model;
    }

    /**
     * Stores paging data into the model; used when a single page hosts several
     * extremeTable instances.
     *
     * @param tableId must equal the extremeTable's tableId attribute
     */
    public static void saveIntoModelMap(String tableId,Page page,PageRequest pageRequest,ModelMap model){
        Assert.notNull(tableId,"tableId must be not null");
        Assert.notNull(page,"page must be not null");
        model.addAttribute(tableId+"page", page);
        // Integer.valueOf instead of the deprecated new Integer(...) constructor.
        model.addAttribute(tableId+"totalRows", Integer.valueOf(page.getTotalCount()));
        model.addAttribute(tableId+"pageRequest", pageRequest);
        model.addAttribute(tableId+"query", pageRequest);
    }

    /** Binds request parameters into a PageRequest, applying the default sort. */
    public static PageRequest bindPageRequest(HttpServletRequest request,PageRequest pageRequest,String defaultSortColumns){
        return PageRequestFactory.bindPageRequest(pageRequest,request, defaultSortColumns);
    }

    /**
     * Returns the request attribute stored under {@code key}, creating (via the
     * no-arg constructor) and storing a new {@code clazz} instance if absent.
     */
    @SuppressWarnings("unchecked")
    public static <T> T getOrCreateRequestAttribute(HttpServletRequest request, String key,Class<T> clazz) {
        Object value = request.getAttribute(key);
        if(value == null) {
            try {
                // Class.newInstance() is deprecated; use the Constructor API instead.
                value = clazz.getDeclaredConstructor().newInstance();
            } catch (Exception e) {
                ReflectionUtils.handleReflectionException(e);
            }
            request.setAttribute(key, value);
        }
        // Cast is safe when the stored attribute was created from clazz above;
        // callers storing a different type under the same key would fail here anyway.
        return (T)value;
    }
}
| apache-2.0 |
mpujari/ohcount4j | src/test/java/com/blackducksoftware/ohcount4j/scan/ElixirScannerTest.java | 3123 | /*
* Copyright 2016 Black Duck Software, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.blackducksoftware.ohcount4j.scan;
import static com.blackducksoftware.ohcount4j.Entity.BLANK;
import static com.blackducksoftware.ohcount4j.Entity.CODE;
import static com.blackducksoftware.ohcount4j.Entity.COMMENT;
import org.testng.annotations.Test;
import com.blackducksoftware.ohcount4j.Language;
/** Line-classification tests for the Elixir scanner (blank / code / comment). */
public class ElixirScannerTest extends AbstractBaseScannerTest {

    @Test
    public void basic() {
        // Whitespace-only lines are BLANK.
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, BLANK), "\n");
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, BLANK), "  \n");
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, BLANK), "\t\n");
        // Statements count as CODE; '#' starts a line comment.
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, CODE), "IO.puts \"Hello World\"\n");
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, COMMENT), "# Line comment\n");
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, COMMENT), "#\n");
        // NOTE(review): this case uses '%' where eofHandling() uses '#'; expected
        // entity is CODE either way, but confirm whether '#' was intended here.
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, CODE), "IO.puts \"Hello World\" % with comment\n");
    }

    @Test
    public void eofHandling() {
        // Note lack of trailing \n in all cases below
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, BLANK), " ");
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, BLANK), "\t");
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, CODE), "IO.puts \"Hello World\"");
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, COMMENT), "# Line comment");
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, COMMENT), "#");
        assertLine(Language.ELIXIR, new Line(Language.ELIXIR, CODE), "IO.puts \"Hello World\" # with comment");
    }

    @Test
    public void helloWorld() {
        // Multi-line snippet: each source line maps to one expected entity.
        String source = "# `if` expression\n"
                + "\n"
                + "if false do\n"
                + "  \"This will never be seen #comment after code\"\n"
                + "else\n"
                + "  \"This will\"\n"
                + "\t\n"
                + "end";

        Line[] expectedLines = {
                new Line(Language.ELIXIR, COMMENT),
                new Line(Language.ELIXIR, BLANK),
                new Line(Language.ELIXIR, CODE),
                new Line(Language.ELIXIR, CODE),
                new Line(Language.ELIXIR, CODE),
                new Line(Language.ELIXIR, CODE),
                new Line(Language.ELIXIR, BLANK),
                new Line(Language.ELIXIR, CODE),
        };
        assertLines(Language.ELIXIR, expectedLines, source);
    }
}
| apache-2.0 |
LucidDB/luciddb | farrago/src/net/sf/farrago/runtime/FarragoUdrInvocationFrame.java | 1449 | /*
// Licensed to DynamoBI Corporation (DynamoBI) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. DynamoBI licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
*/
package net.sf.farrago.runtime;
import java.sql.*;
import net.sf.farrago.session.*;
import org.eigenbase.enki.mdr.*;
/**
* FarragoUdrInvocationFrame represents one entry on the routine invocation
* stack for a given thread.
*
* @author John V. Sichi
* @version $Id$
*/
class FarragoUdrInvocationFrame
{
    //~ Instance fields --------------------------------------------------------

    // Runtime context in effect for this routine invocation.
    FarragoRuntimeContext context;

    // Repository (MDR) session associated with the invocation.
    EnkiMDSession reposSession;

    // Session-level UDR context for the invoked routine.
    FarragoSessionUdrContext udrContext;

    // Whether the routine is permitted to issue SQL (e.g. READS/MODIFIES SQL DATA).
    // NOTE(review): exact semantics set by the caller that pushes this frame — confirm there.
    boolean allowSql;

    // JDBC connection available to the routine, if any.
    Connection connection;

    // Identity (user and role) under which the routine was invoked.
    String invokingUser;
    String invokingRole;
}
// End FarragoUdrInvocationFrame.java
| apache-2.0 |
Jasig/inspektr | inspektr-audit/src/main/java/org/apereo/inspektr/audit/spi/support/ParametersAsStringResourceResolver.java | 1683 | /**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apereo.inspektr.audit.spi.support;
import org.apereo.inspektr.audit.AuditTrailManager;
import java.util.ArrayList;
import java.util.List;
/**
* Returns the parameters as an array of strings.
*
* @author Scott Battaglia
* @since 1.0.0
*/
/**
 * Resolves audited resources by rendering each method argument as a string,
 * honoring the configured audit format (plain toString or JSON).
 */
public class ParametersAsStringResourceResolver extends AbstractAuditResourceResolver {

    @Override
    protected String[] createResource(final Object[] args) {
        // Render each argument individually, then hand back an array.
        final List<String> rendered = new ArrayList<String>(args.length);
        for (final Object argument : args) {
            rendered.add(toResourceString(argument));
        }
        return rendered.toArray(new String[rendered.size()]);
    }

    /** Renders one argument: JSON when the audit format asks for it, toString otherwise. */
    public String toResourceString(final Object arg) {
        return auditFormat == AuditTrailManager.AuditFormats.JSON
                ? AuditTrailManager.toJson(arg)
                : arg.toString();
    }
}
| apache-2.0 |
commonsguy/cw-omnibus | NFC/WebBeam/app/src/main/java/com/commonsware/android/webbeam/BeamFragment.java | 2587 | /***
Copyright (c) 2012 CommonsWare, LLC
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy
of the License at http://www.apache.org/licenses/LICENSE-2.0. Unless required
by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
OF ANY KIND, either express or implied. See the License for the specific
language governing permissions and limitations under the License.
Covered in detail in the book _The Busy Coder's Guide to Android Development_
https://commonsware.com/Android
*/
package com.commonsware.android.webbeam;
import android.annotation.SuppressLint;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.webkit.WebView;
import android.webkit.WebViewClient;
/**
 * WebView-hosting fragment for the NFC "web beam" demo: shows a page,
 * contributes a Beam action to the options menu (only when the hosting
 * activity reports NFC support), and exposes the current URL to the activity.
 */
public class BeamFragment extends WebViewFragment {
  // Opt into the options menu so onCreateOptionsMenu() is called.
  @Override
  public void onCreate(@Nullable Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setHasOptionsMenu(true);
  }

  // JavaScript is enabled deliberately for the loaded page; the lint
  // warning is suppressed rather than fixed.
  @SuppressLint("SetJavaScriptEnabled")
  @Override
  public void onViewCreated(@NonNull View view,
                            @Nullable Bundle savedInstanceState) {
    super.onViewCreated(view, savedInstanceState);
    // Keep navigation inside this WebView instead of launching a browser.
    getWebView().setWebViewClient(new BeamClient());
    getWebView().getSettings().setJavaScriptEnabled(true);
    loadUrl("https://google.com");
  }

  @Override
  public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
    // Only offer the Beam action when the device/activity actually has NFC.
    if (getContract().hasNFC()) {
      inflater.inflate(R.menu.actions, menu);
    }

    super.onCreateOptionsMenu(menu, inflater);
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    if (item.getItemId() == R.id.beam) {
      // Ask the activity to enable NDEF push; it reads getUrl() when beaming.
      getContract().enablePush();

      return(true);
    }

    return(super.onOptionsItemSelected(item));
  }

  // The hosting activity doubles as this fragment's contract.
  WebBeamActivity getContract() {
    return((WebBeamActivity)getActivity());
  }

  // URL currently displayed in the WebView.
  String getUrl() {
    return(getWebView().getUrl());
  }

  // Loads a new URL, cancelling any in-flight page load first.
  void loadUrl(String url) {
    android.util.Log.d(getClass().getSimpleName(), url);
    getWebView().stopLoading();
    getWebView().loadUrl(url);
  }

  // Keeps link clicks inside the WebView rather than handing them to the system.
  class BeamClient extends WebViewClient {
    @Override
    public boolean shouldOverrideUrlLoading(WebView wv, String url) {
      wv.loadUrl(url);

      return(true);
    }
  }
}
| apache-2.0 |
zhujohnle/qbcp | QBcp/src/com/qiaoba/qbcp/db/AbsDbOperation.java | 1523 | package com.qiaoba.qbcp.db;
import com.lidroid.xutils.db.sqlite.WhereBuilder;
import com.lidroid.xutils.exception.DbException;
import com.qiaoba.qbcp.QbcpApplication;
/**
 * Base database-operation helper built on xutils' DbManager. Subclasses name
 * their table via {@link #getDbName()}; save/delete/update report success as a
 * boolean instead of throwing.
 */
public abstract class AbsDbOperation implements IDbOperation{

    // Shared DbManager obtained from the application singleton.
    DbManager mDbManager = QbcpApplication.mApp.mQbDbManager;

    @Override
    public boolean saveData(EntityBase mEntity) {
        boolean saved;
        try {
            mDbManager.getContentDb().save(mEntity);
            saved = true;
        } catch (DbException e) {
            saved = false;
        }
        return saved;
    }

    /** Table name this operation works against. */
    public abstract String getDbName ();

    @Override
    public boolean deleteDataFromDb(String sql) {
        return execNonQuerySilently(sql);
    }

    @Override
    public boolean updateDataFromDb(String sql) {
        return execNonQuerySilently(sql);
    }

    // Runs a non-query statement, translating DbException into a false return.
    private boolean execNonQuerySilently(String sql) {
        try {
            mDbManager.getContentDb().execNonQuery(sql);
            return true;
        } catch (DbException e) {
            return false;
        }
    }

    /** Accessor for the underlying DbManager. */
    public DbManager getDBDbManager(){
        return mDbManager;
    }

    /** Deletes every row of this operation's table. */
    public void clearDbData() {
        try {
            mDbManager.getContentDb().execNonQuery("delete from "+ getDbName());
        } catch (DbException e) {
            e.printStackTrace();
        }
    }

    /** Tries to insert; on failure falls back to an update matching {@code mWhereBuilder}. */
    public void insertOrUpdate(EntityBase mUser,WhereBuilder mWhereBuilder){
        if (saveData(mUser)) {
            return;
        }
        try {
            getDBDbManager().getContentDb()
                    .update(mUser, mWhereBuilder);
        } catch (DbException e) {
            // Best-effort by design: a failed fallback update is deliberately
            // swallowed, matching the original behavior.
            // NOTE(review): consider at least logging this.
        }
    }
}
| apache-2.0 |
woken-ml/java-base-docker-images | java-mip/src/test/java/eu/humanbrainproject/mip/algorithms/serializers/pfa/NumericalInputDescription.java | 889 | package eu.humanbrainproject.mip.algorithms.serializers.pfa;
import eu.humanbrainproject.mip.algorithms.SimpleAlgorithm;
import java.util.Arrays;
import java.util.List;
/**
 * Test fixture: an InputDescription whose variables are all numerical (REAL)
 * and whose query/size/variable lists are fixed constants.
 */
public class NumericalInputDescription extends InputDescription<SimpleAlgorithm> {

    public NumericalInputDescription(SimpleAlgorithm algorithm) {
        super(algorithm);
    }

    // Every variable in this fixture is typed REAL, regardless of its name.
    @Override
    protected VariableType getType(String variable) throws Exception {
        return VariableType.REAL;
    }

    // Canned query string; not executed against a real data source here.
    @Override
    protected String getQuery() {
        return "SELECT input data";
    }

    // Fixed fake dataset size.
    @Override
    protected int getDataSize() throws Exception {
        return 10;
    }

    // Single dependent variable.
    @Override
    protected String[] getVariables() {
        return new String[] {"var1"};
    }

    // Four numerical covariables.
    @Override
    protected String[] getCovariables() {
        return new String[] {"num1", "num2", "num3", "num4"};
    }
}
| apache-2.0 |
davinash/geode | geode-gfsh/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java | 5006 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.util;
import static java.util.stream.Collectors.toList;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.Logger;
import org.apache.geode.internal.logging.MergeLogFiles;
import org.apache.geode.logging.internal.log4j.api.LogService;
import org.apache.geode.management.internal.cli.GfshParser;
import org.apache.geode.management.internal.i18n.CliStrings;
/**
* @since GemFire 7.0
*/
/**
 * Merges every ".log" file under a directory into a single "merge_*.log"
 * file in that directory. Used by gfsh log export; can run in-process via
 * {@link #mergeLogFile} or in a freshly spawned JVM via
 * {@link #mergeLogsInNewProcess}.
 *
 * @since GemFire 7.0
 */
public class MergeLogs {
  private static final Logger logger = LogService.getLogger();

  public static void main(String[] args) {
    // Exactly one argument (the target directory) is required.
    if (args.length != 1) {
      throw new IllegalArgumentException("Requires only 1 arguments : <targetDirName>");
    }
    try {
      String result = mergeLogFile(args[0]).getCanonicalPath();
      // NOTE: mergeLogsInNewProcess() greps child output for this exact prefix
      // to detect success — keep the message text in sync.
      System.out.println("Merged logs to: " + result);
    } catch (Exception e) {
      System.out.println(e.getMessage());
    }
  }

  /**
   * Runs the merge in a separate JVM (same classpath) and inspects its
   * combined stdout/stderr to decide whether the merge succeeded.
   *
   * @param logDirectory directory whose .log files should be merged
   */
  public static void mergeLogsInNewProcess(Path logDirectory) {
    // create a new process for merging
    logger.info("Exporting logs merging logs" + logDirectory);
    List<String> commandList = new ArrayList<>();
    commandList.add(
        System.getProperty("java.home") + File.separatorChar + "bin" + File.separatorChar + "java");
    commandList.add("-classpath");
    commandList.add(System.getProperty("java.class.path", "."));
    commandList.add(MergeLogs.class.getName());
    commandList.add(logDirectory.toAbsolutePath().toString());

    ProcessBuilder procBuilder = new ProcessBuilder(commandList);
    StringBuilder output = new StringBuilder();
    try {
      logger.info("Exporting logs now merging logs");
      Process mergeProcess = procBuilder.redirectErrorStream(true).start();
      mergeProcess.waitFor();

      // Read the child's output; try-with-resources closes the reader
      // (the original leaked it).
      try (BufferedReader br =
          new BufferedReader(new InputStreamReader(mergeProcess.getInputStream()))) {
        String line;
        while ((line = br.readLine()) != null) {
          output.append(line).append(GfshParser.LINE_SEPARATOR);
        }
      }
      mergeProcess.destroy();
    } catch (Exception e) {
      logger.error(e.getMessage());
    }
    if (output.toString().contains("Merged logs to: ")) {
      logger.info("Exporting logs successfully merged logs");
    } else {
      logger.error("Could not merge");
    }
  }

  /** Recursively collects every file with a ".log" extension under {@code dir}. */
  protected static List<File> findLogFilesToMerge(File dir) {
    return new ArrayList<>(FileUtils.listFiles(dir, new String[] {"log"}, true));
  }

  /**
   * Merges all .log files found under {@code dirName} into a new timestamped
   * merge file in the same directory.
   *
   * @return the merged log file
   * @throws Exception if a log file cannot be opened or the merged output file
   *         cannot be created
   */
  static File mergeLogFile(String dirName) throws Exception {
    Path dir = Paths.get(dirName);
    List<File> logsToMerge = findLogFilesToMerge(dir.toFile());

    Map<String, InputStream> logFiles = new HashMap<>();
    try {
      for (File logFile : logsToMerge) {
        try {
          logFiles.put(dir.relativize(logFile.toPath()).toString(),
              new FileInputStream(logFile));
        } catch (FileNotFoundException e) {
          // Keep the original message but preserve the cause (the original dropped it).
          throw new Exception(
              logFile + " " + CliStrings.EXPORT_LOGS__MSG__FILE_DOES_NOT_EXIST, e);
        }
      }

      String mergedLogPath = dirName + File.separator + "merge_"
          + new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new java.util.Date()) + ".log";
      File mergedLogFile = new File(mergedLogPath);
      // try-with-resources flushes and closes the writer (the original never closed it).
      try (PrintWriter mergedLog = new PrintWriter(mergedLogFile)) {
        MergeLogFiles.mergeLogFiles(logFiles, mergedLog);
      } catch (FileNotFoundException e) {
        throw new Exception(
            "FileNotFoundException in creating PrintWriter in MergeLogFiles" + e.getMessage(), e);
      } catch (Exception e) {
        throw new Exception(
            "Exception in creating PrintWriter in MergeLogFiles" + e.getMessage(), e);
      }
      return mergedLogFile;
    } finally {
      // Close every opened input stream (the original leaked all of them).
      for (InputStream in : logFiles.values()) {
        try {
          in.close();
        } catch (Exception ignored) {
          // best-effort cleanup only
        }
      }
    }
  }
}
| apache-2.0 |
llvasconcellos/client | app/src/main/java/org/projectbuendia/client/user/UserManager.java | 11751 | // Copyright 2015 The Project Buendia Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at: http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distrib-
// uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
// OR CONDITIONS OF ANY KIND, either express or implied. See the License for
// specific language governing permissions and limitations under the License.
package org.projectbuendia.client.user;
import android.content.OperationApplicationException;
import android.os.AsyncTask;
import android.os.RemoteException;
import com.android.volley.VolleyError;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import org.projectbuendia.client.events.user.ActiveUserSetEvent;
import org.projectbuendia.client.events.user.ActiveUserUnsetEvent;
import org.projectbuendia.client.events.user.KnownUsersLoadFailedEvent;
import org.projectbuendia.client.events.user.KnownUsersLoadedEvent;
import org.projectbuendia.client.events.user.KnownUsersSyncFailedEvent;
import org.projectbuendia.client.events.user.KnownUsersSyncedEvent;
import org.projectbuendia.client.events.user.UserAddFailedEvent;
import org.projectbuendia.client.events.user.UserAddedEvent;
import org.projectbuendia.client.json.JsonNewUser;
import org.projectbuendia.client.json.JsonUser;
import org.projectbuendia.client.utils.AsyncTaskRunner;
import org.projectbuendia.client.utils.EventBusInterface;
import org.projectbuendia.client.utils.Logger;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import javax.annotation.Nullable;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Manages the available logins and the currently logged-in user.
* <p/>
* <p>All classes that care about the current active user should be able to gracefully handle the
* following event bus events:
* <ul>
* <li>{@link ActiveUserSetEvent}
* <li>{@link ActiveUserUnsetEvent}
* </ul>
* <p/>
* <p>All classes that care about all known users should additionally be able to gracefully handle
* the following event bus events:
* <ul>
* <li>{@link KnownUsersLoadedEvent}
* <li>{@link KnownUsersLoadFailedEvent}
* <li>{@link KnownUsersSyncedEvent}
* <li>{@link KnownUsersSyncFailedEvent}
* </ul>
* <p/>
* <p>All classes that care about being able to add and delete users should additionally be able
* gracefully handle the following event bus events:
* <ul>
* <li>{@link UserAddedEvent}
* <li>{@link UserAddFailedEvent}
* </ul>
* <p/>
* <p>All methods should be called on the main thread.
*/
public class UserManager {
private static final Logger LOG = Logger.create();
private final UserStore mUserStore;
private final EventBusInterface mEventBus;
private final AsyncTaskRunner mAsyncTaskRunner;
private final Set<JsonUser> mKnownUsers = new HashSet<>();
private boolean mSynced = false;
private boolean mAutoCancelEnabled = false;
private boolean mIsDirty = false;
@Nullable private AsyncTask mLastTask;
@Nullable private JsonUser mActiveUser;
/**
* Utility function for automatically canceling user load tasks to simulate network connectivity
* issues.
* TODO: Move to a fake or mock out when daggered.
*/
public void setAutoCancelEnabled(boolean autoCancelEnabled) {
mAutoCancelEnabled = autoCancelEnabled;
}
/** Resets the UserManager to its initial empty state. */
public void reset() {
mKnownUsers.clear();
mSynced = false;
}
/**
* If true, users have been recently updated and any data relying on a specific view of users
* may be out of sync.
*/
public boolean isDirty() {
return mIsDirty;
}
/** Sets whether or not users have been recently updated. */
public void setDirty(boolean shouldInvalidateFormCache) {
mIsDirty = shouldInvalidateFormCache;
}
/**
* Loads the set of all users known to the application from local cache.
* <p/>
* <p>This method will post a {@link KnownUsersLoadedEvent} if the known users were
* successfully loaded and a {@link KnownUsersLoadFailedEvent} otherwise.
* <p/>
* <p>This method will only perform a local cache lookup once per application lifetime.
*/
public void loadKnownUsers() {
if (!mSynced) {
mLastTask = new LoadKnownUsersTask();
mAsyncTaskRunner.runTask(mLastTask);
} else {
mEventBus.post(new KnownUsersLoadedEvent(ImmutableSet.copyOf(mKnownUsers)));
}
}
/** Sync users synchronously. Blocks until the list of users is synced, or interrupted. */
public void syncKnownUsersSynchronously()
throws InterruptedException, ExecutionException, RemoteException,
OperationApplicationException, UserSyncException {
onUsersSynced(mUserStore.syncKnownUsers());
}
/**
* Called when users are retrieved from the server, in order to send events and update user
* state as necessary.
*/
private void onUsersSynced(Set<JsonUser> syncedUsers) throws UserSyncException {
if (syncedUsers == null || syncedUsers.isEmpty()) {
throw new UserSyncException("Set of users retrieved from server is null or empty.");
}
ImmutableSet<JsonUser> addedUsers =
ImmutableSet.copyOf(Sets.difference(syncedUsers, mKnownUsers));
ImmutableSet<JsonUser> deletedUsers =
ImmutableSet.copyOf(Sets.difference(mKnownUsers, syncedUsers));
mKnownUsers.clear();
mKnownUsers.addAll(syncedUsers);
mEventBus.post(new KnownUsersSyncedEvent(addedUsers, deletedUsers));
if (mActiveUser != null && deletedUsers.contains(mActiveUser)) {
// TODO: Potentially clear mActiveUser here.
mEventBus.post(new ActiveUserUnsetEvent(
mActiveUser, ActiveUserUnsetEvent.REASON_USER_DELETED));
}
// If at least one user was added or deleted, the set of known users has changed.
if (!addedUsers.isEmpty() || !deletedUsers.isEmpty()) {
setDirty(true);
}
}
/** Returns the current active user or {@code null} if no user is active. */
@Nullable public JsonUser getActiveUser() {
return mActiveUser;
}
/**
* Sets the current active user or unsets it if {@code activeUser} is {@code null}, returning
* whether the operation succeeded.
* <p/>
* <p>This method will fail if the specified user is not known to the application.
* <p/>
* <p>This method will post an {@link ActiveUserSetEvent} if the active user was successfully
* set and an {@link ActiveUserUnsetEvent} if the active user was unset successfully; these
* events will be posted even if the active user did not change.
*/
public boolean setActiveUser(@Nullable JsonUser activeUser) {
@Nullable JsonUser previousActiveUser = mActiveUser;
if (activeUser == null) {
mActiveUser = null;
mEventBus.post(new ActiveUserUnsetEvent(
previousActiveUser, ActiveUserUnsetEvent.REASON_UNSET_INVOKED));
return true;
}
if (!mKnownUsers.contains(activeUser)) {
LOG.e("Couldn't switch user -- new user is not known");
return false;
}
mActiveUser = activeUser;
mEventBus.post(new ActiveUserSetEvent(previousActiveUser, activeUser));
return true;
}
/**
* Adds a user to the set of known users, both locally and on the server.
* <p/>
* <p>This method will post a {@link UserAddedEvent} if the user was added successfully and a
* {@link UserAddFailedEvent} otherwise.
*/
public void addUser(JsonNewUser user) {
checkNotNull(user);
// TODO: Validate user.
mAsyncTaskRunner.runTask(new AddUserTask(user));
}
/** Thrown when an error occurs syncing users from server. */
public static class UserSyncException extends Throwable {
public UserSyncException(String s) {
super(s);
}
}
UserManager(
UserStore userStore,
EventBusInterface eventBus,
AsyncTaskRunner asyncTaskRunner) {
mAsyncTaskRunner = checkNotNull(asyncTaskRunner);
mEventBus = checkNotNull(eventBus);
mUserStore = checkNotNull(userStore);
}
/**
 * Loads known users from the database into memory.
 * <p/>
 * <p>Forces a network sync if the database has not been downloaded yet.
 */
private class LoadKnownUsersTask extends AsyncTask<Object, Void, Set<JsonUser>> {
    @Override protected Set<JsonUser> doInBackground(Object... unusedObjects) {
        // Presumably a test/debug hook that forces the cancellation path — TODO confirm.
        // onCancelled() below posts the REASON_CANCELLED failure event.
        if (mAutoCancelEnabled) {
            cancel(true);
            return null;
        }
        try {
            return mUserStore.loadKnownUsers();
        } catch (Exception e) {
            // TODO: Figure out type of exception to throw.
            LOG.e(e, "Load users task failed");
            mEventBus.post(
                    new KnownUsersLoadFailedEvent(KnownUsersLoadFailedEvent.REASON_UNKNOWN));
            // NOTE(review): returning null still triggers onPostExecute(), which clears
            // mKnownUsers, marks the manager synced, and posts a KnownUsersLoadedEvent with
            // an empty set *after* the failure event above. Confirm subscribers tolerate
            // receiving both events for a single failed load.
            return null;
        }
    }
    @Override protected void onCancelled() {
        LOG.w("Load users task cancelled");
        mEventBus.post(
                new KnownUsersLoadFailedEvent(KnownUsersLoadFailedEvent.REASON_CANCELLED));
    }
    @Override protected void onPostExecute(Set<JsonUser> knownUsers) {
        // Replace (not merge) the in-memory set with whatever the load produced; a null
        // result (failure or cancellation race) leaves the set empty.
        mKnownUsers.clear();
        if (knownUsers != null) {
            mKnownUsers.addAll(knownUsers);
        }
        mSynced = true;
        // Publish an immutable snapshot so subscribers cannot mutate internal state.
        mEventBus.post(new KnownUsersLoadedEvent(ImmutableSet.copyOf(mKnownUsers)));
    }
}
/** Adds a user to the database asynchronously. */
private final class AddUserTask extends AsyncTask<Void, Void, JsonUser> {
    private final JsonNewUser mUser;
    // Failure flags written in doInBackground and read in onPostExecute; AsyncTask
    // guarantees a happens-before edge between the two.
    private boolean mAlreadyExists;
    private boolean mFailedToConnect;

    public AddUserTask(JsonNewUser user) {
        mUser = checkNotNull(user);
    }

    @Override protected JsonUser doInBackground(Void... voids) {
        try {
            return mUserStore.addUser(mUser);
        } catch (VolleyError e) {
            recordFailureKind(e);
            return null;
        }
    }

    /** Classifies {@code e} by message text so onPostExecute can pick a failure reason. */
    private void recordFailureKind(VolleyError e) {
        String message = e.getMessage();
        if (message == null) {
            return;
        }
        if (message.contains("already in use")) {
            mAlreadyExists = true;
        } else if (message.contains("failed to connect")) {
            mFailedToConnect = true;
        }
    }

    @Override protected void onPostExecute(JsonUser addedUser) {
        if (addedUser != null) {
            mKnownUsers.add(addedUser);
            mEventBus.post(new UserAddedEvent(addedUser));
            // Set of known users has changed.
            setDirty(true);
            return;
        }
        if (mAlreadyExists) {
            mEventBus.post(new UserAddFailedEvent(
                    mUser, UserAddFailedEvent.REASON_USER_EXISTS_ON_SERVER));
        } else if (mFailedToConnect) {
            mEventBus.post(new UserAddFailedEvent(
                    mUser, UserAddFailedEvent.REASON_CONNECTION_ERROR));
        } else {
            mEventBus.post(new UserAddFailedEvent(mUser, UserAddFailedEvent.REASON_UNKNOWN));
        }
    }
}
}
| apache-2.0 |
parabuzzle/toobs | legacy/toobs-0.1/Platform/PresFramework/src/org/toobs/framework/pres/util/ParameterUtil.java | 16514 | package org.toobs.framework.pres.util;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.apache.commons.jxpath.JXPathContext;
import org.apache.commons.jxpath.JXPathException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.web.util.WebUtils;
import org.toobs.framework.exception.ParameterException;
import org.toobs.framework.pres.component.config.Parameter;
import org.toobs.framework.pres.doit.config.Forward;
import org.toobs.framework.util.Configuration;
@SuppressWarnings("unchecked")
public class ParameterUtil {

    private static Log log = LogFactory.getLog(ParameterUtil.class);

    /** Request/session attribute names that must never be copied into a parameter map. */
    private static List excludedParameters;

    /** Environment-level values addressable from parameter expressions via the '#' prefix. */
    private static Map envParameters;

    static {
        excludedParameters = new ArrayList();
        excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.THEME");
        excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.THEME_RESOLVER");
        excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.THEME_SOURCE");
        excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.CONTEXT");
        excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.LOCALE");
        excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.LOCALE_RESOLVER");
        excludedParameters.add("org.springframework.web.servlet.HandlerMapping.pathWithinHandlerMapping");
        excludedParameters.add("hibernateFilter.FILTERED");
        envParameters = new HashMap();
        envParameters.put("host", Configuration.getInstance().getMainHost() );
        envParameters.put("toobs.debug", Configuration.getInstance().getProperty("toobs.debug", "false") );
    }

    /**
     * Extract the URL filename from the given request URI.
     * Delegates to <code>WebUtils.extractViewNameFromUrlPath(String)</code>.
     * @param uri the request URI (e.g. "/index.html")
     * @return the extracted URI filename (e.g. "index")
     * @see org.springframework.web.util.WebUtils#extractFilenameFromUrlPath
     */
    public static String extractViewNameFromUrlPath(String uri) {
        return WebUtils.extractFilenameFromUrlPath(uri);
    }

    /**
     * Extracts the file extension from a URL path.
     * @param uri the request URI (e.g. "/index.html")
     * @return the extension without the dot (e.g. "html"), or "" when there is none
     */
    public static String extractExtensionFromUrlPath(String uri) {
        // BUG FIX: previously indexOf(".") was used, so a dot anywhere earlier in the path
        // (e.g. "/archive.old/page.html" or "file.tar.gz") returned the wrong suffix. The
        // variable name "lastDot" shows the original intent.
        int lastDot = uri.lastIndexOf('.');
        if (lastDot != -1) {
            return uri.substring(lastDot + 1);
        }
        return "";
    }

    /**
     * Extracts the first path segment (the webapp context) from a URL path.
     * @param uri a URI beginning with '/' (e.g. "/myapp/page.html")
     * @return the context path segment (e.g. "myapp"), or "" when there is no second slash
     */
    public static String extractContextPathFromUrlPath(String uri) {
        int midSlash = uri.indexOf("/", 1);
        if (midSlash != -1) {
            return uri.substring(1, midSlash);
        }
        return "";
    }

    /**
     * Resolves a forward definition's URI against the parameter map, optionally prefixing
     * the context path taken from {@code urlPath}.
     * (The method name's "resove" typo is preserved: it is public API.)
     */
    public static String resoveForwardPath(Forward forwardDef, Map parameters, String urlPath) {
        String forwardPath = null;
        forwardPath = ((String[]) ParameterUtil.resolveParam(forwardDef.getUri(), parameters))[0];
        if (forwardPath != null && forwardDef.getUseContext()) {
            String contextPath = ParameterUtil.extractContextPathFromUrlPath(urlPath);
            forwardPath = (contextPath.length() > 0 ? "/" + contextPath + "/" : "") + forwardPath;
        }
        return forwardPath;
    }

    /** Builds a parameter map from the request; see {@link #buildParameterMap(HttpServletRequest, boolean)}. */
    public static Map buildParameterMap(HttpServletRequest request) {
        return buildParameterMap(request, false);
    }

    /**
     * Flattens session attributes, request parameters and request attributes into one map,
     * in increasing order of priority (attributes win). Also records the query string under
     * "httpQueryString"; for component POST calls the query string is rebuilt from the
     * parameter map so the component sees the posted fields.
     *
     * @param request  the current HTTP request
     * @param compCall whether this map is being built for a component call
     */
    public static Map buildParameterMap(HttpServletRequest request, boolean compCall) {
        Map params = new HashMap();
        HttpSession session = request.getSession();
        Enumeration attributes = session.getAttributeNames();
        // Session has lowest priority
        while (attributes.hasMoreElements()) {
            String thisAttribute = (String) attributes.nextElement();
            //if (session.getAttribute(thisAttribute) instanceof String) {
            params.put(thisAttribute, session.getAttribute(thisAttribute));
            //}
        }
        // Parameters next highest
        params.putAll(request.getParameterMap());
        // Attributes rule all
        attributes = request.getAttributeNames();
        while (attributes.hasMoreElements()) {
            String thisAttribute = (String) attributes.nextElement();
            if (!excludedParameters.contains(thisAttribute)) {
                if (log.isDebugEnabled()) {
                    log.debug("Putting " + thisAttribute + " As " + request.getAttribute(thisAttribute));
                }
                params.put(thisAttribute, request.getAttribute(thisAttribute));
            }
        }
        params.put("httpQueryString", request.getQueryString());
        if (compCall && request.getMethod().equals("POST")) {
            // Rebuild "k=v&k=v" from the posted parameters. StringBuilder: local,
            // single-threaded use — no need for StringBuffer's synchronization.
            StringBuilder qs = new StringBuilder();
            Iterator iter = request.getParameterMap().entrySet().iterator();
            int i = 0;
            while (iter.hasNext()) {
                Map.Entry entry = (Map.Entry) iter.next();
                String key = (String) entry.getKey();
                String[] value = (String[]) entry.getValue();
                for (int j = 0; j < value.length; j++) {
                    if (i > 0) qs.append("&");
                    qs.append(key).append("=").append(value[j]);
                    i++;
                }
            }
            params.put("httpQueryString", qs.toString());
        }
        return params;
    }

    /** Maps parameters without an object list; see the five-argument overload. */
    public static void mapParameters(String callingContext,
                                     Parameter[] paramMap,
                                     Map inParams,
                                     Map outParams,
                                     String scopeId) throws ParameterException {
        mapParameters(callingContext, paramMap, inParams, outParams, scopeId, null);
    }

    /**
     * Evaluates each {@link Parameter} definition against {@code inParams} (via JXPath) and
     * writes the resolved values into {@code outParams}. Honors per-parameter scope,
     * overwrite, condition, static/object/list flags, defaults and ignore-null settings.
     *
     * @param callingContext identifier used in error reporting
     * @param paramMap       parameter definitions to evaluate
     * @param inParams       source values (JXPath context root)
     * @param outParams      destination map for resolved values
     * @param scopeId        current scope; definitions with a different scope are skipped
     * @param objectList     optional result objects addressed by {@code objectIndex}
     * @throws ParameterException when a non-ignorable parameter resolves to null or
     *                            evaluation fails
     */
    public static void mapParameters(String callingContext,
                                     Parameter[] paramMap,
                                     Map inParams,
                                     Map outParams,
                                     String scopeId,
                                     ArrayList objectList) throws ParameterException {
        JXPathContext context = JXPathContext.newContext(inParams);
        for (int j = 0; j < paramMap.length; j++) {
            Parameter thisParam = paramMap[j];
            Object value = null;
            String thisPath = null;
            String thisName = null;
            try {
                // Skip definitions that belong to another scope.
                if (thisParam.getScope() != null &&
                        !thisParam.getScope().equalsIgnoreCase("all") &&
                        !thisParam.getScope().equalsIgnoreCase(scopeId)) {
                    continue;
                }
                // Respect "don't overwrite an already-present value".
                if (!thisParam.getOverwriteExisting() && inParams.get(thisParam.getName()) != null) {
                    continue;
                }
                thisName = resolveParam(thisParam.getName(), inParams)[0];
                thisPath = resolveParam(thisParam.getPath(), inParams)[0];
                boolean condition = true;
                if (thisParam.getCondition() != null) {
                    Object condObj = context.getValue(thisParam.getCondition());
                    if (log.isDebugEnabled()) {
                        log.debug("Condition Object: " + condObj);
                    }
                    if (condObj != null && condObj instanceof Boolean) {
                        condition = (Boolean) condObj;
                    }
                }
                if (condition) {
                    if (thisParam.getIsStatic()) {
                        // Static parameters take the (resolved) path text verbatim.
                        value = thisPath;
                    } else if (thisParam.getIsObject()) {
                        // Evaluate against one of the supplied result objects.
                        if ((objectList == null) || (objectList != null && thisParam.getObjectIndex() >= objectList.size())) {
                            continue;
                        }
                        JXPathContext objContext = JXPathContext.newContext(objectList.get(thisParam.getObjectIndex()));
                        if (thisParam.getIsList()) {
                            Iterator iter = objContext.iterate(thisPath);
                            value = new ArrayList();
                            while (iter.hasNext()) {
                                ((ArrayList) value).add(iter.next());
                            }
                            if (((ArrayList) value).size() == 0 && thisParam.getDefault() != null) {
                                ((ArrayList) value).add(thisParam.getDefault());
                            }
                        } else {
                            value = objContext.getValue(thisPath);
                        }
                    } else if (thisParam.getIsList()) {
                        // Accumulate into a list, reusing any list already present.
                        Object newList = inParams.get(thisName);
                        if (newList == null)
                            newList = outParams.get(thisName);
                        if (newList != null && !(newList instanceof ArrayList)) {
                            // BUG FIX: the original created the new list and then added the
                            // local "value", which is still null here — seeding the list with
                            // a spurious null and silently discarding the existing scalar.
                            // Wrap the pre-existing value instead.
                            // NOTE(review): if the existing value can be a String[] rather
                            // than a scalar, consider adding its elements — confirm callers.
                            Object existing = newList;
                            newList = new ArrayList();
                            ((ArrayList) newList).add(existing);
                        }
                        if (newList == null)
                            newList = new ArrayList();
                        value = context.getValue(thisPath);
                        if (value != null && value.getClass().isArray()) {
                            Object[] valueArray = (Object[]) value;
                            if (valueArray.length > 1) {
                                for (int i = 0; i < valueArray.length; i++) {
                                    if (valueArray[i] != null && ((String) valueArray[i]).length() > 0)
                                        ((ArrayList) newList).add(valueArray[i]);
                                }
                                value = null;
                            } else {
                                value = valueArray[0];
                            }
                        }
                        if (value != null && !"".equals(value))
                            ((ArrayList) newList).add(value);
                        value = newList;
                    } else {
                        value = context.getValue(thisPath);
                        if (value != null && value.getClass().isArray()) {
                            Object[] valueArray = (Object[]) value;
                            if (valueArray.length > 1) {
                                value = valueArray;
                            } else {
                                value = valueArray[0];
                            }
                        } else if (value == null && thisParam.getSessionPath() != null) {
                            // Fall back to the session path when the primary path yields nothing.
                            value = context.getValue(thisParam.getSessionPath());
                        }
                    }
                    // Store the resolved value, or the default, or fail/skip per ignoreNull.
                    if (value != null && value.getClass().isArray() && thisParam.getIsList()) {
                        outParams.put(thisName, value);
                    } else if (value != null && value.getClass().isArray()) {
                        outParams.put(thisName, ((String[]) value)[0]);
                    } else if (value != null && value instanceof ArrayList && ((ArrayList) value).size() > 0) {
                        outParams.put(thisName, value);
                    } else if (value != null && !(value instanceof ArrayList) && String.valueOf(value).length() > 0) {
                        outParams.put(thisName, String.valueOf(value));
                    } else if (thisParam.getDefault() != null) {
                        String[] defVal = resolveParam(thisParam.getDefault(), inParams);
                        if (defVal != null) {
                            outParams.put(thisName, defVal[0]);
                        }
                    } else if (!thisParam.getIgnoreNull()) {
                        throw new ParameterException(callingContext, thisName, thisPath);
                    } else if (log.isDebugEnabled()) {
                        log.debug("Param " + thisName + " evaluated to null");
                    }
                }
            } catch (Exception e) {
                // TODO: ParameterException offers no cause-taking constructor here; the
                // original exception is only logged.
                log.error("mapParameters - exception [name:" + thisName + " path:" + thisPath + " value:" + value + "]");
                throw new ParameterException(callingContext, thisName, thisPath);
            }
        }
    }

    /**
     * Evaluates output parameter definitions against the supplied result objects and writes
     * the values into {@code paramsIn}. List parameters with objectIndex -1 gather one value
     * from every object; otherwise the indexed object is iterated.
     *
     * @throws JXPathException rethrown when evaluation fails, no default exists and the
     *                         parameter is not ignore-null
     */
    public static void mapOutputParameters(Parameter[] paramMap, Map paramsIn, String scopeId, ArrayList objects) {
        for (int j = 0; j < paramMap.length; j++) {
            Parameter thisParam = paramMap[j];
            if (thisParam.getScope() != null &&
                    !thisParam.getScope().equalsIgnoreCase("all") &&
                    !thisParam.getScope().equalsIgnoreCase(scopeId)) {
                continue;
            }
            if (!thisParam.getOverwriteExisting() && paramsIn.get(thisParam.getName()) != null) {
                continue;
            }
            if (thisParam.getObjectIndex() >= objects.size()) {
                continue;
            }
            JXPathContext context = null;
            Object value = null;
            String paramName = resolveParam(thisParam.getName(), paramsIn)[0];
            try {
                String thisPath = resolveParam(thisParam.getPath(), paramsIn)[0];
                if (thisParam.getIsStatic()) {
                    value = thisPath;
                } else {
                    if (thisParam.getIsList()) {
                        value = new ArrayList();
                        if (thisParam.getObjectIndex() == -1) {
                            // -1 means "one value from each object in order".
                            for (int i = 0; i < objects.size(); i++) {
                                context = JXPathContext.newContext(objects.get(i));
                                ((ArrayList) value).add(context.getValue(thisPath));
                            }
                        } else {
                            context = JXPathContext.newContext(objects.get(thisParam.getObjectIndex()));
                            Iterator iter = context.iterate(thisPath);
                            while (iter.hasNext()) {
                                ((ArrayList) value).add(iter.next());
                            }
                        }
                        if (((ArrayList) value).size() == 0) {
                            if (thisParam.getDefault() != null) {
                                // Defaults that parse as integers are stored as Integer.
                                try {
                                    ((ArrayList) value).add(Integer.parseInt(thisParam.getDefault()));
                                } catch (NumberFormatException nfe) {
                                    ((ArrayList) value).add(thisParam.getDefault());
                                }
                            } else {
                                value = null;
                            }
                        }
                    } else {
                        context = JXPathContext.newContext(objects.get(thisParam.getObjectIndex()));
                        value = context.getValue(thisPath);
                    }
                }
                if (value != null
                        && List.class.isAssignableFrom(value.getClass())
                        && ((List) value).size() == 0
                        && thisParam.getDefault() != null) {
                    // NOTE(review): this adds thisPath, not thisParam.getDefault(), even
                    // though the guard checks getDefault() — looks suspicious; confirm
                    // intended behavior before changing.
                    ((List) value).add(thisPath);
                }
                paramsIn.put(paramName, value);
            } catch (JXPathException e) {
                if (thisParam.getDefault() != null) {
                    String[] def = resolveParam(thisParam.getDefault(), paramsIn);
                    if (def != null && def.length > 0) {
                        paramsIn.put(paramName, def[0]);
                    }
                } else if (!thisParam.getIgnoreNull()) {
                    log.error("JXPathException for parameter " + paramName + " in scope " + scopeId);
                    // TODO This should be a BaseException
                    throw e;
                }
            }
        }
    }

    /**
     * Resolves DoIt input parameter definitions from {@code paramsIn} into {@code paramsOut}.
     * Static parameters copy the resolved path text; others are evaluated via JXPath.
     *
     * <p>NOTE(review): when {@code useJXPathContext} is {@code false} and a non-static
     * parameter is encountered, {@code context} is null and this method throws an NPE —
     * callers appear to rely on all parameters being static in that mode; confirm.
     */
    public static void mapDoItInputParameters(Parameter[] paramMap, Map paramsIn, Map paramsOut, boolean useJXPathContext) {
        JXPathContext context = null;
        if (useJXPathContext)
            context = JXPathContext.newContext(paramsIn);
        for (int j = 0; j < paramMap.length; j++) {
            Parameter thisParam = paramMap[j];
            Object value = null;
            if (thisParam.getIsStatic()) {
                String[] valueAry = new String[1];
                valueAry[0] = resolveParam(thisParam.getPath(), paramsIn)[0];
                value = valueAry;
            } else {
                value = context.getValue(resolveParam(thisParam.getPath(), paramsIn)[0]);
                if (value != null && value.getClass().isArray() && ((Object[]) value).length == 1) {
                    // Unwrap single-element arrays to the bare value.
                    value = ((Object[]) value)[0];
                } else if (value == null && thisParam.getDefault() != null) {
                    value = thisParam.getDefault();
                }
            }
            paramsOut.put(resolveParam(thisParam.getName(), paramsIn)[0], value);
        }
    }

    /** Resolves an input expression with no default; see the three-argument overload. */
    public static String[] resolveParam(Object input, Map params) {
        return resolveParam(input, params, null);
    }

    /**
     * Resolves a parameter expression to a String array.
     *
     * <p>A leading '$' looks the rest of the name up in {@code params} (falling back to
     * {@code defaultValue}, else returning null with a warning); a leading '#' looks it up
     * in the static environment map; the literal "%now" yields the current epoch millis.
     * Anything else is returned as-is.
     *
     * @param input        a String, String[] or null expression to resolve
     * @param params       lookup map for '$' references
     * @param defaultValue value used when a '$' reference is absent, or when input is null
     * @return the resolved values, or null for an unresolvable '$' reference without default
     */
    public static String[] resolveParam(Object input, Map params, Object defaultValue) {
        String[] output;
        if (input != null && input.getClass().isArray()) {
            output = (String[]) input;
        } else {
            output = new String[] {(String) input};
        }
        if (input != null && input instanceof String && !"".equals(input)) {
            char ind = ((String) input).charAt(0);
            Object value;
            switch (ind) {
                case '$':
                    value = params.get(((String) input).substring(1));
                    if (value == null) {
                        if (defaultValue != null) {
                            value = defaultValue;
                        } else {
                            log.warn("Input variable with name " + input + " resolved to null");
                            return null;
                        }
                    }
                    if (value.getClass().isArray()) {
                        output = ((String[]) value);
                    } else {
                        output = new String[1];
                        output[0] = (String) value;
                    }
                    if (log.isDebugEnabled()) {
                        log.debug("Input variable with name " + input + " resolved to " + output[0]);
                    }
                    break;
                case '#':
                    value = envParameters.get(((String) input).substring(1));
                    if (value != null) {
                        if (value.getClass().isArray()) {
                            output = ((String[]) value);
                        } else {
                            output = new String[1];
                            output[0] = (String) value;
                        }
                    }
                    break;
                case '%':
                    if (((String) input).equalsIgnoreCase("%now")) {
                        output = new String[1];
                        output[0] = String.valueOf(new Date().getTime());
                    }
                    break; // defensive: keep the last case self-contained if more are added
            }
        } else if (defaultValue != null) {
            output = new String[1];
            output[0] = (String) defaultValue;
        }
        return output;
    }

    /** Copies every entry of {@code params} into {@code paramsIn} (later entries overwrite). */
    public static void mapScriptParams(Map params, Map paramsIn) {
        // Behaviorally identical to the old key-by-key loop, without the extra lookups.
        paramsIn.putAll(params);
    }
}
| apache-2.0 |