code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
"use strict"; var Activity = require("./activity"); var util = require("util"); function Falsy() { Activity.call(this); this.value = false; this.is = true; this.isNot = false; } util.inherits(Falsy, Activity); Falsy.prototype.run = function (callContext, args) { callContext.schedule(this.value, "_valueGot"); }; Falsy.prototype._valueGot = function (callContext, reason, result) { if (reason !== Activity.states.complete) { callContext.end(reason, result); return; } if (result) { callContext.schedule(this.isNot, "_done"); } else { callContext.schedule(this.is, "_done"); } }; Falsy.prototype._done = function (callContext, reason, result) { callContext.end(reason, result); }; module.exports = Falsy; //# sourceMappingURL=falsy.js.map
unbornchikken/workflow-4-node
lib/es5/activities/falsy.js
JavaScript
lgpl-3.0
823
/* * Parabuild CI licenses this file to You under the LGPL 2.1 * (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.gnu.org/licenses/lgpl-3.0.txt * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.parabuild.ci.versioncontrol; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.parabuild.ci.build.AgentFailureException; import org.parabuild.ci.common.IoUtils; import org.parabuild.ci.common.StringUtils; import org.parabuild.ci.configuration.SystemConfigurationManagerFactory; import org.parabuild.ci.error.Error; import org.parabuild.ci.error.ErrorManagerFactory; import org.parabuild.ci.object.SystemProperty; import org.parabuild.ci.process.RemoteCommand; import org.parabuild.ci.process.TimeoutCallback; import org.parabuild.ci.remote.Agent; import java.io.BufferedReader; import java.io.FileReader; import java.io.IOException; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * This class incorporates behaviour common for all version * control commands. All version control commands should inherit * this class. */ public class VersionControlRemoteCommand extends RemoteCommand { private static final Log log = LogFactory.getLog(VersionControlRemoteCommand.class); /** * Maximum number of lines to be accumulated from stderr. */ private static final int MAX_ACCUMULATED_ERROR_LINES = 10; /** * An StderrLineProcessorto process stderr output. 
*/ private StderrLineProcessor stderrLineProcessor = new DefaultStderrLineProcessor(); private boolean respectErrorCode = false; private boolean showCurrentDirectoryInError = false; /** * Creates VersionControlRemoteCommand that uses system-wide * timeout for version control commands * * @param agent * @param addAutomaticSignatureToEnvironment * */ protected VersionControlRemoteCommand(final Agent agent, final boolean addAutomaticSignatureToEnvironment) throws AgentFailureException { super(agent, addAutomaticSignatureToEnvironment, getRetryTimes(), getRetryIntervalSecs(), getIgnoreErrorPatterns()); setTimeoutSecs(SystemConfigurationManagerFactory.getManager().getSystemVCSTimeout() * 60); setTimeoutCallback(new VersionControlTimeoutCallback(this)); if (addAutomaticSignatureToEnvironment) { try { addEnvironment("PARABUILD_CHECKOUT_DIR", agent.getCheckoutDirName()); } catch (IOException e) { IoUtils.ignoreExpectedException(e); } } showCurrentDirectoryInError = true; } /** * Helper method to remove unencrypted password from a String * containing command. */ public static String removePasswordFromDebugString(final String string) { if (StringUtils.isBlank(string)) return string; final Pattern pattern = Pattern.compile("[-]+[pPyY][\\w]*[\\W]*[\\w]*[\\W]?"); final Matcher matcher = pattern.matcher(string); return matcher.replaceAll(""); } /** * If set to true will fail if the error code returned by * command execution is not zero. 
* * @param respectErrorCode */ protected void setRespectErrorCode(final boolean respectErrorCode) { this.respectErrorCode = respectErrorCode; } protected static void appendCommand(final StringBuffer cmd, final String name, final String value) { cmd.append(' ').append(name); cmd.append(' ').append(value); } protected void appendCommand(final StringBuffer cmd, final String name, final int value) { appendCommand(cmd, name, Integer.toString(value)); } protected static void appendCommand(final StringBuffer cmd, final String name) { cmd.append(' ').append(name); } protected static void appendCommandIfNotBlank(final StringBuffer cmd, final String name, final String value) { if (!StringUtils.isBlank(value)) { cmd.append(' ').append(name); cmd.append(' ').append(value); } } protected static void appendCommandIfNotBlankQuoted(final StringBuffer cmd, final String name, final String value) { if (!StringUtils.isBlank(value)) { cmd.append(' ').append(name); cmd.append(' ').append(StringUtils.putIntoDoubleQuotes(value)); } } /** * Callback method - this method is called right after call to * execute. * <p/> * This method can be overriden by children to accomodate * post-execute processing such as command log analisys e.t.c. * * @param resultCode - execute command result code. 
*/ protected void postExecute(final int resultCode) throws IOException, AgentFailureException { super.postExecute(resultCode); // if (log.isDebugEnabled()) log.debug("analyze error log"); BufferedReader reader = null; try { if (getStderrFile().exists() && getStderrFile().length() > 0) { final StringBuffer message = new StringBuffer(500); reader = new BufferedReader(new FileReader(getStderrFile())); String line = reader.readLine(); int index = 0; while (line != null && index < MAX_ACCUMULATED_ERROR_LINES) { final int code = stderrLineProcessor.processLine(index, line); switch (code) { case StderrLineProcessor.RESULT_ADD_TO_ERRORS: message.append('\n').append(" ").append(line); break; case StderrLineProcessor.RESULT_IGNORE: // NOPMD // do nothing break; default: log.warn("Unexpected code: " + code); break; } line = reader.readLine(); index++; } // trow exception if there are any accumulated messages if (message.length() > 0) { throw new IOException("Errors while executing command \"" + removePasswordFromDebugString(getCommand()) + ". \nMessage: " + message + '.' + (showCurrentDirectoryInError ? "" : " \nCurrent directory: " + remoteCurrentDir)); } } // if we are hear it means nothing has happened if (respectErrorCode && resultCode != 0) { throw new IOException("Error while executing comand \"" + removePasswordFromDebugString(getCommand()) + "\". The command returned non-zero error code: " + resultCode); } } finally { IoUtils.closeHard(reader); } } /** * Sets error output processor. If not set, {@link StderrLineProcessor} is used. * * @param stderrLineProcessor to set. */ public final void setStderrLineProcessor(final StderrLineProcessor stderrLineProcessor) { this.stderrLineProcessor = stderrLineProcessor; } /** * Returns system level retry times on error. * * @return system level retry times on error. 
*/ private static int getRetryTimes() { return SystemConfigurationManagerFactory.getManager().getSystemPropertyValue(SystemProperty.RETRY_VCS_COMMAND_TIMES, 1); } /** * Returns system level retry interval on error. * * @return system level retry interval on error. */ private static int getRetryIntervalSecs() { return SystemConfigurationManagerFactory.getManager().getSystemPropertyValue(SystemProperty.RETRY_VCS_COMMAND_INTERVAL, 10); } /** * Returns system level retry patterns on error. * * @return system level retry patterns on error. */ private static List getIgnoreErrorPatterns() { return StringUtils.multilineStringToList(SystemConfigurationManagerFactory.getManager().getSystemPropertyValue(SystemProperty.RETRY_VCS_COMMAND_PATTERNS, SystemProperty.DEFAULT_RETRY_VCS_COMMAND_PATTERNS)); } /** * Version control's command timeout callback. Responsible for * reporting timeouts and hangs when a VCS command is * executed. */ private static final class VersionControlTimeoutCallback implements TimeoutCallback { private final VersionControlRemoteCommand commandToReportOn; /** * Constructor. * * @param commandToReportOn VersionControlRemoteCommand that * this timeout callback will report on in case of time out. */ public VersionControlTimeoutCallback(final VersionControlRemoteCommand commandToReportOn) { this.commandToReportOn = commandToReportOn; } /** * This callback method is called when watched command is * timed out but before watchdog tries to kill command. */ public void commandTimedOut() { final Error error = new Error(); error.setErrorLevel(Error.ERROR_LEVEL_ERROR); error.setBuildID(commandToReportOn.agent.getActiveBuildID()); error.setHostName(commandToReportOn.getAgentHost().getHost()); error.setDescription("Version control command timed out"); error.setDetails("Version control command \"" + removePasswordFromDebugString(commandToReportOn.getCommand()) + "\" has not exited after " + commandToReportOn.getTimeoutSecs() + " seconds. 
Parabuild will try to stop the command. System may require immediate attention of a build administrator."); error.setPossibleCause("Version control system has become unavailable or the timeout value is set too low."); ErrorManagerFactory.getErrorManager().reportSystemError(error); } /** * This callback method is called when watched command is * identified as hung. */ public void commandHung() { final Error error = new Error(); error.setErrorLevel(Error.ERROR_LEVEL_FATAL); error.setBuildID(commandToReportOn.agent.getActiveBuildID()); error.setHostName(commandToReportOn.getAgentHost().getHost()); error.setDescription("Version control command hung"); error.setDetails("Version control command \"" + removePasswordFromDebugString(commandToReportOn.getCommand()) + "\" hung after " + commandToReportOn.getTimeoutSecs() + " seconds timeout. System attempted and failed to stop the command. System requires immediate attention of a build administrator. The command should be stopped manually."); error.setPossibleCause("Version control system has become unavailable or time out value is set too low."); ErrorManagerFactory.getErrorManager().reportSystemError(error); } } /** * If true a error message will show the current directory. * * @param showCurrentDirectoryInError */ protected final void setShowCurrentDirectoryInError(final boolean showCurrentDirectoryInError) { this.showCurrentDirectoryInError = showCurrentDirectoryInError; } }
simeshev/parabuild-ci
src/org/parabuild/ci/versioncontrol/VersionControlRemoteCommand.java
Java
lgpl-3.0
10,806
package client;

import common.Message;

/**
 * Listener that gets informed every time when the chat client receives a new
 * message.
 */
public interface ChatPlugin {

    /**
     * Get Type of Plugin, bit-coded:
     * <pre>
     * Bit -
     * 0 - 0x01 Dummy Plugin
     * 1 - 0x02 Authentication
     * 2 - 0x04 Encryption
     * 3 - 0x08 Filter
     * 4 - 0x10 UserInterface
     * 5 - 0x20
     * 6 - 0x40
     * 7 - 0x80 Error
     * </pre>
     *
     * @return Byte containing the type flags
     */
    byte getType();

    /**
     * The given message is processed and then returned by the plugin.
     *
     * @param msg Message for the Plugin
     * @return Processed Message from the Plugin
     */
    Message process(Message msg);
}
SergiyKolesnikov/fuji
examples/Chat_casestudies/chat-carsten-schulze/features/GUI/client/ChatPlugin.java
Java
lgpl-3.0
684
/** * <copyright> * </copyright> * * $Id$ */ package orgomg.cwm.analysis.transformation.impl; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.impl.ENotificationImpl; import orgomg.cwm.analysis.transformation.TransformationPackage; import orgomg.cwm.analysis.transformation.TransformationUse; import orgomg.cwm.objectmodel.core.impl.DependencyImpl; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Use</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * <ul> * <li>{@link orgomg.cwm.analysis.transformation.impl.TransformationUseImpl#getType <em>Type</em>}</li> * </ul> * </p> * * @generated */ public class TransformationUseImpl extends DependencyImpl implements TransformationUse { /** * The default value of the '{@link #getType() <em>Type</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getType() * @generated * @ordered */ protected static final String TYPE_EDEFAULT = null; /** * The cached value of the '{@link #getType() <em>Type</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getType() * @generated * @ordered */ protected String type = TYPE_EDEFAULT; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected TransformationUseImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return TransformationPackage.Literals.TRANSFORMATION_USE; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getType() { return type; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setType(String newType) { String oldType = type; type = newType; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TransformationPackage.TRANSFORMATION_USE__TYPE, oldType, type)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case TransformationPackage.TRANSFORMATION_USE__TYPE: return getType(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case TransformationPackage.TRANSFORMATION_USE__TYPE: setType((String)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case TransformationPackage.TRANSFORMATION_USE__TYPE: setType(TYPE_EDEFAULT); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case TransformationPackage.TRANSFORMATION_USE__TYPE: return TYPE_EDEFAULT == null ? 
type != null : !TYPE_EDEFAULT.equals(type); } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (type: "); result.append(type); result.append(')'); return result.toString(); } } //TransformationUseImpl
dresden-ocl/dresdenocl
plugins/org.dresdenocl.tools.CWM/src/orgomg/cwm/analysis/transformation/impl/TransformationUseImpl.java
Java
lgpl-3.0
3,832
package nullEngine.object.component.physics;

import com.bulletphysics.collision.shapes.StaticPlaneShape;
import math.Vector4f;

import javax.vecmath.Vector3f;

/**
 * Collider component backed by a Bullet {@link StaticPlaneShape}
 * (an infinite static plane).
 */
public class PlaneCollider extends Collider {

    /**
     * Creates a static plane collider.
     *
     * @param normal plane normal; only the x/y/z components are used
     * @param buffer plane constant passed to Bullet as the shape's offset
     */
    public PlaneCollider(Vector4f normal, float buffer) {
        // Convert the engine's Vector4f into Bullet's Vector3f representation.
        Vector3f planeNormal = new Vector3f(normal.x, normal.y, normal.z);
        setCollisionShape(new StaticPlaneShape(planeNormal, buffer));
    }
}
warlockcodes/Null-Engine
Game Engine/src/nullEngine/object/component/physics/PlaneCollider.java
Java
lgpl-3.0
365
<?php

namespace Guzzle\Plugin\Cookie\Exception;

use Guzzle\Common\Exception\InvalidArgumentException;

/**
 * Exception thrown by the cookie plugin when a cookie is invalid.
 * Marker subclass of {@see InvalidArgumentException}; adds no behavior.
 */
class InvalidCookieException extends InvalidArgumentException
{
}
SeekArt/IBOS
system/modules/file/extensions/alioss/libs/guzzle/plugin/Guzzle/Plugin/Cookie/Exception/InvalidCookieException.php
PHP
lgpl-3.0
171
<?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.0" language="en" sourcelanguage="it_IT"> <context> <name>QObject</name> <message> <source>La versione del software installato è: </source> <translation>This version installed is: </translation> </message> <message> <source>USA LA LINEA DI COMANDO: </source> <translation>USAGE COMMAND LINE : </translation> </message> <message> <source>VISUALIZZA </source> <translation>DISPLAYS </translation> </message> <message> <source>-p or --package </source> <translation>-p or --package </translation> </message> <message> <source>Selezione del pacchetto da scaricare </source> <translation>Download package to select </translation> </message> <message> <source>-u or --url </source> <translation>-u or --url </translation> </message> <message> <source>Selezione dell&apos;indirizzo internet: ESEMPIO: http:// </source> <translation>Selectiing to url: EXAMPLE: http:// </translation> </message> <message> <source>VISUALIZZA LA VERSIONE INSTALLATA DEL SOFTWARE: </source> <translation>DISPLAYS THE SOFTWARE VERSION: </translation> </message> <message> <source>-v or --version </source> <translation>-v or --version </translation> </message> <message> <source>Versione del software </source> <translation>Software version </translation> </message> <message> <source>VISUALIZZA LE INFORMAZIONI DEL PROGRAMMA: </source> <translation>DISPLAYS THE SOFTWARE INFORMATION : </translation> </message> <message> <source>-h or --help </source> <translation>-h or --help </translation> </message> <message> <source>Informazioni del software.</source> <translation>software information.</translation> </message> <message> <source>Comando non trovato: </source> <translation>Command not found: </translation> </message> </context> <context> <name>update</name> <message> <source>Dialog</source> <translation></translation> </message> <message> <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;Annulla 
download&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source> <translation>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;Clear download&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</translation> </message> <message> <source>Installazione</source> <translation>Installation</translation> </message> <message> <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;Download aggiornamento&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source> <translation>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;Download update&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</translation> </message> <message> <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;Installa aggiornamento&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source> <translation>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;Install update&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</translation> </message> <message> <source>Installa aggiornamento</source> <translation>Install update</translation> </message> <message> <source>Downaload aggiornamento</source> <translation>Download update</translation> </message> <message> <source>Gestore aggiornamento</source> <translation>Update management</translation> </message> <message> <source>Scaricamento in corso di: </source> <translation>Download in progress: </translation> </message> <message> <source>Velocità di scaricamento: </source> <translation>Fast download: </translation> </message> <message> <source> Dimensione: </source> <translation> Size: </translation> </message> <message> <source> Tempo stimato: </source> <translation> Elapsed time: </translation> </message> <message> <source>Download fallito: </source> <translation>Download failed: </translation> </message> <message> <source>Scaricamento completato</source> <translation>Download completed</translation> </message> <message> <source>Scaricamento annullato</source> <translation>Clear download</translation> </message> <message> <source>Errore scaricamento</source> <translation>Download error</translation> </message> <message> <source>Download fallito </source> 
<translation>Download failed</translation> </message> </context> </TS>
kratos83/lylibrary
update/language/English.ts
TypeScript
lgpl-3.0
4,812
require 'spec_helper'

# Integration specs for NoBrainer's update_all / replace_all criteria API.
describe 'update' do
  before { load_simple_document }
  # Seed two identical documents so "replaced" counts are predictable.
  before { 2.times { SimpleDocument.create(:field1 => 10, :field2 => [10, 10]) } }

  context 'when passing a hash of attributes' do
    it 'updates documents' do
      # update_all returns RethinkDB's result hash; both docs match.
      SimpleDocument.update_all(:field1 => 2)['replaced'].should == 2
      SimpleDocument.where(:field1 => 2).count.should == 2
    end

    it 'replaces documents' do
      # replace_all swaps the whole document; limited to one doc here.
      doc = SimpleDocument.first
      SimpleDocument.all.limit(1).replace_all(doc.attributes.merge('field1' => 2))['replaced'].should == 1
      SimpleDocument.where(:field1 => 2).count.should == 1
    end
  end

  context 'when passing a block' do
    it 'updates documents' do
      # The block receives the RQL doc and returns the attribute patch.
      res = SimpleDocument.update_all { |doc| {:field1 => doc[:field1] * 2} }
      res['replaced'].should == 2
      SimpleDocument.where(:field1 => 20).count.should == 2
    end
  end

  context 'when using multi index' do
    before { SimpleDocument.index :field2, :multi => true }
    before { NoBrainer.sync_indexes }
    after { NoBrainer.drop! }
    # Silence expected error logging from the failing query below.
    before { NoBrainer.logger.level = Logger::FATAL }

    it 'deletes documents' do
      # Hash-form update works through a multi-index criteria...
      SimpleDocument.where(:field2.any => 10).update_all({:field1 => 1})
      SimpleDocument.where(:field1 => 1).count.should == 2
      # ...but the block form on a multi-index selection raises, because the
      # distinct-ed result is a DATUM rather than a SELECTION.
      expect { SimpleDocument.where(:field2.any => 10).update_all { |doc| {:field1 => doc[:field1] + 1 } } }
        .to raise_error(NoBrainer::Error::DocumentNotPersisted, /Expected type SELECTION but found DATUM/)
    end
  end
end
pap/nobrainer
spec/integration/criteria/update_spec.rb
Ruby
lgpl-3.0
1,498
/* Copyright (C) 2007 National Institute For Space Research (INPE) - Brazil. This file is part of TerraMA2 - a free and open source computational platform for analysis, monitoring, and alert of geo-environmental extremes. TerraMA2 is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. TerraMA2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with TerraMA2. See LICENSE. If not, write to TerraMA2 Team at <terrama2-team@dpi.inpe.br>. */ /*! \file terrama2/core/utility/JSonUtils.hpp \brief \author Jano Simas */ #include <terralib/datatype/TimeInstantTZ.h> #include <terralib/geometry/Geometry.h> #include <terralib/geometry/Point.h> #include <algorithm> #include <functional> #include <iterator> //STL #include <limits> #include <map> #include <string> #include <unordered_map> #include <utility> #include <vector> #include "../Exception.hpp" #include "../data-model/DataSetOccurrence.hpp" #include "../../Exception.hpp" #include "../Typedef.hpp" #include "../data-model/DataManager.hpp" #include "../data-model/DataProvider.hpp" #include "../data-model/DataSeries.hpp" #include "../data-model/DataSeriesSemantics.hpp" #include "../data-model/DataSet.hpp" #include "../data-model/DataSetDcp.hpp" #include "../data-model/DataSetGrid.hpp" #include "../data-model/Project.hpp" #include "../data-model/Risk.hpp" #include "../utility/GeoUtils.hpp" #include "../utility/Logger.hpp" #include "../utility/TimeUtils.hpp" #include "../utility/Verify.hpp" #include "JSonUtils.hpp" #include "SemanticsManager.hpp" #include <QJsonArray> terrama2::core::DataProviderPtr 
terrama2::core::fromDataProviderJson(QJsonObject json) { if(json["class"].toString() != "DataProvider") { QString errMsg = QObject::tr("Invalid DataProvider JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } if(!(json.contains("id") && json.contains("project_id") && json.contains("name") && json.contains("description") && json.contains("intent") && json.contains("uri") && json.contains("active") && json.contains("data_provider_type"))) { QString errMsg = QObject::tr("Invalid DataProvider JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } std::shared_ptr<terrama2::core::DataProvider> provider = std::make_shared<terrama2::core::DataProvider>(); provider->id = json["id"].toInt(); provider->projectId = json["project_id"].toInt(); provider->name = json["name"].toString().toStdString(); provider->description = json["description"].toString().toStdString(); provider->intent = static_cast<terrama2::core::DataProviderIntent>(json["intent"].toInt()); provider->uri = json["uri"].toString().toStdString(); provider->active = json["active"].toBool(); provider->dataProviderType = json["data_provider_type"].toString().toStdString(); if(json.contains("options")) { auto obj = json["options"].toObject(); for(auto it = obj.begin(); it != obj.end(); ++it) { provider->options.emplace(it.key().toStdString(), it.value().toString().toStdString()); } } try { auto timeout = provider->options.at("timeout"); provider->timeout = std::stoi(timeout); } catch (...) 
{ provider->timeout = 8; } return provider; } terrama2::core::DataSeriesPtr terrama2::core::fromDataSeriesJson(QJsonObject json) { if(json["class"].toString() != "DataSeries") { QString errMsg = QObject::tr("Invalid DataSeries JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } if(!(json.contains("id") && json.contains("data_provider_id") && json.contains("semantics") && json.contains("name") && json.contains("active") && json.contains("description"))) { QString errMsg = QObject::tr("Invalid DataSeries JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } std::shared_ptr<terrama2::core::DataSeries> dataSeries = std::make_shared<terrama2::core::DataSeries>(); dataSeries->id = json["id"].toInt(); dataSeries->dataProviderId = json["data_provider_id"].toInt(); dataSeries->semantics = SemanticsManager::getInstance().getSemantics(json["semantics"].toString().toStdString()); dataSeries->name = json["name"].toString().toStdString(); dataSeries->description = json["description"].toString().toStdString(); dataSeries->active = json["active"].toBool(); QJsonArray dataSetArray = json["datasets"].toArray(); std::function<terrama2::core::DataSetPtr(QJsonObject)> createDataSet = nullptr; switch(dataSeries->semantics.dataSeriesType) { case DataSeriesType::DCP: createDataSet = fromDataSetDcpJson; break; case DataSeriesType::OCCURRENCE: createDataSet = fromDataSetOccurrenceJson; break; case DataSeriesType::GRID: createDataSet = fromDataSetGridJson; break; case DataSeriesType::GEOMETRIC_OBJECT: case DataSeriesType::ANALYSIS_MONITORED_OBJECT: createDataSet = fromDataSetJson; break; default: { QString errMsg = QObject::tr("Invalid DataSeries JSON object.\nUnknown DataSet type."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } } for(auto json : dataSetArray) { if(json.isObject()) 
dataSeries->datasetList.push_back(createDataSet(json.toObject())); else throw terrama2::core::JSonParserException() << ErrorDescription(QObject::tr("Invalid DataSet JSON object.")); } return dataSeries; } void terrama2::core::addBaseDataSetData(QJsonObject json, std::shared_ptr<terrama2::core::DataSet> dataSet) { if(json["class"].toString() != "DataSet") { QString errMsg = QObject::tr("Invalid DataSet JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } if(!(json.contains("id") && json.contains("data_series_id") && json.contains("active") && json.contains("format"))) { QString errMsg = QObject::tr("Invalid DataSet JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } dataSet->id = json["id"].toInt(); dataSet->dataSeriesId = json["data_series_id"].toInt(); dataSet->active = json["active"].toBool(); auto formatObj= json["format"].toObject(); for(auto it = formatObj.begin(); it != formatObj.end(); ++it) { dataSet->format.emplace(it.key().toStdString(), it.value().toString().toStdString()); } } terrama2::core::DataSetPtr terrama2::core::fromDataSetDcpJson(QJsonObject json) { std::shared_ptr<terrama2::core::DataSetDcp> dataSet = std::make_shared<terrama2::core::DataSetDcp>(); addBaseDataSetData(json, dataSet); if(!json.contains("position")) { QString errMsg = QObject::tr("Invalid DataSet JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } std::string ewkt = json["position"].toString().toStdString(); auto geom = ewktToGeom(ewkt); auto point = std::dynamic_pointer_cast<te::gm::Point>(geom); if(!point.get()) { QString errMsg = QObject::tr("Invalid DataSet JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } dataSet->position = point; return dataSet; } terrama2::core::DataSetPtr 
terrama2::core::fromDataSetJson(QJsonObject json) { std::shared_ptr<terrama2::core::DataSet> dataSet = std::make_shared<terrama2::core::DataSet>(); addBaseDataSetData(json, dataSet); return dataSet; } terrama2::core::DataSetPtr terrama2::core::fromDataSetOccurrenceJson(QJsonObject json) { std::shared_ptr<terrama2::core::DataSet> dataSet = std::make_shared<terrama2::core::DataSetOccurrence>(); addBaseDataSetData(json, dataSet); return dataSet; } terrama2::core::DataSetPtr terrama2::core::fromDataSetGridJson(QJsonObject json) { std::shared_ptr<terrama2::core::DataSet> dataSet = std::make_shared<terrama2::core::DataSetGrid>(); addBaseDataSetData(json, dataSet); return dataSet; } terrama2::core::Filter terrama2::core::fromFilterJson(QJsonObject json, DataManager* dataManager) { if(json.empty()) return terrama2::core::Filter(); if(json["class"].toString() != "Filter") { QString errMsg = QObject::tr("Invalid Filter JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } terrama2::core::Filter filter; if(json.contains("discard_before") && !json.value("discard_before").isNull()) { std::string dateTime = json.value("discard_before").toString().toStdString(); filter.discardBefore = TimeUtils::stringToTimestamp(dateTime, terrama2::core::TimeUtils::webgui_timefacet); verify::date(filter.discardBefore); } if(json.contains("discard_after") && !json.value("discard_after").isNull()) { std::string dateTime = json["discard_after"].toString().toStdString(); filter.discardAfter = TimeUtils::stringToTimestamp(dateTime, terrama2::core::TimeUtils::webgui_timefacet); verify::date(filter.discardAfter); } if(filter.discardBefore && filter.discardAfter && (*filter.discardBefore > *filter.discardAfter)) { QString errMsg = QObject::tr("Invalid Filter JSON object./nEmpty date filter interval."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } if(json.contains("region") 
&& !json.value("region").isNull()) { auto ewkt = json["region"].toString().toStdString(); filter.region = ewktToGeom(ewkt); verify::srid(filter.region->getSRID()); } if(json.contains("by_value") && !json.value("by_value").isNull()) { filter.byValue = json["by_value"].toString().toStdString(); } if(json.contains("last_values") && !json.value("last_values").isNull()) { filter.lastValues = std::make_shared<size_t>(json["last_values"].toInt()); } if(json.contains("data_series_id") && !json.value("data_series_id").isNull()) { DataSeriesId dataSeriesId = json["data_series_id"].toInt(); // Sets the data series for a static data filter if(dataSeriesId != 0) { auto dataSeries = dataManager->findDataSeries(dataSeriesId); filter.dataSeries = dataSeries; auto dataProvider = dataManager->findDataProvider(dataSeries->dataProviderId); filter.dataProvider = dataProvider; } } if (json.contains("crop_raster")) filter.cropRaster = json["crop_raster"].toBool(); else filter.cropRaster = false; return filter; } terrama2::core::LegendPtr terrama2::core::fromRiskJson(QJsonObject json) { if(json["class"].toString() != "Legend") { QString errMsg = QObject::tr("Invalid Legend JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } if(!(json.contains("name") && json.contains("description") && json.contains("levels"))) { QString errMsg = QObject::tr("Invalid Risk JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } std::shared_ptr<terrama2::core::Risk> risk = std::make_shared<terrama2::core::Risk>(); risk->name = json["name"].toString().toStdString(); risk->description = json["description"].toString().toStdString(); risk->id = json["id"].toInt(); auto riskLevelsArray = json["levels"].toArray(); for(const auto& value : riskLevelsArray) { auto obj = value.toObject(); terrama2::core::RiskLevel riskLevel; riskLevel.name = obj["name"].toString().toStdString(); 
riskLevel.value = obj["value"].toDouble(); if(obj["level"].isNull()) { //default risk riskLevel.level = std::numeric_limits<uint32_t>::max(); risk->defaultRisk = riskLevel; } else { riskLevel.level = static_cast<uint32_t>(obj["level"].toInt()); risk->riskLevels.push_back(riskLevel); } } std::sort(std::begin(risk->riskLevels), std::end(risk->riskLevels)); return risk; } QJsonObject terrama2::core::toJson(const terrama2::core::Risk& risk) { QJsonObject obj; obj.insert("class", QString("Risk")); obj.insert("name", QString::fromStdString(risk.name)); obj.insert("description", QString::fromStdString(risk.description)); QJsonArray riskArray; for(const auto& riskLevel : risk.riskLevels) { QJsonObject tempoObj; tempoObj.insert("name", QString::fromStdString(riskLevel.name)); tempoObj.insert("level", static_cast<int>(riskLevel.level)); tempoObj.insert("value", riskLevel.value); riskArray.append(tempoObj); } obj.insert("levels", riskArray); return obj; } QJsonObject terrama2::core::toJson(const terrama2::core::Filter& filter) { QJsonObject obj; obj.insert("class", QString("Filter")); if(filter.discardBefore.get()) { std::string discardBefore = TimeUtils::boostLocalTimeToString(filter.discardBefore->getTimeInstantTZ(), TimeUtils::webgui_timefacet); obj.insert("discard_before", QString::fromStdString(discardBefore)); } if(filter.discardAfter.get()) { std::string discardAfter = TimeUtils::boostLocalTimeToString(filter.discardAfter->getTimeInstantTZ(), TimeUtils::webgui_timefacet); obj.insert("discard_after", QString::fromStdString(discardAfter)); } if(filter.region.get()) { std::string region = filter.region->toString(); obj.insert("region", QString::fromStdString(region)); } obj.insert("last_values", static_cast<qint32>(*filter.lastValues.get())); if(filter.dataSeries) obj.insert("data_series_id", static_cast<int32_t>(filter.dataSeries->id)); obj.insert("by_value", QString::fromStdString(filter.byValue)); return obj; } terrama2::core::Schedule 
terrama2::core::fromScheduleJson(QJsonObject json) { if(json.empty()) return terrama2::core::Schedule(); if(json["class"].toString() != "Schedule") { QString errMsg = QObject::tr("Invalid Schedule JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } if(!(json.contains("id") && json.contains("frequency") && json.contains("frequency_unit") && json.contains("frequency_start_time") && json.contains("schedule") && json.contains("schedule_time") && json.contains("schedule_unit") && json.contains("schedule_retry") && json.contains("schedule_retry_unit") && json.contains("schedule_timeout") && json.contains("schedule_timeout_unit"))) { QString errMsg = QObject::tr("Invalid Schedule JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } terrama2::core::Schedule schedule; schedule.id = json["id"].toInt(); schedule.frequency = json["frequency"].toInt(); schedule.frequencyUnit = json["frequency_unit"].toString().toStdString(); schedule.frequencyStartTime = json["frequency_start_time"].toString().toStdString(); schedule.schedule = json["schedule"].toInt(); schedule.scheduleTime = json["schedule_time"].toString().toStdString(); schedule.scheduleUnit = json["schedule_unit"].toString().toStdString(); schedule.scheduleRetry = json["schedule_retry"].toInt(); schedule.scheduleRetryUnit = json["schedule_retry_unit"].toString().toStdString(); schedule.scheduleTimeout = json["schedule_timeout"].toInt(); schedule.scheduleTimeoutUnit = json["schedule_timeout_unit"].toString().toStdString(); if(json.contains("reprocessing_historical_data") && !json["reprocessing_historical_data"].isNull()) schedule.reprocessingHistoricalData = fromReprocessingHistoricalData(json["reprocessing_historical_data"].toObject()); return schedule; } QJsonObject terrama2::core::toJson(DataProviderPtr dataProviderPtr) { QJsonObject obj; obj.insert("class", 
QString("DataProvider")); obj.insert("id", static_cast<int32_t>(dataProviderPtr->id)); obj.insert("project_id", static_cast<int32_t>(dataProviderPtr->projectId)); obj.insert("name", QString::fromStdString(dataProviderPtr->name)); obj.insert("description", QString::fromStdString(dataProviderPtr->description)); obj.insert("intent", static_cast<int>(dataProviderPtr->intent)); obj.insert("uri", QString::fromStdString(dataProviderPtr->uri)); obj.insert("active", dataProviderPtr->active); obj.insert("timeout", static_cast<int>(dataProviderPtr->timeout)); obj.insert("data_provider_type", QString::fromStdString(dataProviderPtr->dataProviderType)); return obj; } QJsonObject terrama2::core::toJson(DataSeriesPtr dataSeriesPtr) { QJsonObject obj; obj.insert("class", QString("DataSeries")); obj.insert("id", static_cast<int32_t>(dataSeriesPtr->id)); obj.insert("data_provider_id", static_cast<int32_t>(dataSeriesPtr->dataProviderId)); obj.insert("semantics", QString::fromStdString(dataSeriesPtr->semantics.code)); obj.insert("name", QString::fromStdString(dataSeriesPtr->name)); obj.insert("description", QString::fromStdString(dataSeriesPtr->description)); QJsonArray array; for(const auto& dataSet : dataSeriesPtr->datasetList) array.push_back(terrama2::core::toJson(dataSet, dataSeriesPtr->semantics)); obj.insert("datasets", array); return obj; } QJsonObject terrama2::core::toJson(DataSetPtr dataSetPtr, DataSeriesSemantics semantics) { QJsonObject obj; obj.insert("class", QString("DataSet")); obj.insert("id", static_cast<int32_t>(dataSetPtr->id)); obj.insert("data_series_id", static_cast<int32_t>(dataSetPtr->dataSeriesId)); obj.insert("data_series_id", static_cast<int32_t>(dataSetPtr->dataSeriesId)); obj.insert("active", dataSetPtr->active); QJsonObject format; for(const auto & it : dataSetPtr->format) { format.insert(QString::fromStdString(it.first), QString::fromStdString(it.second)); } obj.insert("format", format); switch(semantics.dataSeriesType) { case 
terrama2::core::DataSeriesType::DCP : { auto dataSet = std::dynamic_pointer_cast<const DataSetDcp>(dataSetPtr); terrama2::core::addToJson(obj, dataSet); break; } case terrama2::core::DataSeriesType::OCCURRENCE : { auto dataSet = std::dynamic_pointer_cast<const DataSetOccurrence>(dataSetPtr); terrama2::core::addToJson(obj, dataSet); break; } case terrama2::core::DataSeriesType::GRID : { auto dataSet = std::dynamic_pointer_cast<const DataSetGrid>(dataSetPtr); terrama2::core::addToJson(obj, dataSet); break; } default: /* code */ break; } return obj; } void terrama2::core::addToJson(QJsonObject& obj, DataSetDcpPtr dataSetPtr) { obj.insert("position", QString::fromStdString(dataSetPtr->position->toString())); } void terrama2::core::addToJson(QJsonObject& /*obj*/, DataSetOccurrencePtr /*dataSetPtr*/) { } void terrama2::core::addToJson(QJsonObject& /*obj*/, DataSetGridPtr /*dataSetPtr*/) { } QJsonObject terrama2::core::toJson(Schedule schedule) { QJsonObject obj; obj.insert("class", QString("Schedule")); obj.insert("id", static_cast<int32_t>(schedule.id)); obj.insert("frequency",static_cast<int32_t>(schedule.frequency)); obj.insert("frequency_unit", QString::fromStdString(schedule.frequencyUnit)); obj.insert("frequency_start_time", QString::fromStdString(schedule.frequencyStartTime)); obj.insert("schedule",static_cast<int32_t>(schedule.schedule)); obj.insert("schedule_time",QString::fromStdString(schedule.scheduleTime)); obj.insert("schedule_unit",QString::fromStdString(schedule.scheduleUnit)); obj.insert("schedule_retry",static_cast<int32_t>(schedule.scheduleRetry)); obj.insert("schedule_retry_unit", QString::fromStdString(schedule.scheduleRetryUnit)); obj.insert("schedule_timeout",static_cast<int32_t>(schedule.scheduleTimeout)); obj.insert("schedule_timeout_unit", QString::fromStdString(schedule.scheduleTimeoutUnit)); obj.insert("reprocessing_historical_data", toJson(schedule.reprocessingHistoricalData)); return obj; } QJsonObject 
terrama2::core::toJson(terrama2::core::ReprocessingHistoricalDataPtr reprocessingHistoricalDataPtr) { QJsonObject obj; if(!reprocessingHistoricalDataPtr) return obj; obj.insert("class", QString("ReprocessingHistoricalData")); if(reprocessingHistoricalDataPtr->startDate.get()) { std::string startDate = terrama2::core::TimeUtils::boostLocalTimeToString(reprocessingHistoricalDataPtr->startDate->getTimeInstantTZ(), terrama2::core::TimeUtils::webgui_timefacet); obj.insert("start_date", QString::fromStdString(startDate)); } if(reprocessingHistoricalDataPtr->endDate.get()) { std::string endDate = terrama2::core::TimeUtils::boostLocalTimeToString(reprocessingHistoricalDataPtr->endDate->getTimeInstantTZ(), terrama2::core::TimeUtils::webgui_timefacet); obj.insert("end_date", QString::fromStdString(endDate)); } return obj; } terrama2::core::ReprocessingHistoricalDataPtr terrama2::core::fromReprocessingHistoricalData( const QJsonObject& json) { if(json.isEmpty()) { return terrama2::core::ReprocessingHistoricalDataPtr(); } if(json["class"].toString() != "ReprocessingHistoricalData") { QString errMsg(QObject::tr("Invalid ReprocessingHistoricalData JSON object.")); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } if(!(json.contains("start_date") && json.contains("end_date"))) { QString errMsg(QObject::tr("Invalid ReprocessingHistoricalData JSON object.")); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } ReprocessingHistoricalData* reprocessingHistoricalData = new ReprocessingHistoricalData; ReprocessingHistoricalDataPtr reprocessingHistoricalDataPtr(reprocessingHistoricalData); if(!json.value("start_date").isNull()) { std::string startDate = json["start_date"].toString().toStdString(); reprocessingHistoricalData->startDate = terrama2::core::TimeUtils::stringToTimestamp(startDate, terrama2::core::TimeUtils::webgui_timefacet); } if(!json.value("end_date").isNull()) { 
std::string endDate = json["end_date"].toString().toStdString(); reprocessingHistoricalData->endDate = terrama2::core::TimeUtils::stringToTimestamp(endDate, terrama2::core::TimeUtils::webgui_timefacet); } return reprocessingHistoricalDataPtr; } terrama2::core::ProjectPtr terrama2::core::fromProjectJson(QJsonObject json) { if(json.empty()) { QString errMsg = QObject::tr("Invalid Project JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } if(json["class"].toString() != "Project") { QString errMsg = QObject::tr("Invalid Project JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } if(!(json.contains("id") && json.contains("name") && json.contains("active"))) { QString errMsg = QObject::tr("Invalid Project JSON object."); TERRAMA2_LOG_ERROR() << errMsg; throw terrama2::core::JSonParserException() << ErrorDescription(errMsg); } auto project = std::make_shared<terrama2::core::Project>(); project->id = json["id"].toInt(); project->name = json["name"].toString().toStdString(); project->active = json["active"].toBool(); return project; } QJsonObject terrama2::core::toJson(const terrama2::core::ProjectPtr& project) { QJsonObject obj; if(!project) return obj; obj.insert("class", QString("Project")); obj.insert("id", static_cast<int32_t>(project->id)); obj.insert("name", QString::fromStdString(project->name)); obj.insert("active", project->active); return obj; }
janosimas/terrama2
src/terrama2/core/utility/JSonUtils.cpp
C++
lgpl-3.0
24,685
/* Mesquite source code. Copyright 1997-2006 W. Maddison and D. Maddison. Version 1.12, September 2006. Disclaimer: The Mesquite source code is lengthy and we are few. There are no doubt inefficiencies and goofs in this code. The commenting leaves much to be desired. Please approach this source code with the spirit of helping out. Perhaps with your help we can be more than a few, and make Mesquite better. Mesquite is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY. Mesquite's web site is http://mesquiteproject.org This source code and its compiled class files are free and modifiable under the terms of GNU Lesser General Public License. (http://www.gnu.org/copyleft/lesser.html) */ package mesquite.lib.table; import java.awt.*; import java.awt.event.*; import mesquite.lib.*; import java.io.*; /* ======================================================================== */ /** A panel for row headings in a MesquiteTable.*/ public class RowNamesPanel extends EditorPanel { MesquiteTable table; public int width, height; public RowNamesPanel (MesquiteTable table , int w, int h) { super(table); this.table=table; //setBackground(ColorDistribution.medium[table.colorScheme]); setBackground(Color.white); setTableUnitSize(w, h); } public void setTableUnitSize (int w, int h) { this.width=w; this.height=h; setSize(w, height); } public void setWidth (int w) { this.width=w; setSize(w, height); } /*@@@...............................................................................................................*/ /** returns in which column x lies, -1 if to left, -2 if to right.*/ public int findColumn(int x) { return -1; //if left of grabbers? 
} /*@@@...............................................................................................................*/ /** returns in which row y lies, -1 if above all rows, -2 if below all rows.*/ public int findRow(int y) { if (y<=0) return -1; int ry = 0; for (int row=table.firstRowVisible; (row<table.numRowsTotal) && (ry<y); row++) { ry += table.rowHeights[row]; if (row>= table.numRowsTotal) return -1; else if (ry>=y) return row; } return -2;//past the last row } /*@@@...............................................................................................................*/ /** returns in which column x lies, -1 if to left, -2 if to right.*/ public int findRegionInCellH(int x) { if (x<=0) return 50; return (x-startOfColumn(-1))*100/(columnWidth(-1)- startOfColumn(-1)); } /*@@@...............................................................................................................*/ /** returns in which column x lies, -1 if to left, -2 if to right.*/ public int findRegionInCellV(int y) { if (y<=0) return 50; int ry = 0; for (int row=table.firstRowVisible; (row<table.numRowsTotal) && (ry<y); row++) { ry += table.rowHeights[row]; if (row>= table.numRowsTotal) return 50; else if (ry>=y) { int dYB = ry-y; //distance from bottom edge to int dYU = y - (ry-table.rowHeights[row]); //distance from left edge to return dYU*100/(dYB+dYU); } } return 50; } public int startOfColumn(int column){ return table.getColumnGrabberWidth()-2; } public int firstColumnVisible(){ return -1; } public int numColumnsVisible(){ return 1; } public int columnWidth(int column) { //todo: why does this not subtract grabbers, but ColumnNames does? 
return width; } public void textReturned(int column, int row, String text, CommandRecord commandRec){ table.returnedRowNameText(row, text, commandRec); } public String getText(int column, int row){ return table.getRowNameText(row); } public void deselectCell(int column,int row){ table.deselectRowName(row); } public void redrawCell(int column, int row){ Graphics g = getGraphics(); if (g!=null) { redrawName(g, row); g.dispose(); } } public void redrawName(Graphics g, int row) { int top = table.getFirstRowVisible(); if (row<top) //TODO: should also fail to draw if to big return; if (row == returningRow){ return; //don't draw if text about to be returned to cell, and will soon be redrawn anyway } int leftSide = startOfColumn(-1); int topSide = startOfRow(row); if (topSide>getBounds().height || topSide+rowHeight(row)<0) return; Shape clip = g.getClip(); g.setClip(0,topSide,columnWidth(-1), rowHeight(row)); prepareCell(g, 1,topSide+1,columnWidth(-1), rowHeight(row)-2, table.focusRow == row, table.isRowNameSelected(row) || table.isRowSelected(row), table.getCellDimmed(-1, row), table.isRowNameEditable(row)); g.setClip(0,0, getBounds().width, getBounds().height); if (table.frameRowNames) { Color cg = g.getColor(); g.setColor(Color.gray); g.drawLine(0, topSide+rowHeight(row), width, topSide+rowHeight(row)); g.setColor(cg); } Font fnt = null; boolean doFocus = table.focusRow == row && table.boldFont !=null; if (doFocus){ fnt = g.getFont(); g.setFont(table.boldFont); } Color oldColor = g.getColor(); if (table.showRowGrabbers) { if (table.showRowNumbers) table.drawRowColumnNumber(g,row,true,0,topSide+1, table.getRowGrabberWidth(),rowHeight(row)-2); else table.drawRowColumnNumberBox(g,row,true,0,topSide+1, table.getRowGrabberWidth(),rowHeight(row)-2); g.setClip(0+table.getRowGrabberWidth(),topSide, width-table.getRowGrabberWidth(),rowHeight(row)); table.setRowNameColor(g, row); table.drawRowNameCell(g, 0+table.getRowGrabberWidth(),topSide, 
width-table.getRowGrabberWidth(),rowHeight(row), row); } else { g.setClip(0,topSide, width,rowHeight(row)); table.setRowNameColor(g, row); table.drawRowNameCell(g, 0,topSide, width,rowHeight(row), row); } g.setColor(oldColor); if (doFocus && fnt !=null){ g.setFont(fnt); } g.setClip(0,0, getBounds().width, getBounds().height); g.setColor(Color.black); if (table.getDropDown(-1, row)) { int offset = 0; if (table.showRowGrabbers) offset = table.getRowGrabberWidth(); dropDownTriangle.translate(1 + offset,topSide + 1); g.setColor(Color.white); g.drawPolygon(dropDownTriangle); g.setColor(Color.black); g.fillPolygon(dropDownTriangle); dropDownTriangle.translate(-(1 + offset),-(topSide + 1)); } g.setClip(clip); g.drawLine(width-1, 0, width-1, height); } public void repaint(){ checkEditFieldLocation(); super.repaint(); } public void paint(Graphics g) { if (MesquiteWindow.checkDoomed(this)) return; try { table.checkResetFont(g); int lineY = 0; int oldLineY=lineY; int resetWidth = getBounds().width; int resetHeight = getBounds().height; width = resetWidth;//this is here to test if width/height should be reset here height = resetHeight; Shape clip = g.getClip(); for (int r=table.firstRowVisible; (r<table.numRowsTotal) && (lineY<height); r++) { redrawName(g, r); } g.setClip(0,0, getBounds().width, getBounds().height); if (false && getEditing()) { TextField edit = getEditField(); if (edit!= null) edit.repaint(); } if ((endOfLastRow()>=0) && (endOfLastRow()<table.matrixHeight)) { g.setColor(ColorDistribution.medium[table.colorScheme]); g.fillRect(0, endOfLastRow()+1, getBounds().width, getBounds().height); } g.setColor(Color.black); if (table.frameRowNames) g.drawRect(0, 0, width, height-1); g.drawLine(width-1, 0, width-1, height); g.setClip(clip); } catch (Throwable e){ MesquiteMessage.warnProgrammer("Exception or Error in drawing table (RNP); details in Mesquite log file"); PrintWriter pw = MesquiteFile.getLogWriter(); if (pw!=null) e.printStackTrace(pw); } 
MesquiteWindow.uncheckDoomed(this); } public void print(Graphics g) { int lineY = 0; int oldLineY=lineY; Shape clip = g.getClip(); g.setClip(0,0, getBounds().width, getBounds().height); for (int r=0; (r<table.numRowsTotal); r++) { lineY += table.rowHeights[r]; g.setClip(0,oldLineY, width, table.rowHeights[r]); g.setColor(Color.black); table.drawRowNameCell(g, 0,startOfRow(r), width,rowHeight(r), r); //table.drawRowNameCell(g, 20,oldLineY, width,table.rowHeights[r], r); g.setColor(Color.black); oldLineY=lineY; } g.setClip(0,0, width, table.getTotalRowHeight()); g.setColor(Color.black); g.drawLine(width-1, 0, width-1, table.getTotalRowHeight()); g.setClip(clip); } public void OLDprint(Graphics g) { int lineY = 0; int oldLineY=lineY; int resetWidth = getBounds().width; int resetHeight = getBounds().height; width = resetWidth;//this is here to test if width/height should be reset here height = resetHeight; Shape clip = g.getClip(); for (int r=table.firstRowVisible; (r<table.numRowsTotal) && (lineY<height); r++) { lineY += table.rowHeights[r]; /* if (table.frameRowNames) { g.setColor(Color.gray); g.drawLine(0, lineY, width, lineY); } */ g.setColor(Color.black); g.setClip(0,oldLineY, width,table.rowHeights[r]); table.drawRowNameCell(g, 0,oldLineY, width,table.rowHeights[r], r); g.setClip(clip); g.setColor(Color.black); oldLineY=lineY; } g.setClip(0,0, getBounds().width, getBounds().height); g.setColor(Color.black); g.drawLine(width-1, 0, width-1, height); g.setClip(clip); } /*...............................................................................................................*/ int touchY = -1; int lastY=-1; int touchRow; int previousRowDragged = -1; /*...............................................................................................................*/ public void mouseDown(int modifiers, int clickCount, long when, int x, int y, MesquiteTool tool) { if (!(tool instanceof TableTool)) return; touchY=-1; touchRow=-1; int possibleTouch = findRow(y); int 
regionInCellH = findRegionInCellH(x); int regionInCellV =findRegionInCellV(y); boolean isArrowEquivalent = ((TableTool)tool).isArrowKeyOnRow(x,table); if (possibleTouch>=0 && possibleTouch<table.numRowsTotal) { if (tool != null && isArrowEquivalent && table.getUserMoveRow() && table.isRowSelected(possibleTouch) && !MesquiteEvent.shiftKeyDown(modifiers) && !MesquiteEvent.commandOrControlKeyDown(modifiers)) { touchY=y; lastY = y; touchRow=possibleTouch; table.shimmerHorizontalOn(touchY); } else if ((table.showRowGrabbers) && (x<table.getRowGrabberWidth())) { if (((TableTool)tool).getIsBetweenRowColumnTool() && !isArrowEquivalent) possibleTouch = table.findRowBeforeBetween(y); table.rowTouched(isArrowEquivalent, possibleTouch,regionInCellH, regionInCellV,modifiers); if (tool != null && isArrowEquivalent && table.getUserMoveRow() && table.isRowSelected(possibleTouch) && !MesquiteEvent.shiftKeyDown(modifiers) && !MesquiteEvent.commandOrControlKeyDown(modifiers)) { touchY=y; lastY = MesquiteInteger.unassigned;; touchRow=possibleTouch; //table.shimmerHorizontalOn(touchY); } } else if (isArrowEquivalent) { table.rowNameTouched(possibleTouch,regionInCellH, regionInCellV, modifiers,clickCount); } else if (tool!=null && ((TableTool)tool).getWorksOnRowNames()) { if (((TableTool)tool).getIsBetweenRowColumnTool()) possibleTouch = table.findRowBeforeBetween(y); touchY=y; lastY = y; touchRow=possibleTouch; table.rowNameTouched(possibleTouch,regionInCellH, regionInCellV, modifiers,clickCount); } } else if (possibleTouch==-2 && ((TableTool)tool).getWorksBeyondLastRow()) table.rowTouched(isArrowEquivalent,possibleTouch,regionInCellH, regionInCellV,modifiers); else if (tool != null && tool.isArrowTool()){ table.offAllEdits(); if (table.anythingSelected()) { table.deselectAllNotify(); table.repaintAll(); } } } /*...............................................................................................................*/ public void mouseDrag(int modifiers, int x, int y, 
MesquiteTool tool) { if (touchRow>=0 && tool != null) if (((TableTool)tool).isArrowKeyOnRow(x,table)) { if (table.getUserAdjustColumn()==MesquiteTable.RESIZE) { table.shimmerHorizontalOff(lastY); table.shimmerHorizontalOn(y); lastY=y; } else if (table.getUserMoveColumn()) { table.shimmerHorizontalOff(lastY); table.shimmerHorizontalOn(y); lastY=y; } } else if (((TableTool)tool).getWorksOnRowNames()) { int dragRow = findRow(y); int regionInCellH = findRegionInCellH(x); int regionInCellV =findRegionInCellV(y); ((TableTool)tool).cellDrag(-1,dragRow,regionInCellH,regionInCellV,modifiers); if (((TableTool)tool).getEmphasizeRowsOnMouseDrag()){ table.emphasizeRow(previousRowDragged,dragRow, touchRow, false, Color.blue); previousRowDragged = dragRow; } } } /*...............................................................................................................*/ public void mouseUp(int modifiers, int x, int y, MesquiteTool tool) { if (touchRow>=0 && tool != null) if (((TableTool)tool).isArrowKeyOnRow(x,table)) { if (!table.anyRowSelected()) { if (table.getUserAdjustRow()==MesquiteTable.RESIZE) { /*table.shimmerVerticalOff(lastX); int newRH = table.rowHeights[touchRow] + x-touchX; if (newRH > 16) { table.setRowHeight(touchRow, newRH); table.rowHeightsAdjusted.setBit(touchRow); table.repaintAll(); }*/ } if (table.getUserMoveRow()) table.shimmerHorizontalOff(lastY); } /*@@@*/ else { if (table.getUserMoveRow()) { table.shimmerHorizontalOff(lastY); int dropRow = table.findRowBeforeBetween(y); if (dropRow == -2) dropRow = table.getNumRows(); if (dropRow != touchRow && (dropRow != touchRow-1) && !table.isRowSelected(dropRow)) //don't move dropped on row included in selection table.selectedRowsDropped(dropRow); } else if (table.getUserAdjustRow()==MesquiteTable.RESIZE) ;//table.shimmerVerticalOff(lastX); } } else if (((TableTool)tool).getWorksOnRowNames()) { int dropRow = findRow(y); int regionInCellH = findRegionInCellH(x); int regionInCellV =findRegionInCellV(y); 
((TableTool)tool).cellDropped(-1,dropRow,regionInCellH,regionInCellV,modifiers); } } /*...............................................................................................................*/ public void mouseExited(int modifiers, int x, int y, MesquiteTool tool) { if (!(table.editingAnything() || table.singleTableCellSelected()) && tool != null && tool.isArrowTool()) setWindowAnnotation("", null); setCursor(Cursor.getDefaultCursor()); int row = findRow(y); table.mouseExitedCell(modifiers, -1, -1, row, -1, tool); } /*...............................................................................................................*/ public void setCurrentCursor(int modifiers, int x, int row, MesquiteTool tool) { if (tool == null || !(tool instanceof TableTool)) setCursor(getDisabledCursor()); else if (row>=0 && row<table.numRowsTotal) { if (((TableTool)tool).isArrowKeyOnRow(x,table)) { setCursor(table.getHandCursor()); if (!(table.getUserMoveRow() && table.isRowSelected(row) && !MesquiteEvent.shiftKeyDown(modifiers) && !MesquiteEvent.controlKeyDown(modifiers))) { if (!(table.editingAnything() || table.singleTableCellSelected())) { String s = table.getRowComment(row); if (s!=null) setWindowAnnotation(s, "Footnote above refers to " + table.getRowNameText(row)); else setWindowAnnotation("", null); } } } else if (((TableTool)tool).getWorksOnRowNames()) setCursor(tool.getCursor()); else setCursor(getDisabledCursor()); } else if (((TableTool)tool).getWorksBeyondLastRow() && (row==-2)) setCursor(tool.getCursor()); else setCursor(getDisabledCursor()); } /*...............................................................................................................*/ public void mouseEntered(int modifiers, int x, int y, MesquiteTool tool) { if (table == null) return; int row = findRow(y); setCurrentCursor(modifiers, x, row, tool); table.mouseInCell(modifiers, -1, -1, row, -1, tool); } 
/*...............................................................................................................*/ public void mouseMoved(int modifiers, int x, int y, MesquiteTool tool) { int row = findRow(y); setCurrentCursor(modifiers, x, row, tool); table.mouseInCell(modifiers, -1, -1, row, -1, tool); } /*...............................................................................................................*/ public void tabPressed(KeyEvent e){ if (!getEditing()) return; if (table.getCellsEditable()){ e.consume(); table.editMatrixCell(0, editField.getRow()); } } }
MesquiteProject/MesquiteArchive
releases/Mesquite1.12/Mesquite Project/Source/mesquite/lib/table/RowNamesPanel.java
Java
lgpl-3.0
17,430
import sys def setup(core, object): object.setAttachment('radial_filename', 'ring/unity') object.setAttachment('objType', 'ring') object.setStfFilename('static_item_n') object.setStfName('item_ring_set_commando_utility_b_01_01') object.setDetailFilename('static_item_d') object.setDetailName('item_ring_set_commando_utility_b_01_01') object.setIntAttribute('required_combat_level', 85) object.setStringAttribute('class_required', 'Commando') object.setIntAttribute('cat_stat_mod_bonus.@stat_n:constitution_modified', 10) object.setIntAttribute('cat_stat_mod_bonus.@stat_n:strength_modified', 15) object.setIntAttribute('cat_skill_mod_bonus.@stat_n:expertise_devastation_bonus', 5) object.setStringAttribute('@set_bonus:piece_bonus_count_3', '@set_bonus:set_bonus_commando_utility_b_1') object.setStringAttribute('@set_bonus:piece_bonus_count_4', '@set_bonus:set_bonus_commando_utility_b_2') object.setStringAttribute('@set_bonus:piece_bonus_count_5', '@set_bonus:set_bonus_commando_utility_b_3') object.setAttachment('setBonus', 'set_bonus_commando_utility_b') return
agry/NGECore2
scripts/object/tangible/wearables/ring/item_ring_set_commando_utility_b_01_01.py
Python
lgpl-3.0
1,084
/* Mesquite source code. Copyright 1997 and onward, W. Maddison and D. Maddison.
Disclaimer: The Mesquite source code is lengthy and we are few. There are no doubt inefficiencies and goofs in this code.
The commenting leaves much to be desired. Please approach this source code with the spirit of helping out.
Perhaps with your help we can be more than a few, and make Mesquite better.

Mesquite is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY.
Mesquite's web site is http://mesquiteproject.org

This source code and its compiled class files are free and modifiable under the terms of
GNU Lesser General Public License. (http://www.gnu.org/copyleft/lesser.html)
*/
package mesquite.charMatrices.ReshuffleCharacter;
/*~~ */
import java.util.*;
import java.awt.*;
import mesquite.lib.*;
import mesquite.lib.characters.*;
import mesquite.lib.duties.*;

/* ======================================================================== */
/**
 * Character source that supplies pseudoreplicate characters produced by randomly
 * reshuffling (permuting) the states of one chosen original character among taxa.
 * Replicate {@code ic} is deterministic for a given seed: the generator is reseeded
 * from {@code originalSeed} and advanced {@code ic} draws before each shuffle.
 */
public class ReshuffleCharacter extends CharacterSource {
	public void getEmployeeNeeds(){  //This gets called on startup to harvest information; override this and inside, call registerEmployeeNeed
		EmployeeNeed e = registerEmployeeNeed(DataSearcher.class, getName() + " needs a source of the original character to be reshuffled.",
				"The source of the original character is chosen initially.");
	}
	long currentShuffle=0; // index of the reshuffled replicate currently being served
	int currentOriginalChar = -1; //to force a query; index (internal) of the character being reshuffled
	MatrixSourceCoord dataTask; // employee that supplies the matrix containing the original character
	MCharactersDistribution matrix; // matrix currently in use; null forces a refresh in dataCheck
	CharacterDistribution states; // adjustable copy of the original character whose states get shuffled
	int currentDataSet = 0; // NOTE(review): appears unused within this class
	RandomBetween randomTaxon; // random generator used to choose swap partners among taxa
	MesquiteLong seed;
	Taxa oldTaxa =null; // taxa block last seen, so dataCheck can detect a change of taxa
	long originalSeed=System.currentTimeMillis(); //0L;  user-settable base seed for reproducibility

	/*.................................................................................................................*/
	/** Hires the matrix source and installs the menu items; returns false (via sorry) if no source is available. */
	public boolean startJob(String arguments, Object condition, boolean hiredByName) {
		if (condition!=null)
			dataTask = (MatrixSourceCoord)hireCompatibleEmployee(MatrixSourceCoord.class, condition, "Source of matrices for Reshuffle Character");
		else
			dataTask = (MatrixSourceCoord)hireEmployee(MatrixSourceCoord.class, "Source of matrices for Reshuffle Character");
		if (dataTask == null)
			return sorry(getName() + " couldn't start because no source of character matrices was obtained.");
		currentShuffle = 0;
		randomTaxon= new RandomBetween(originalSeed);
		seed = new MesquiteLong(1);
		seed.setValue(originalSeed);
		addMenuItem("Shuffle Next Character", makeCommand("shuffleNext", this));
		addMenuItem("Shuffle Previous Character", makeCommand("shufflePrevious", this));
		addMenuItem("Choose Character to Shuffle", makeCommand("chooseCharacter", this));
		addMenuItem("Set Seed (Reshuffle character)...", makeCommand("setSeed", this));
		return true;
	}
	public void employeeQuit(MesquiteModule m){
		iQuit(); // without the matrix source this module cannot function
	}
	/*.................................................................................................................*/
	/** Records the commands needed to restore this module's state when a file is reopened. */
	public Snapshot getSnapshot(MesquiteFile file) {
		Snapshot temp = new Snapshot();
		temp.addLine("getCharacterSource ", dataTask);
		temp.addLine("setCharacter " + CharacterStates.toExternal(currentOriginalChar));
		temp.addLine("setShuffle " + CharacterStates.toExternal((int)currentShuffle));
		temp.addLine("setSeed " + originalSeed);
		return temp;
	}
	/*.................................................................................................................*/
	/** Handles menu/scripting commands. NOTE(review): shuffleNext/shufflePrevious dereference
	 * matrix and states without null checks; presumably these menu items are only reachable
	 * once getCharacter has populated them — confirm against the UI flow. */
	public Object doCommand(String commandName, String arguments, CommandChecker checker) {
		if (checker.compare(this.getClass(), "Does shuffle of the next character", null, commandName, "shuffleNext")) {
			if (currentOriginalChar>=matrix.getNumChars()-1) // wrap around to the first character
				currentOriginalChar=0;
			else
				currentOriginalChar++;
			currentShuffle = 0;
			((AdjustableDistribution)states).setParentCharacter(currentOriginalChar);
			parametersChanged();
		}
		else if (checker.compare(this.getClass(), "Indicates which shuffle of the character", null, commandName, "setShuffle")) {
			long s = MesquiteLong.fromString(parser.getFirstToken(arguments));
			if (s >= 0 && MesquiteLong.isCombinable(s) && s != currentShuffle){
				currentShuffle = s;
				parametersChanged();
			}
		}
		else if (checker.compare(this.getClass(), "Returns the source of matrices on which to do ordinations", null, commandName, "setCharacterSource")) { //TEMPORARY for data files using old system without coordinators
			if (dataTask != null)
				return dataTask.doCommand(commandName, arguments, checker);
		}
		else if (checker.compare(this.getClass(), "Returns employee that is character source", null, commandName, "getCharacterSource")) {
			return dataTask;
		}
		else if (checker.compare(this.getClass(), "Sets the random number seed to that passed", "[long integer seed]", commandName, "setSeed")) {
			long s = MesquiteLong.fromString(parser.getFirstToken(arguments));
			if (!MesquiteLong.isCombinable(s)){
				// no usable seed in the arguments: ask the user interactively
				s = MesquiteLong.queryLong(containerOfModule(), "Random number seed", "Enter an integer value for the random number seed for character reshuffling", originalSeed);
			}
			if (MesquiteLong.isCombinable(s)){
				originalSeed = s;
				parametersChanged(); //?
			}
		}
		else if (checker.compare(this.getClass(), "Queries user about which character to shuffle", null, commandName, "chooseCharacter")) {
			int ic = chooseCharacter(matrix);
			if (ic >= 0 && MesquiteInteger.isCombinable(ic)) {
				currentOriginalChar = ic;
				currentShuffle = 0;
				((AdjustableDistribution)states).setParentCharacter(currentOriginalChar);
				parametersChanged();
			}
		}
		else if (checker.compare(this.getClass(), "Sets which character to shuffle", "[character number]", commandName, "setCharacter")) {
			MesquiteInteger pos = new MesquiteInteger(0);
			int icNum = MesquiteInteger.fromString(arguments, pos);
			seed.setValue(originalSeed);
			if (!MesquiteInteger.isCombinable(icNum))
				return null;
			int ic = CharacterStates.toInternal(icNum); // external (1-based) to internal (0-based)
			if (matrix == null || ((ic>=0) && (ic<=matrix.getNumChars()-1))) {
				currentOriginalChar = ic;
				currentShuffle = 0;
				if (states !=null)
					((AdjustableDistribution)states).setParentCharacter(currentOriginalChar);
				parametersChanged();
			}
		}
		else if (checker.compare(this.getClass(), "Does shuffle of the previous character", null, commandName, "shufflePrevious")) {
			if (currentOriginalChar<=0) // wrap around to the last character
				currentOriginalChar=matrix.getNumChars()-1;
			else
				currentOriginalChar--;
			currentShuffle = 0;
			((AdjustableDistribution)states).setParentCharacter(currentOriginalChar);
			parametersChanged();
		}
		else
			return super.doCommand(commandName, arguments, checker);
		return null;
	}
	/** Asks the user which character to shuffle; returns the internal character index.
	 * Uses a named list when the matrix has parent data, otherwise a plain number query. */
	private int chooseCharacter(MCharactersDistribution matrix){
		// FIX: was "matrix == null || matrix.getParentData()!=null", which dereferenced a
		// null matrix inside the branch and threw NullPointerException; a null matrix now
		// falls through to the numeric query below.
		if (matrix != null && matrix.getParentData()!=null) {
			CharacterData data = matrix.getParentData();
			String[] charNames = new String[data.getNumChars()];
			for (int i=0; i<data.getNumChars(); i++)
				charNames[i]= data.getCharacterName(i);
			return ListDialog.queryList(containerOfModule(), "Choose character", "Choose character to shuffle", MesquiteString.helpString,charNames, 0);
		}
		return MesquiteInteger.queryInteger(containerOfModule(), "Choose character", "Number of character to shuffle ", 1);
	}
	/*.................................................................................................................*/
	/** Returns replicate ic: a copy of the chosen character with its states randomly
	 * permuted among taxa (Fisher-Yates shuffle via tradeStatesBetweenTaxa). */
	public CharacterDistribution getCharacter(Taxa taxa, int ic) {
		dataCheck(taxa);
		if (matrix == null)
			return null;
		currentShuffle = ic;
		if (currentOriginalChar<matrix.getNumChars() && currentOriginalChar>=0 && currentShuffle>=0) {
			CharacterDistribution chs = matrix.getCharacterDistribution(currentOriginalChar);
			if (chs == null)
				return null;
			states = (CharacterDistribution)chs.getAdjustableClone();
			if (states == null)
				return null;
			((AdjustableDistribution)states).setParentCharacter(currentOriginalChar);
			String mName = "";
			if (matrix.getParentData() != null)
				mName = " of " + matrix.getParentData().getName();
			states.setName("Shuffle " + currentShuffle + " of character " + CharacterStates.toExternal(currentOriginalChar) + mName);
			// Derive a reproducible per-replicate seed: restart from the base seed and
			// burn currentShuffle draws before reseeding.
			randomTaxon.setSeed(originalSeed);
			for (int i=0; i < currentShuffle; i++)
				randomTaxon.nextInt();
			randomTaxon.setSeed(randomTaxon.nextInt() + 1); //v. 1. 1 Oct 05, modified by adding 1 to prevent adjacent from simply being offsets
			int nT1 = states.getNumTaxa()-1;
			for (int i=0; i < nT1; i++) {
				int sh = randomTaxon.randomIntBetween(i, nT1);
				if (i!=sh)
					((AdjustableDistribution)states).tradeStatesBetweenTaxa(i, sh);
			}
			return states;
		}
		else
			return null;
	}
	/*.................................................................................................................*/
	/** Infinitely many shuffles are available as long as a matrix exists. */
	public int getNumberOfCharacters(Taxa taxa) {
		dataCheck(taxa);
		if (matrix == null)
			return 0;
		else
			return MesquiteInteger.infinite;
	}
	/*.................................................................................................................*/
	public void employeeParametersChanged(MesquiteModule employee, MesquiteModule source, Notification notification) {
		matrix = null; // force dataCheck to fetch a fresh matrix next time
		super.employeeParametersChanged(employee, source, notification);
	}
	/** Called to provoke any necessary initialization. This helps prevent the module's intialization queries to the user from happening at inopportune times (e.g., while a long chart calculation is in mid-progress)*/
	public void initialize(Taxa taxa){
		dataCheck(taxa);
	}
	/*.................................................................................................................*/
	/** Refreshes the matrix if absent or if the taxa block changed, and (re)chooses the
	 * character to shuffle when the current index is out of range. */
	private void dataCheck(Taxa taxa) {
		if (matrix==null || oldTaxa != taxa) {
			matrix = dataTask.getCurrentMatrix(taxa);
			if (matrix == null)
				currentOriginalChar = 0;
			else if (currentOriginalChar<0 || currentOriginalChar>= matrix.getNumChars()) {
				if (!MesquiteThread.isScripting()) // never pop dialogs while a script runs
					currentOriginalChar = chooseCharacter(matrix);
				if (!MesquiteInteger.isCombinable(currentOriginalChar) || currentOriginalChar<0 || currentOriginalChar>=matrix.getNumChars())
					currentOriginalChar = 0;
			}
			currentShuffle = 0;
			oldTaxa = taxa;
		}
	}
	/*.................................................................................................................*/
	/** returns the name of character ic*/
	public String getCharacterName(Taxa taxa, int ic){
		return "Shuffle " + ic + " of character ";
	}
	/*.................................................................................................................*/
	public String getParameters() {
		if (matrix==null)
			return "";
		return "Character reshuffle: " + matrix.getName() + ". [seed: " + originalSeed + "]";
	}
	/*.................................................................................................................*/
	public String getName() {
		return "Reshuffle Character";
	}
	/*.................................................................................................................*/
	public boolean showCitation() {
		return true;
	}
	/*.................................................................................................................*/
	public boolean isPrerelease() {
		return false;
	}
	/*.................................................................................................................*/
	/** returns an explanation of what the module does.*/
	public String getExplanation() {
		return "Supplies characters that are reshufflings of an existing character." ;
	}
	/*.................................................................................................................*/
	public CompatibilityTest getCompatibilityTest() {
		return new CharacterStateTest();
	}
}
wmaddisn/MesquiteCore
Source/mesquite/charMatrices/ReshuffleCharacter/ReshuffleCharacter.java
Java
lgpl-3.0
12,391
import sys from services.spawn import MobileTemplate from services.spawn import WeaponTemplate from resources.datatables import WeaponType from resources.datatables import Difficulty from resources.datatables import Options from java.util import Vector def addTemplate(core): mobileTemplate = MobileTemplate() mobileTemplate.setCreatureName('tatooine_opening_jano') mobileTemplate.setLevel(1) mobileTemplate.setDifficulty(Difficulty.NORMAL) mobileTemplate.setSocialGroup("township") mobileTemplate.setOptionsBitmask(Options.INVULNERABLE | Options.CONVERSABLE) templates = Vector() templates.add('object/mobile/shared_dressed_tatooine_opening_jano.iff') mobileTemplate.setTemplates(templates) weaponTemplates = Vector() weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic') weaponTemplates.add(weapontemplate) mobileTemplate.setWeaponTemplateVector(weaponTemplates) attacks = Vector() mobileTemplate.setDefaultAttack('creatureMeleeAttack') mobileTemplate.setAttacks(attacks) core.spawnService.addMobileTemplate('jano', mobileTemplate) return
ProjectSWGCore/NGECore2
scripts/mobiles/generic/static/tatooine/jano.py
Python
lgpl-3.0
1,147
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Diagnostics; namespace CSharpSyntax { public sealed partial class SwitchLabelSyntax : SyntaxNode { public CaseOrDefault Kind { get; set; } private ExpressionSyntax _value; public ExpressionSyntax Value { get { return _value; } set { if (_value != null) RemoveChild(_value); _value = value; if (_value != null) AddChild(_value); } } public SwitchLabelSyntax() : base(SyntaxKind.SwitchLabel) { } public override IEnumerable<SyntaxNode> ChildNodes() { if (Value != null) yield return Value; } [DebuggerStepThrough] public override void Accept(ISyntaxVisitor visitor) { if (!visitor.Done) visitor.VisitSwitchLabel(this); } [DebuggerStepThrough] public override T Accept<T>(ISyntaxVisitor<T> visitor) { return visitor.VisitSwitchLabel(this); } } }
modulexcite/CSharpSyntax
CSharpSyntax/Generated/SwitchLabelSyntax.cs
C#
lgpl-3.0
1,355
#include "forme.h" /// /// \brief Forme::Forme /// Forme::Forme() { } /// /// \brief Forme::GetSize /// \return Nombre de points constituant la forme /// int Forme::GetSize() const { return L.size(); } /// /// \brief Forme::GetPoint /// \param i Indice du point /// \return Retourne le i-ème point(s) de la forme /// Nécessite que l'indice soit VALIDE. /// QPointF Forme::GetPoint(int i) const { return L.at(i); } /// /// \brief Forme::AddPoint Ajoute le point P à la forme /// \param P QPointF /// /// void Forme::AddPoint(const QPointF &P) { L.append(P); } /// /// \brief operator == Teste si les deux formes sont egales /// \param A Forme 1 /// \param B Forme 2 /// \return /// bool operator ==(Forme const &A, Forme const &B) { if (A.GetSize() != B.GetSize()) return false; for(int i=0; i<A.GetSize(); ++i) { if (A.GetPoint(i)!=B.GetPoint(i)) return false; } return true; } /// /// \brief Forme::generateExisting Génére une forme par défaut /// \n n=0 : Segment /// \n n=1 : Triangle /// \param n /// void Forme::generateExisting(quint32 n) { if(n==0) { //Segment0-1 this->AddPoint(QPointF(0.,0.)); this->AddPoint(QPointF(1.,0.)); } else if(n==1) { //Triangle this->AddPoint(QPointF(0.,0.)); this->AddPoint(QPointF(1.,0.)); this->AddPoint(QPointF(1./2.,qSqrt(3./4.))); } }
Alexsaphir/Fractale
forme.cpp
C++
lgpl-3.0
1,329
/**
 *
 */
package com.databasepreservation.model.data;

import java.io.InputStream;
import java.sql.Blob;

import com.databasepreservation.common.BlobInputStreamProvider;
import com.databasepreservation.common.InputStreamProvider;
import com.databasepreservation.common.TemporaryPathInputStreamProvider;
import com.databasepreservation.model.exception.ModuleException;

/**
 * Represents a cell of BLOB type. All stream access is delegated to an
 * {@link InputStreamProvider} strategy selected by the constructor used, so
 * the cell itself never holds the binary content in memory.
 *
 * @author Luis Faria &lt;lfaria@keep.pt&gt;
 * @author Bruno Ferreira &lt;bferreira@keep.pt&gt;
 */
public class BinaryCell extends Cell implements InputStreamProvider {
  // Strategy supplying the binary content; set exactly once in a constructor.
  private InputStreamProvider inputStreamProvider;

  /**
   * Creates a binary cell. This binary cell will mostly just be a wrapper
   * around the SQL Blob object.
   *
   * @param id
   *          the cell id
   * @param blob
   *          the SQL Blob object, where the blob value will be read from
   */
  public BinaryCell(String id, Blob blob) {
    super(id);

    inputStreamProvider = new BlobInputStreamProvider(blob);
  }

  /**
   * Creates a binary cell. The binary contents are read and saved to a
   * temporary file, so they can be read later without keeping an open
   * InputStreams.
   *
   * The inputStream is closed after use.
   *
   * @param id
   *          the cell id
   * @param inputStream
   *          to read the data. It will be closed.
   * @throws ModuleException
   *           if some IO problem occurs. The stream will still be closed.
   */
  public BinaryCell(String id, InputStream inputStream) throws ModuleException {
    super(id);

    inputStreamProvider = new TemporaryPathInputStreamProvider(inputStream);
  }

  /**
   * Creates a binary cell. This binary cell is a wrapper around a
   * ProvidesInputStream object (whilst also providing Cell functionality).
   *
   * @param id
   *          the cell id
   * @param inputStreamProvider
   *          the inputStream provider used to read BLOB data
   */
  public BinaryCell(String id, InputStreamProvider inputStreamProvider) {
    super(id);

    this.inputStreamProvider = inputStreamProvider;
  }

  /**
   * Opens a new stream over the binary content (delegates to the provider).
   * Callers are responsible for closing the returned stream.
   */
  @Override
  public InputStream createInputStream() throws ModuleException {
    return inputStreamProvider.createInputStream();
  }

  /** Releases any resources held by the underlying provider (e.g. temp files). */
  @Override
  public void cleanResources() {
    inputStreamProvider.cleanResources();
  }

  /** Size of the binary content in bytes, as reported by the provider. */
  @Override
  public long getSize() throws ModuleException {
    return inputStreamProvider.getSize();
  }
}
magenta-aps/db-preservation-toolkit
dbptk-model/src/main/java/com/databasepreservation/model/data/BinaryCell.java
Java
lgpl-3.0
2,407
package net.java.otr4j; import net.java.otr4j.session.SessionID; /** * This interface should be implemented by the host application. It notifies * about session status changes. * * @author George Politis * */ public interface OtrEngineListener { public abstract void sessionStatusChanged(SessionID sessionID); public abstract void multipleInstancesDetected(SessionID sessionID); public abstract void outgoingSessionChanged(SessionID sessionID); }
AndriyGol/AndroidOTR
src/main/java/net/java/otr4j/OtrEngineListener.java
Java
lgpl-3.0
462
// These are concatenated md5 and sha1 fingerprints for the Firefox and // Microsoft root CAs as of Aug 2010 var root_ca_hashes = { '00531D1D7201D423C820D00B6088C5D143DDB1FFF3B49B73831407F6BC8B975023D07C50' : true, '015A99C3D64FA94B3C3BB1A3AB274CBFFC219A76112F76C1C508833C9A2FA2BA84AC087A' : true, '019408DE857F8D806CE602CA89522848750251B2C632536F9D917279543C137CD721C6E0' : true, '0208EE8CAAB8387A6824DCB4E26A52337E206939CC5FA883635F64C750EBF5FDA9AEE653' : true, '0226C3015E08303743A9D07DCF37E6BF323C118E1BF7B8B65254E2E2100DD6029037F096' : true, '034287D7C1167D18AFA4703CB8312C3E4EF2E6670AC9B5091FE06BE0E5483EAAD6BA32D9' : true, '03DC08EEC4703FFA20E5E179E81AE7C59ED18028FB1E8A9701480A7890A59ACD73DFF871' : true, '044BFDC96CDA2A32857C598461468A64BEB5A995746B9EDF738B56E6DF437A77BE106B81' : true, '0468E9247E41CED76C441630703DDDB9AB16DD144ECDC0FC4BAAB62ECF0408896FDE52B7' : true, '068690F2195471FDDD3DE6EEA161CAFF7030AABF8432A800666CCCC42A887E42B7553E2B' : true, '069F6979166690021B8C8CA2C3076F3A627F8D7827656399D27D7F9044C9FEB3F33EFA9A' : true, '06F0171EB1E961ED7A363CA594A1374AFAAA27B8CAF5FDF5CDA98AC3378572E04CE8F2E0' : true, '06F9EBECCC569D88BA90F5BAB01AE00216D424FE9610E17519AF232BB68774E24144BE6E' : true, '076192047EA6B9CD5E6B007AE3BF1D0434D499426F9FC2BB27B075BAB682AAE5EFFCBA74' : true, '087C581F522B44B43B79CD01F8C5C3C995E6ADF8D77146024DD56A21B2E73FCDF23B35FF' : true, '0B092C1CD721866F94376FE6A7F3224D0409565B77DA582E6495AC0060A72354E64B0192' : true, '0C412F135BA054F596662D7ECD0E03F4DA79C1711150C23439AA2B0B0C62FD55B2F9F580' : true, '0C5ADD5AAE29F7A77679FA4151FEF035B865130BEDCA38D27F69929420770BED86EFBC10' : true, '0C7FDD6AF42AB9C89BBD207EA9DB5C3760D68974B5C2659E8A0FC1887C88D246691B182C' : true, '0CF89E17FCD403BDE68D9B3C0587FE8433A335C23CE8034B04E13DE5C48E791AEB8C3204' : true, '0E40A76CDE035D8FD10FE4D18DF96CA9A9E9780814375888F20519B06D2B0D2B6016907D' : true, '0EFA4BF7D760CD65F7A7068857986239D29F6C98BEFC6D986521543EE8BE56CEBC288CF3' : true, 
'0FA01300C3558AB7D37E2D04739EDE3C8B1A1106B8E26B232980FD652E6181376441FD11' : true, '100EADF35C841D8E035F2DC93937F552742CDF1594049CBF17A2046CC639BB3888E02E33' : true, '10FC635DF6263E0DF325BE5F79CD6767742C3192E607E424EB4549542BE1BBC53E6174E2' : true, '119279403CB18340E5AB664A679280DFA9628F4B98A91B4835BAD2C1463286BB66646A8C' : true, '14F108AD9DFA64E289E71CCFA8AD7D5E3921C115C15D0ECA5CCB5BC4F07D21D8050B566A' : true, '155EF5117AA2C1150E927E66FE3B84C3B38FECEC0B148AA686C3D00F01ECC8848E8085EB' : true, '15ACA5C2922D79BCE87FCB67ED02CF36E7B4F69D61EC9069DB7E90A7401A3CF47D4FE8EE' : true, '15B298A354704048703A375582C45AFA0048F8D37B153F6EA2798C323EF4F318A5624A9E' : true, '15EE9F5AA08528DF6BDD34A3A056D8307F8A77836BDC6D068F8B0737FCC5725413068CA4' : true, '160A1613C17FF01D887EE3D9E71261CCF88015D3F98479E1DA553D24FD42BA3F43886AEF' : true, '173574AF7B611CEBF4F93CE2EE40F9A2925A8F8D2C6D04E0665F596AFF22D863E8256F3F' : true, '1802B00127036A191B323B83DE9AA985D6BF7994F42BE5FA29DA0BD7587B591F47A44F22' : true, '1898C0D6E93AFCF9B0F50CF74B014417FAB7EE36972662FB2DB02AF6BF03FDE87C4B2F9B' : true, '18AE695D15CAB917673267D597B260C04BA7B9DDD68788E12FF852E1A024204BF286A8F6' : true, '1AD00CB9A6E68A3B6E95860C5B8CD8195A4D0E8B5FDCFDF64E7299A36C060DB222CA78E4' : true, '1B2E00CA2606903DADFE6F1568D36BB367650DF17E8E7E5B8240A4F4564BCFE23D69C6F0' : true, '1BD75F76734CC0DC98CA442BCC0F78DD31E2C52CE1089BEFFDDADB26DD7C782EBC4037BD' : true, '1C4BE2C62DB9AC3114F4400769CB1F4011C5B5F75552B011669C2E9717DE6D9BFF5FA810' : true, '1D3554048578B03F42424DBF20730A3F02FAF3E291435468607857694DF5E45B68851868' : true, '1D6496AF2D821A300BA0620D76BC53AA7FBB6ACD7E0AB438DAAF6FD50210D007C6C0829C' : true, '1E240EA0F876D785A3F5F8A1493D2EBAFD1ED1E2021B0B9F73E8EB75CE23436BBCC746EB' : true, '1E42950233926BB95FC07FDAD6B24BFCCCAB0EA04C2301D6697BDD379FCD12EB24E3949D' : true, '1E74C3863C0C35C53EC27FEF3CAA3CD9209900B63D955728140CD13622D8C687A4EB0085' : true, '200B4A7A88A7A942868A5F74567B880593E6AB220303B52328DCDA569EBAE4D1D1CCFB65' : true, 
'206BD68B4A8F48ABE488090DE5651A500CFD83DBAE44B9A0C8F676F3B570650B94B69DBF' : true, '2124A681C1D8F219AF4998E39DFE0BF46A174570A916FBE84453EED3D070A1D8DA442829' : true, '21BC82AB49C4133B4BB22B5C6B909C198BAF4C9B1DF02A92F7DA128EB91BACF498604B6F' : true, '21D84C822B990933A2EB14248D8E5FE84054DA6F1C3F4074ACED0FECCDDB79D153FB901D' : true, '21EFB85040393F756F27FEE3EA5870EBA59C9B10EC7357515ABB660C4D94F73B9E6E9272' : true, '222DA601EA7C0AF7F06C56433F7776D3FEB8C432DCF9769ACEAE3DD8908FFD288665647D' : true, '224D8F8AFCF735C2BB5734907B8B22163E2BF7F2031B96F38CE6C4D8A85D3E2D58476A0F' : true, '246DABD2F2EA4A66AE5BBCAE50AD6E56F9DD19266B2043F1FE4B3DCB0190AFF11F31A69D' : true, '2477D9A891D13BFA882DC2FFF8CD3393D8C5388AB7301B1B6ED47AE645253A6F9F1A2761' : true, '252AC6C5896839F9557202165EA39ED23C71D70E35A5DAA8B2E3812DC3677417F5990DF3' : true, '255BA669B87BF8780DC18FA6EAE47063FA0882595F9CA6A11ECCBEAF65C764C0CCC311D0' : true, '257ABA832EB6A20BDAFEF5020F08D7AD81968B3AEF1CDC70F5FA3269C292A3635BD123D3' : true, '259DCF5EB3259D95B93F00865F47943D43F9B110D5BAFD48225231B0D0082B372FEF9A54' : true, '266D2C1998B6706838505419EC9034600B77BEBBCB7AA24705DECC0FBD6A02FC7ABD9B52' : true, '27DE36FE72B70003009DF4F01E6C0424DE3F40BD5093D39B6C60F6DABC076201008976C9' : true, '27EC3947CDDA5AAFE29A016521A94CBB4D2378EC919539B5007F758F033B211EC54D8BCF' : true, '2A5D003739469475397B11A6F29341E13F85F2BB4A62B0B58BE1614ABB0D4631B4BEF8BA' : true, '2A954ECA79B2874573D92D90BAF99FB6A43489159A520F0D93D032CCAF37E7FE20A8B419' : true, '2B508718392D3BFFC3917F2D7DC08A97B19DD096DCD4E3E0FD676885505A672C438D4E9C' : true, '2B7020568682A018C807531228702172F17F6FB631DC99E3A3C87FFE1CF1811088D96033' : true, '2C20269DCB1A4A0085B5B75AAEC201378C96BAEBDD2B070748EE303266A0F3986E7CAE58' : true, '2C6F17A39562012065D2076EFCB83F6DB1EAC3E5B82476E9D50B1EC67D2CC11E12E0B491' : true, '2C8C175EB154AB9317B5365ADBD1C6F2A073E5C5BD43610D864C21130A855857CC9CEA46' : true, '2C8F9F661D1890B147269D8E86828CA96252DC40F71143A22FDE9EF7348E064251B18118' : true, 
'2CC2B0D5D622C52E901EF4633F0FBB324A058FDFD761DB21B0C2EE48579BE27F42A4DA1C' : true, '2DBBE525D3D165823AB70EFAE6EBE2E1B3EAC44776C9C81CEAF29D95B6CCA0081B67EC9D' : true, '2E03FDC5F5D72B9464C1BE8931F1169B96995C7711E8E52DF9E34BECEC67D3CBF1B6C4D2' : true, '30C908DDD73E63A4092814C74EB97E2CCFE4313DBA05B8A7C30063995A9EB7C247AD8FD5' : true, '30C9E71E6BE614EB65B216692031674D3BC0380B33C3F6A60C86152293D9DFF54B81C004' : true, '31853C62949763B9AAFD894EAF6FE0CF1F4914F7D874951DDDAE02C0BEFD3A2D82755185' : true, '324A4BBBC863699BBE749AC6DD1D4624AD7E1C28B064EF8F6003402014C3D0E3370EB58A' : true, '3327D16CFC9185FC8C7E98FA854EF305E70715F6F728365B5190E271DEE4C65EBEEACAF3' : true, '33B784F55F27D76827DE14DE122AED6F0747220199CE74B97CB03D79B264A2C855E933FF' : true, '343339FC6D033A8FA25385443270DEC45E5A168867BFFF00987D0B1DC2AB466C4264F956' : true, '34FCB8D036DB9E14B3C2F2DB8FE494C7379A197B418545350CA60369F33C2EAF474F2079' : true, '354895364A545A72968EE064CCEF2C8CC90D1BEA883DA7D117BE3B79F4210E1A5894A72D' : true, '370971C4AFEB7501AE636C3016BFD1E5A399F76F0CBF4C9DA55E4AC24E8960984B2905B6' : true, '3741491B18569A26F5ADC266FB40A54C4313BB96F1D5869BC14E6A92F6CFF63469878237' : true, '3785445332451F20F0F395E125C4434EF48B11BFDEABBE94542071E641DE6BBE882B40B9' : true, '37A56ED4B1258497B7FD56157AF9A200B435D4E1119D1C6690A749EBB394BD637BA782B7' : true, '3916AAB96A41E11469DF9E6C3B72DCB6879F4BEE05DF98583BE360D633E70D3FFE9871AF' : true, '3AB2DE229A209349F9EDC8D28AE7680D36863563FD5128C7BEA6F005CFE9B43668086CCE' : true, '3AE550B039BEC7463633A1FE823E8D943CBB5DE0FCD6397C0588E56697BD462ABDF95C76' : true, '3B0AE4BB416A84B39D2C575E6542BE478E1032E9245944F84791983EC9E829CB1059B4D3' : true, '3C4C25CC0A19CAEE6AEB55160086725F23E833233E7D0CC92B7C4279AC19C2F474D604CA' : true, '3D4129CB1EAA1174CD5DB062AFB0435BDDE1D2A901802E1D875E84B3807E4BB1FD994134' : true, '3E455215095192E1B75D379FB187298AB1BC968BD4F49D622AA89A81F2150152A41D829C' : true, '3E80175BADD77C104BF941B0CF1642B000EA522C8A9C06AA3ECCE0B4FA6CDC21D92E8099' : true, 
'3F459639E25087F7BBFE980C3C2098E62AC8D58B57CEBF2F49AFF2FC768F511462907A41' : true, '400125068D21436A0E43009CE743F3D5F9CD0E2CDA7624C18FBDF0F0ABB645B8F7FED57A' : true, '410352DC0FF7501B16F0028EBA6F45C5DAC9024F54D8F6DF94935FB1732638CA6AD77C13' : true, '41B807F7A8D109EEB49A8E704DFC1B787A74410FB0CD5C972A364B71BF031D88A6510E9E' : true, '4265CABE019A9A4CA98C4149CDC0D57F293621028B20ED02F566C532D1D6ED909F45002F' : true, '42769768CFA6B43824AAA11BF267DECA4178AB4CBFCE7B4102ACDAC4933E6FF50DCF715C' : true, '4281A0E21CE35510DE558942659622E6E0B4322EB2F6A568B654538448184A5036874384' : true, '429BD669C6D445AD2E81511D355A89624F555CE20DCD3364E0DC7C41EFDD40F50356C122' : true, '45E1A572C5A93664409EF5E45884678C6B2F34AD8958BE62FDB06B5CCEBB9DD94F4E39F3' : true, '45F750114EC5ADBD53688663EC7B6AE1C09AB0C8AD7114714ED5E21A5A276ADCD5E7EFCB' : true, '468C210EAB92214659DBA6DB0061DE265A5A4DAF7861267C4B1F1E67586BAE6ED4FEB93F' : true, '48D11E627801C26E4369A42CEE130AB564902AD7277AF3E32CD8CC1DC79DE1FD7F8069EA' : true, '4963AE27F4D5953DD8DB2486B89C0753D3C063F219ED073E34AD5D750B327629FFD59AF2' : true, '497904B0EB8719AC47B0BC11519B74D0D1EB23A46D17D68FD92564C2F1F1601764D8E349' : true, '49EFA6A1F0DE8EA76AEE5B7D1E5FC4463E42A18706BD0C9CCF594750D2E4D6AB0048FDC4' : true, '4B1C568CA0E8C79E1EF5EE32939965FE4C95A9902ABE0777CED18D6ACCC3372D2748381E' : true, '4B6771BE33B90DB64B3A400187F08B1F7AC5FFF8DCBC5583176877073BF751735E9BD358' : true, '4B798DD41D0392AA51EE04E5906F474954F9C163759F19045121A319F64C2D0555B7E073' : true, '4BE2C99196650CF40E5A9392A00AFEB28CF427FD790C3AD166068DE81E57EFBB932272D4' : true, '4C5641E50DBB2BE8CAA3ED1808AD43390483ED3399AC3608058722EDBC5E4600E3BEF9D7' : true, '4D56677ECCE6457259B74F511172E169C0DB578157E9EE82B5917DF0DD6D82EE9039C4E2' : true, '4FEBF1F070C280635D589FDA123CA9C4E392512F0ACFF505DFF6DE067F7537E165EA574B' : true, '50193E2FE8B6F4055449F3AEC98B3E1947AFB915CDA26D82467B97FA42914468726138DD' : true, '5186E81FBCB1C371B51810DB5FDCF62078E9DD0650624DB9CB36B50767F209B843BE15B3' : true, 
'556EBEF54C1D7C0360C43418BC9649C1245C97DF7514E7CF2DF8BE72AE957B9E04741E85' : true, '565FAA80611217F66721E62B6D61568E8025EFF46E70C8D472246584FE403B8A8D6ADBF5' : true, '58EB470764D62CBAE29B96552B9700B56A6F2A8B6E2615088DF59CD24C402418AE42A3F1' : true, '59736628512B98B410FF7D06FA22D6C8A0F8DB3F0BF417693B282EB74A6AD86DF9D448A3' : true, '5A11B922850289E1C3F22CE14EC101844B421F7515F6AE8A6ECEF97F6982A400A4D9224E' : true, '5B6F532CBB8188FA6C042C325DA56B967CA04FD8064C1CAA32A37AA94375038E8DF8DDC0' : true, '5B9EFD3B6035EA688E52FE1319144AA36B81446A5CDDF474A0F800FFBE69FD0DB6287516' : true, '5C48DCF74272EC56946D1CCC713580756631BF9EF74F9EB6C9D5A60CBA6ABED1F7BDEF7B' : true, '5E397BDDF8BAEC82E9AC62BA0C54002BCA3AFBCF1240364B44B216208880483919937CF7' : true, '5E809E845A0E650B1702F355182A3ED7786A74AC76AB147F9C6A3050BA9EA87EFE9ACE3C' : true, '5F944A7322B8F7D131EC5939F78EFE6E9FC796E8F8524F863AE1496D381242105F1B78F5' : true, '60847C5ACEDB0CD4CBA7E9FE02C6A9C0101DFA3FD50BCBBB9BB5600C1955A41AF4733A04' : true, '649CEF2E44FCC68F5207D051738FCB3DDA40188B9189A3EDEEAEDA97FE2F9DF5B7D18A41' : true, '65295911BB8F5166890D47824002C5AFC4674DDC6CE2967FF9C92E072EF8E8A7FBD6A131' : true, '6558AB15AD576C1EA8A7B569ACBFFFEBE5DF743CB601C49B9843DCAB8CE86A81109FE48E' : true, '67AC0D773011DED143AE7B737190BCA9ED8DC8386C4886AEEE079158AAC3BFE658E394B4' : true, '67CB9DC013248A829BB2171ED11BECD4D23209AD23D314232174E40D7F9D62139786633A' : true, '689B17C654E0E0E099551642F75A86D8027268293E5F5D17AAA4B3C3E6361E1F92575EAA' : true, '6960ECBE8C94D76E6F2EC4782F55F08397226AAE4A7A64A59BD16787F27F841C0A001FD0' : true, '6C397DA40E5559B23FD641B11250DE435F3B8CF2F810B37D78B4CEEC1919C37334B9C774' : true, '6CC9A76E47F10CE3533B784C4DC26AC5B72FFF92D2CE43DE0A8D4C548C503726A81E2B93' : true, '6D38C49B22244CA3A8B3A09345E157FA89C32E6B524E4D65388B9ECEDC637134ED4193A3' : true, '70B57C4881953E80DC289BBAEF1EE4854072BA31FEC351438480F62E6CB95508461EAB2F' : true, '711F0E21E7AAEA323A6623D3AB50D66996974CD6B663A7184526B1D648AD815CF51E801A' : true, 
'71AA6AAF1FA5C0D50E90D40BF6AADFCC55C86F7414AC8BDD6814F4D86AF15F3710E104D0' : true, '71E265FBCD7B0B845BE3BCD76320C598CFF810FB2C4FFC0156BFE1E1FABCB418C68D31C5' : true, '72E44A87E369408077EABCE3F4FFF0E15F43E5B1BFF8788CAC1CC7CA4A9AC6222BCC34C6' : true, '733A747AECBBA396A6C2E4E2C89BC0C3AEC5FB3FC8E1BFC4E54F03075A9AE800B7F7B6FA' : true, '739DD35FC63C95FEC6ED89E58208DD897FB9E2C995C97A939F9E81A07AEA9B4D70463496' : true, '74014A91B108C458CE47CDF0DD11530885A408C09C193E5D51587DCDD61330FD8CDE37BF' : true, '747B820343F0009E6BB3EC47BF85A5934463C531D7CCC1006794612BB656D3BF8257846F' : true, '74A82C81432B35609B78056B58F36582CFF360F524CB20F1FEAD89006F7F586A285B2D5B' : true, '770D19B121FD00429C3E0CA5DD0B028E25019019CFFBD9991CB76825748D945F30939542' : true, '774AF42C9DB027B747B515E4C762F0FCDF646DCB7B0FD3A96AEE88C64E2D676711FF9D5F' : true, '782A02DFDB2E14D5A75F0ADFB68E9C5D4F65566336DB6598581D584A596C87934D5F2AB4' : true, '78A5FB104BE4632ED26BFBF2B6C24B8EEC0C3716EA9EDFADD35DFBD55608E60A05D3CBF3' : true, '79E4A9840D7D3A96D7C04FE2434C892EA8985D3A65E5E5C4B2D7D66D40C6DD2FB19C5436' : true, '7A79544D07923B5BFF41F00EC739A298C060ED44CBD881BD0EF86C0BA287DDCF8167478C' : true, '7BB508999A8C18BF85277D0EAEDAB2AB24BA6D6C8A5B5837A48DB5FAE919EA675C94D217' : true, '7C62FF749D31535E684AD578AA1EBF239F744E9F2B4DBAEC0F312C50B6563B8E2D93C311' : true, '7CA50FF85B9A7D6D30AE545AE342A28A59AF82799186C7B47507CBCF035746EB04DDB716' : true, '7D86908F5BF1F240C0F73D62B5A4A93B72997913EC9B0DAE65D1B6D7B24A76A3AEC2EE16' : true, '7E234E5BA7A5B425E90007741162AED67F8AB0CFD051876A66F3360F47C88D8CD335FC74' : true, '7F667A71D3EB6978209A51149D83DA20BE36A4562FB2EE05DBB3D32323ADF445084ED656' : true, '803ABC22C1E6FB8D9B3B274A321B9A0147BEABC922EAE80E78783462A79F45C254FDE68B' : true, '8135B9FBFB12CA186936EBAE6978A1F1DCBB9EB7194BC47205C111752986835B53CAE4F8' : true, '81D6ED354F1F26D031D040DD8AE5810DE0925E18C7765E22DABD9427529DA6AF4E066428' : true, '8212F789E10B9160A4B6229F9468119268ED18B309CD5291C0D3357C1D1141BF883866B1' : true, 
'824AD493004D66B6A32CA77B3536CF0B687EC17E0602E3CD3F7DFBD7E28D57A0199A3F44' : true, '8292BA5BEFCD8A6FA63D55F984F6D6B7F9B5B632455F9CBEEC575F80DCE96E2CC7B278B7' : true, '84901D95304956FC4181F045D776C46B439E525F5A6A47C32CEBC45C63ED39317CE5F4DF' : true, '852FF4764CD5426CCB5E7DF717E835BD4EFCED9C6BDD0C985CA3C7D253063C5BE6FC620C' : true, '85CA765A1BD16822DCA22312CAC680345BCDCDCC66F6DCE4441FE37D5CC3134C46F47038' : true, '86386D5E49636C855CDB6DDC94B7D0F7ACED5F6553FD25CE015F1F7A483B6A749F6178C6' : true, '86420509BCA79DEC1DF32E0EBAD81DD01D8259CA2127C3CBC16CD932F62C65298CA88712' : true, '86ACDE2BC56DC3D98C2888D38D16131ECE6A64A309E42FBBD9851C453E6409EAE87D60F1' : true, '86EF8E319D9F8569A2A41A127168BA1B90DECE77F8C825340E62EBD635E1BE20CF7327DD' : true, '8714AB83C4041BF193C750E2D721EBEF30779E9315022E94856A3FF8BCF815B082F9AEFD' : true, '879055F2CE31153C33D927C876E37DE13070F8833E4AA6803E09A646AE3F7D8AE1FD1654' : true, '87CE0B7B2A0E4900E158719B37A893720563B8630D62D75ABBC8AB1E4BDFB5A899B24D43' : true, '882C8C52B8A23CF3F7BB03EAAEAC420B74207441729CDD92EC7931D823108DC28192E2BB' : true, '8949548CC8689A8329ECDC067321AB97A60F34C8626C81F68BF77DA9F667588A903F7D36' : true, '8956AA4D441E59D805A1886DEAC828B26372C49DA9FFF051B8B5C7D4E5AAE30384024B9C' : true, '8BCA525F7553D02C6F630D8F882E1CD78EB03FC3CF7BB292866268B751223DB5103405CB' : true, '8CCADC0B22CEF5BE72AC411A11A8D81291C6D6EE3E8AC86384E548C299295C756C817B81' : true, '8CD79FEBC7B8144C5478A7903BA935671F55E8839BAC30728BE7108EDE7B0BB0D3298224' : true, '8D26FF2F316D5929DDE636A7E2CE6425720FC15DDC27D456D098FABF3CDD78D31EF5A8DA' : true, '8D639B56C114E4EE9A128586119082A3D2441AA8C203AECAA96E501F124D52B68FE4C375' : true, '8D7251DBA03ACF2077DFF265065EDFEFC8C25F169EF85074D5BEE8CDA2D43CAEE75FD257' : true, '8EADB501AA4D81E48C1DD1E1140095193679CA35668772304D30A5FB873B0FA77BB70D54' : true, '8F5D770627C4983C5B9378E7D77D9BCC7E784A101C8265CC2DE1F16D47B440CAD90A1945' : true, '8F91E7EEE3FCDA86CAFCDC70EDB7B70C8250BED5A214433A66377CBC10EF83F669DA3A67' : true, 
'911B3F6ECD9EABEE07FE1F71D2B36127E19FE30E8B84609E809B170D72A8C5BA6E1409BD' : true, '91DE0625ABDAFD32170CBB25172A84672796BAE63F1801E277261BA0D77770028F20EEE4' : true, '91F4035520A1F8632C62DEACFB611C8E21FCBD8E7F6CAF051BD1B343ECA8E76147F20F8A' : true, '9265588BA21A317273685CB4A57A0748E621F3354379059A4B68309D8A2F74221587EC79' : true, '932A3EF6FD23690D7120D42B47992BA6CBA1C5F8B0E35EB8B94512D3F934A2E90610D336' : true, '937F901CED846717A4655F9BCB3002978781C25A96BDC2FB4C65064FF9390B26048A0E01' : true, '93C28E117BD4F30319BD2875134A454AAB48F333DB04ABB9C072DA5B0CC1D057F0369B46' : true, '93EB36130BC154F13E7505E5E01CD4375F4E1FCF31B7913B850B54F6E5FF501A2B6FC6CF' : true, '93F1AD340B2BE7A85460E2738CA49431705D2B4565C7047A540694A79AF7ABB842BDC161' : true, '9414777E3E5EFD8F30BD41B0CFE7D03075E0ABB6138512271C04F85FDDDE38E4B7242EFE' : true, '96897D61D1552B27E25A39B42A6C446F8EFDCABC93E61E925D4D1DED181A4320A467A139' : true, '9760E8575FD35047E5430C94368AB06290AEA26985FF14804C434952ECE9608477AF556F' : true, '978FC66B3B3E40857724750B76BB55F8B5D303BF8682E152919D83F184ED05F1DCE5370C' : true, '9A771918ED96CFDF1BB70EF58DB9882ECF74BFFF9B86815B08335440363E87B6B6F0BF73' : true, '9AAEF722F533FB4EEC0A249DC63D7D255E997CA5945AAB75FFD14804A974BF2AE1DFE7E1' : true, '9B340D1A315B97462698BCA6136A71969E6CEB179185A29EC6060CA53E1974AF94AF59D4' : true, '9D666ACCFFD5F543B4BF8C16D12BA8998939576E178DF705780FCC5EC84F84F6253A4893' : true, '9DFBF9ACED893322F428488325235BE0A69A91FD057F136A42630BB1760D2D51120C1650' : true, '9E80FF78010C2EC136BDFE96906E08F34ABDEEEC950D359C89AEC752A12C5B29F6D6AA0C' : true, '9F6C1F0F07AC1921F915BBD5C72CD82AF5C27CF5FFF3029ACF1A1A4BEC7EE1964C77D784' : true, '9FDDDBABFF8EFF45215FF06C9D8FFE2B9656CD7B57969895D0E141466806FBB8C6110687' : true, 'A10B44B3CA10D8006E9D0FD80F920AD1B80186D1EB9C86A54104CF3054F34C52B7E558C6' : true, 'A208E4B33EEFDE084B60D0BF7952498D8CC4307BC60755E7B22DD9F7FEA245936C7CF288' : true, 'A2339B4C747873D46CE7C1F38DCB5CE985371CA6E550143DCE2803471BDE3A09E8F8770F' : true, 
'A26F53B7EE40DB4A68E7FA18D9104B7269BD8CF49CD300FB592E1793CA556AF3ECAA35FB' : true, 'A33D88FE161BDDF95C9F1A7FD8C89008A3E31E20B2E46A328520472D0CDE9523E7260C6D' : true, 'A37D2C27E4A7F3AA5F75D4C49264026AB6AF5BE5F878A00114C3D7FEF8C775C34CCD17B6' : true, 'A3EC750F2E88DFFA48014E0B5C486FFB37F76DE6077C90C5B13E931AB74110B4F2E49A27' : true, 'A66B6090239B3F2DBB986FD6A7190D46E0AB059420725493056062023670F7CD2EFC6666' : true, 'A771FD26FC3CE540F19906EBC1936DE9E619D25B380B7B13FDA33E8A58CD82D8A88E0515' : true, 'A7F2E41606411150306B9CE3B49CB0C9E12DFB4B41D7D9C32B30514BAC1D81D8385E2D46' : true, 'A80D6F3978B9436D77426D985ACC23CAD6DAA8208D09D2154D24B52FCB346EB258B28A58' : true, 'A8EDDEEB938866D82FC3BD1DBE45BE4D7639C71847E151B5C7EA01C758FBF12ABA298F7A' : true, 'A923759BBA49366E31C2DBF2E766BA87317A2AD07F2B335EF5A1C34E4B57E8B7D8F1FCA6' : true, 'A981C0B73A9250BC91A521FF3D47879FCB658264EA8CDA186E1752FB52C397367EA387BE' : true, 'AA088FF6F97BB7F2B1A71E9BEAEABD79CF9E876DD3EBFC422697A3B5A37AA076A9062348' : true, 'AA8E5DD9F8DB0A58B78D26876C823555409D4BD917B55C27B69B64CB9822440DCD09B889' : true, 'AABFBF6497DA981D6FC6083A957033CA394FF6850B06BE52E51856CC10E180E882B385CC' : true, 'AB57A65B7D428219B5D85826285EFDFFB12E13634586A46F1AB2606837582DC4ACFD9497' : true, 'ABAB8D2DB740E5973D2FF2A63BDA6A05C18211328A92B3B23809B9B5E2740A07FB12EB5E' : true, 'ABBFEAE36B29A6CCA6783599EFAD2B802F173F7DE99667AFA57AF80AA2D1B12FAC830338' : true, 'ACB694A59C17E0D791529BB19706A6E4D4DE20D05E66FC53FE1A50882C78DB2852CAE474' : true, 'AD8E0F9E016BA0C574D50CD368654F1ECFDEFE102FDA05BBE4C78D2E4423589005B2571D' : true, 'AFB8336E7CDDC60264AD58FC0D4F7BCFBC7B3C6FEF26B9F7AB10D7A1F6B67C5ED2A12D3D' : true, 'B001EE14D9AF291894768EF169332A846E3A55A4190C195C93843CC0DB722E313061F0B1' : true, 'B147BC1857D118A0782DEC71E82A9573204285DCF7EB764195578E136BD4B7D1E98E46A5' : true, 'B39C25B1C32E32538015309D4D02773E6782AAE0EDEEE21A5839D3C0CD14680A4F60142A' : true, 'B3A53E77216DAC4AC0C9FBD5413DCA0658119F0E128287EA50FDD987456F4F78DCFAD6D4' : true, 
'B44ADBE85916461E5AD86EDA064352622964B686135B5DFDDD3253A89BBC24D74B08C64D' : true, 'B465220A7CADDF41B7D544D5ADFA9A75BC9219DDC98E14BF1A781F6E280B04C27F902712' : true, 'B4819E89AC1724FD2A4285271D0C2B5D20CB594FB4EDD895763FD5254E959A6674C6EEB2' : true, 'B5E83436C910445848706D2E83D4B805039EEDB80BE7A03C6953893B20D2D9323A4C2AFD' : true, 'B75274E292B48093F275E4CCD7F2EA263BC49F48F8F373A09C1EBDF85BB1C365C7D811B3' : true, 'B774CD487C5F9A0D3BF3FE66F41B3DFA5B4E0EC28EBD8292A51782241281AD9FEEDD4E4C' : true, 'B7B0D1EC1A033ECEA91511CCB16FB2AEE3D73606996CDFEF61FA04C335E98EA96104264A' : true, 'B8089AF003CC1B0DC86C0B76A1756423A0A1AB90C9FC847B3B1261E8977D5FD32261D3CC' : true, 'B816334C4C4CF2D8D34D06B4A65B4003838E30F77FDD14AA385ED145009C0E2236494FAA' : true, 'B8D312034E8C0C5A47C9B6C59E5B97FD0560A2C738FF98D1172A94FE45FB8A47D665371E' : true, 'BA21EA20D6DDDB8FC1578B40ADA1FCFC801D62D07B449D5C5C035C98EA61FA443C2A58FE' : true, 'BA926442161FCBA116481AF6405C59870456F23D1E9C43AECB0D807F1C0647551A05F456' : true, 'BC6C5133A7E9D366635415721B2192935922A1E15AEA163521F898396A4646B0441B0FA9' : true, 'BD8ACE34A8AE6148E85EC87A1CE8CCBFD2EDF88B41B6FE01461D6E2834EC7C8F6C77721E' : true, 'BDD6F58A7C3CC4A6F934CCC38961F6B2CABB51672400588E6419F1D40878D0403AA20264' : true, 'BE395ABE078AB1121725CC1D46343CB2DE990CED99E0431F60EDC3937E7CD5BF0ED9E5FA' : true, 'BF6059A35BBAF6A77642DA6F1A7B50CF5D989CDB159611365165641B560FDBEA2AC23EF1' : true, 'BFB5E77D3DEA6F1DF08A50BC8C1CFA1DE4554333CA390E128B8BF81D90B70F4002D1D6E9' : true, 'C1623E23C582739C03594B2BE977497F2AB628485E78FBF3AD9E7910DD6BDF99722C96E5' : true, 'C1D43E07AEEBECFD7589E67EA84CEBCD76B76096DD145629AC7585D37063C1BC47861C8B' : true, 'C1D951C084B86A75E82FD7D65F7EAC460B972C9EA6E7CC58D93B20BF71EC412E7209FABF' : true, 'C22A59ABCF152F4CF7E631A316AE840C9158C5EF987301A8903CFDAB03D72DA1D88909C9' : true, 'C2DBAB8E9652C5EEAEF25500896D55953913853E45C439A2DA718CDFB6F3E033E04FEE71' : true, 'C45D0E48B6AC28304E0ABCF938168757D8A6332CE0036FB185F6634F7D6A066526322827' : true, 
'C463AB44201C36E437C05F279D0F6F6E97E2E99636A547554F838FBA38B82E74F89A830A' : true, 'C4D7F0B2A3C57D6167F004CD43D3BA5890DEDE9E4C4E9F6FD88617579DD391BC65A68964' : true, 'C570C4A2ED53780CC810538164CBD01D23E594945195F2414803B4D564D2A3A3F5D88B8C' : true, 'C5A1B7FF73DDD6D7343218DFFC3CAD8806083F593F15A104A069A46BA903D006B7970991' : true, 'C5DFB849CA051355EE2DBA1AC33EB028D69B561148F01C77C54578C10926DF5B856976AD' : true, 'C5E67BBF06D04F43EDC47A658AFB6B19339B6B1450249B557A01877284D9E02FC3D2D8E9' : true, 'C69F6D5CB379B00389CBF03FA4C09F8AEF2DACCBEABB682D32CE4ABD6CB90025236C07BC' : true, 'C7BD11D6918A3582C53666017C6F4779634C3B0230CF1B78B4569FECF2C04A8652EFEF0E' : true, 'C86E97F335A729144782892391A6BEC84A3F8D6BDC0E1ECFCD72E377DEF2D7FF92C19BC7' : true, 'C91962D0DA7E1020FCA4CD0380872DF551A44C28F313E3F9CB5E7C0A1E0E0DD2843758AE' : true, 'C9982777281E3D0E153C8400B88503E656E0FAC03B8F18235518E5D311CAE8C24331AB66' : true, 'CA3DD368F1035CD032FAB82B59E85ADB97817950D81C9670CC34D809CF794431367EF474' : true, 'CB17E431673EE209FE455793F30AFA1C4EB6D578499B1CCF5F581EAD56BE3D9B6744A5E5' : true, 'CBBDC3682DB3CB1859D32952E8C66489C9321DE6B5A82666CF6971A18A56F2D3A8675602' : true, 'CC4DAEFB306BD838FE50EB86614BD2269C615C4D4D85103A5326C24DBAEAE4A2D2D5CC97' : true, 'CD3B3D625B09B80936879E122F7164BA67EB337B684CEB0EC2B0760AB488278CDD9597DD' : true, 'CD68B6A7C7C4CE75E01D4F5744619209132D0D45534B6997CDB2D5C339E25576609B5CC6' : true, 'CD996CDB2AC296155ABF879EAEA5EE93EE29D6EA98E632C6E527E0906F0280688BDF44DC' : true, 'CDF439F3B51850D73EA4C591A03E214BE1A45B141A21DA1A79F41A42A961D669CD0634C1' : true, 'CE78335C5978016E18EAB936A0B92E23AE5083ED7CF45CBC8F61C621FE685D794221156E' : true, 'CF8F3B62A3CACA711BA3E1CB4857351F5D003860F002ED829DEAA41868F788186D62127F' : true, 'CFF4270DD4EDDC6516496D3DDABF6EDE3A44735AE581901F248661461E3B9CC45FF53A1B' : true, 'D2EDEE7992F78272180BFED98BEC13D8A7F8390BA57705096FD36941D42E7198C6D4D9D5' : true, 'D35376E3CE58C5B0F29FF42A05F0A1F2211165CA379FBB5ED801E31C430A62AAC109BCB4' : true, 
'D3D9BDAE9FAC6724B3C81B52E1B9A9BD4A65D5F41DEF39B8B8904A4AD3648133CFC7A1D1' : true, 'D3F3A616C0FA6B1D59B12D964D0E112E74F8A3C3EFE7B390064B83903C21646020E5DFCE' : true, 'D474DE575C39B2D39C8583C5C065498A5FB7EE0633E259DBAD0C4C9AE6D38F1A61C7DC25' : true, 'D480656824F9892228DBF5A49A178F14016897E1A0B8F2C3B134665C20A727B7A158E28F' : true, 'D59788DA6416E71D664AA6EA37FC7ADCEC93DE083C93D933A986B3D5CDE25ACB2FEECF8E' : true, 'D5BEFFB5EE826CF0E2578EA7E5346F03D904080A4929C838E9F185ECF7A22DEF99342407' : true, 'D5E98140C51869FC462C8975620FAA7807E032E020B72C3F192F0628A2593A19A70F069E' : true, 'D63981C6527E9669FCFCCA66ED05F296B51C067CEE2B0C3DF855AB2D92F4FE39D4E70F0E' : true, 'D6A5C3ED5DDD3E00C13D87921F1D3FE4B31EB1B740E36C8402DADC37D44DF5D4674952F9' : true, 'D6ED3CCAE2660FAF10430D779B0409BF85B5FF679B0C79961FC86E4422004613DB179284' : true, 'D7343DEF1D270928E131025B132BDDF7B172B1A56D95F91FE50287E14D37EA6A4463768A' : true, 'D87E32EF69F8BF72031D4082E8A775AF42EFDDE6BFF35ED0BAE6ACDD204C50AE86C4F4FA' : true, 'DA26B6E6C7C2F7B79E4659B3577718653E84D3BCC544C0F6FA19435C851F3F2FCBA8E814' : true, 'DB233DF969FA4BB9958044735E7D4183273EE12457FDC4F90C55E82B56167F62F532E547' : true, 'DBC8F2272EB1EA6A29235DFE563E33DFC8EC8C879269CB4BAB39E98D7E5767F31495739D' : true, 'DC32C3A76D2557C768099DEA2DA9A2D18782C6C304353BCFD29692D2593E7D44D934FF11' : true, 'DC6D6FAF897CDD17332FB5BA9035E9CE7F88CD7223F3C813818C994614A89C99FA3B5247' : true, 'DD753F56BFBBC5A17A1553C690F9FBCC24A40A1F573643A67F0A4B0749F6A22BF28ABB6B' : true, 'DF0DBC7CC836B77699A1ABF0D20F896A342CD9D3062DA48C346965297F081EBC2EF68FDC' : true, 'DF168A83EA83845DB96501C6A65D193EDBAC3C7AA4254DA1AA5CAAD68468CB88EEDDEEA8' : true, 'DF3C735981E7395081044C34A2CBB37B61573A11DF0ED87ED5926522EAD056D744B32371' : true, 'DFF28073CCF1E66173FCF542E9C57CEE99A69BE61AFE886B4D2B82007CB854FC317E1539' : true, 'E006A1C97DCFC9FC0DC0567596D862139BAAE59F56EE21CB435ABE2593DFA7F040D11DCB' : true, 'E14B5273D71BDB9330E5BDE4096EBEFB216B2A29E62A00CE820146D8244141B92511B279' : true, 
'E1C07EA0AABBD4B77B84C228117808A7CDD4EEAE6000AC7F40C3802C171E30148030C072' : true, 'E2D52023ECEEB872E12B5D296FFA43DA9BACF3B664EAC5A17BED08437C72E4ACDA12F7E7' : true, 'E2D8F867F4509435FC5E05FC822295C30446C8BB9A6983C95C8A2E5464687C1115AAB74A' : true, 'E2F8E080D0083F1EC1E9D23F8069AE06C73026E325FE21916B55C4B53A56B13DCAF3D625' : true, 'E60BD2C9CA2D88DB1A710E4B78EB024140E78C1D523D1CD9954FAC1A1AB3BD3CBAA15BFC' : true, 'E77ADCB11F6E061F746C591627C34BC07454535C24A3A758207E3E3ED324F816FB211649' : true, 'E8CC9FB09B40C51F4FBA7421F952857A688B6EB807E8EDA5C7B17C4393D0795F0FAE155F' : true, 'EBB04F1D3A2E372F1DDA6E27D6B680FA18F7C1FCC3090203FD5BAA2F861A754976C8DD25' : true, 'EBF59D290D61F9421F7CC2BA6DE3150928903A635B5280FAE6774C0B6DA7D6BAA64AF2E8' : true, 'EC407D2B765267052CEAF23A4F65F0D8A5EC73D48C34FCBEF1005AEB85843524BBFAB727' : true, 'ED41F58C50C52B9C73E6EE6CEBC2A8261B4B396126276B6491A2686DD70243212D1F1D96' : true, 'EE2931BC327E9AE6E8B5F751B4347190503006091D97D4F5AE39F7CBE7927D7D652D3431' : true, 'EE7A41E0CF757D889280A21A9A7BA157679A4F81FC705DDEC419778DD2EBD875F4C242C6' : true, 'EEFE6169656EF89CC62AF4D72B63EFA29FAD91A6CE6AC6C50047C44EC9D4A50D92D84979' : true, 'EF5AF133EFF1CDBB5102EE12144B96C4A1DB6393916F17E4185509400415C70240B0AE6B' : true, 'F058C503826717AB8FDA0310278E19C2CB44A097857C45FA187ED952086CB9841F2D51B5' : true, 'F096B62FC510D5678E832532E85E2EE52388C9D371CC9E963DFF7D3CA7CEFCD625EC190D' : true, 'F09E639376A595BC1861F19BFBD364DD80BF3DE9A41D768D194B293C85632CDBC8EA8CF7' : true, 'F16A2218C9CDDFCE821D1DB7785CA9A57998A308E14D6585E6C21E153A719FBA5AD34AD9' : true, 'F1BC636A54E0B527F5CDE71AE34D6E4A36B12B49F9819ED74C9EBC380FC6568F5DACB2F7' : true, 'F20598E5964BBE5D55181B55B388E3929078C5A28F9A4325C2A7C73813CDFE13C20F934E' : true, 'F27DE954E4A3220D769FE70BBBB3242B049811056AFE9FD0F5BE01685AACE6A5D1C4454C' : true, 'F37E3A13DC746306741A3C38328CFBA9253F775B0E7797AB645F15915597C39E263631D1' : true, 'F3D752A875FD18ECE17D35B1706EA59C968338F113E36A7BABDD08F7776391A68736582E' : true, 
'F4FF97428070FE66168BBED35315819BF44095C238AC73FC4F77BF8F98DF70F8F091BC52' : true, 'F520DA5203862B92768D5CB72D8B93ADA65CB4733D94A5C865A864647C2C01272C89B143' : true, 'F775AB29FB514EB7775EFF053C998EF5DE28F4A4FFE5B92FA3C503D1A349A7F9962A8212' : true, 'F7B661AB03C25C463E2D2CF4A124D854FAA7D9FB31B746F200A85E65797613D816E063B5' : true, 'F8387C7788DF2C16682EC2E2524BB8F95F3AFC0A8B64F686673474DF7EA9A2FEF9FA7A51' : true, 'F8BEC46322C9A846748BB81D1E4A2BF661EF43D77FCAD46151BC98E0C35912AF9FEB6311' : true, 'FB1B5D438A94CD44C676F2434B47E731F18B538D1BE903B6A6F056435B171589CAF36BF2' : true, 'FC11B8D8089330006D23F97EEB521E0270179B868C00A4FA609152223F9F3E32BDE00562' : true, 'FD49BE5B185A25ECF9C354851040E8D4086418E906CEE89C2353B6E27FBD9E7439F76316' : true };
ahri/docker-private-browsing
browser/profile/extensions/https-everywhere@eff.org/chrome/content/code/Root-CAs.js
JavaScript
lgpl-3.0
29,290
#include "exportable_link.hpp" #include "../../conversion.hpp" namespace WikiMarkup { namespace Components { //##################################################################################################################### std::string ExportableLink::toJson() const { return WikiMarkup::toJson(*this, "Link"); } //--------------------------------------------------------------------------------------------------------------------- void ExportableLink::fromJson(std::string const& str) { WikiMarkup::fromJson(*this, str); } //--------------------------------------------------------------------------------------------------------------------- void ExportableLink::fromJson(JSON::ObjectReader const& reader) { reader.get("data", *this); } //--------------------------------------------------------------------------------------------------------------------- ExportableLink* ExportableLink::clone() const { return new ExportableLink(*this); } //##################################################################################################################### } }
5cript/wiki-markup
components/exportable_components/exportable_link.cpp
C++
lgpl-3.0
1,176
# Copyright 2012-2013 Greg Horn # # This file is part of rawesome. # # rawesome is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # rawesome is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with rawesome. If not, see <http://www.gnu.org/licenses/>. import numpy as np import time #from pylab import * import casadi as C import nmheMaps from ocputils import Constraints from newton import Newton from collocation import LagrangePoly class Nmhe(object): def __init__(self,dae,nk): self.dae = dae self.nk = nk self._gaussNewtonObjF = [] mapSize = len(self.dae.xNames())*(self.nk+1) + len(self.dae.pNames()) V = C.msym('dvs',mapSize) self._dvMap = nmheMaps.VectorizedReadOnlyNmheMap(self.dae,self.nk,V) self._boundMap = nmheMaps.WriteableNmheMap(self.dae,self.nk) self._guessMap = nmheMaps.WriteableNmheMap(self.dae,self.nk) self._U = C.msym('u',self.nk,len(self.dae.uNames())) self._outputMapGenerator = nmheMaps.NmheOutputMapGenerator(self,self._U) self._outputMap = nmheMaps.NmheOutputMap(self._outputMapGenerator, self._dvMap.vectorize(), self._U) self._constraints = Constraints() def __call__(self,*args,**kwargs): return self.lookup(*args,**kwargs) def lookup(self,name,timestep=None): try: return self._dvMap.lookup(name,timestep=timestep) except NameError: pass try: return self._outputMap.lookup(name,timestep) except NameError: pass raise NameError("unrecognized name \""+name+"\"") def bound(self,name,(lb,ub),timestep=None): self._boundMap.setVal(name,(lb,ub),timestep=timestep) def guess(self,name,val,timestep=None): 
self._guessMap.setVal(name,val,timestep=timestep) def constrain(self,lhs,comparison,rhs,tag=('unnamed_constraint',None)): self._constraints.add(lhs,comparison,rhs,tag) def setObj(self,obj): if hasattr(self,'_obj'): raise ValueError("don't change the objective function") self._obj = obj def addGaussNewtonObjF(self,gnF): self._gaussNewtonObjF.append(gnF) def _setupDynamicsConstraints(self,endTime,traj): # Todo: add parallelization # Todo: get endTime right g = [] nicp = 1 deg = 4 p = self._dvMap.pVec() for k in range(self.nk): newton = Newton(LagrangePoly,self.dae,1,nicp,deg,'RADAU') newton.setupStuff(endTime) X0_i = self._dvMap.xVec(k) U_i = self._U[k,:].T # guess if traj is None: newton.isolver.setOutput(1,0) else: X = C.DMatrix([[traj.lookup(name,timestep=k,degIdx=j) for j in range(1,traj.dvMap._deg+1)] \ for name in traj.dvMap._xNames]) Z = C.DMatrix([[traj.lookup(name,timestep=k,degIdx=j) for j in range(1,traj.dvMap._deg+1)] \ for name in traj.dvMap._zNames]) newton.isolver.setOutput(C.veccat([X,Z]),0) _, Xf_i = newton.isolver.call([X0_i,U_i,p]) X0_i_plus = self._dvMap.xVec(k+1) g.append(Xf_i-X0_i_plus) return g def makeSolver(self,endTime,traj=None): # make sure all bounds are set (xMissing,pMissing) = self._boundMap.getMissing() msg = [] for name in xMissing: msg.append("you forgot to set a bound on \""+name+"\" at timesteps: "+str(xMissing[name])) for name in pMissing: msg.append("you forgot to set a bound on \""+name+"\"") if len(msg)>0: raise ValueError('\n'.join(msg)) # constraints: g = self._constraints.getG() glb = self._constraints.getLb() gub = self._constraints.getUb() gDyn = self._setupDynamicsConstraints(endTime,traj) gDynLb = gDynUb = [C.DMatrix.zeros(gg.shape) for gg in gDyn] g = C.veccat([g]+gDyn) glb = C.veccat([glb]+gDynLb) gub = C.veccat([gub]+gDynUb) self.glb = glb self.gub = gub # design vars V = self._dvMap.vectorize() # gradient of arbitraryObj if hasattr(self,'_obj'): arbitraryObj = self._obj else: arbitraryObj = 0 gradF = 
C.gradient(arbitraryObj,V) # hessian of lagrangian: Js = [C.jacobian(gnf,V) for gnf in self._gaussNewtonObjF] gradFgns = [C.mul(J.T,F) for (F,J) in zip(self._gaussNewtonObjF, Js)] gaussNewtonHess = sum([C.mul(J.T,J) for J in Js]) hessL = gaussNewtonHess + C.jacobian(gradF,V) gradF += sum(gradFgns) # equality/inequality constraint jacobian gfcn = C.MXFunction([V,self._U],[g]) gfcn.init() jacobG = gfcn.jacobian(0,0) jacobG.init() # function which generates everything needed f = sum([f_*f_ for f_ in self._gaussNewtonObjF]) if hasattr(self,'_obj'): f += self._obj self.masterFun = C.MXFunction([V,self._U],[hessL, gradF, g, jacobG.call([V,self._U])[0], f]) self.masterFun.init() # self.qp = C.CplexSolver(hessL.sparsity(),jacobG.output(0).sparsity()) self.qp = C.NLPQPSolver(hessL.sparsity(),jacobG.output(0).sparsity()) self.qp.setOption('nlp_solver',C.IpoptSolver) self.qp.setOption('nlp_solver_options',{'print_level':0,'print_time':False}) self.qp.init() def runSolver(self,U,trajTrue=None): # make sure all bounds are set (xMissing,pMissing) = self._guessMap.getMissing() msg = [] for name in xMissing: msg.append("you forgot to set a guess for \""+name+"\" at timesteps: "+str(xMissing[name])) for name in pMissing: msg.append("you forgot to set a guess for \""+name+"\"") if len(msg)>0: raise ValueError('\n'.join(msg)) lbx,ubx = zip(*(self._boundMap.vectorize())) xk = C.DMatrix(list(self._guessMap.vectorize())) for k in range(100): ############# plot stuff ############### print "iteration: ",k # import nmheMaps # xOpt = np.array(xk).squeeze() # traj = nmheMaps.VectorizedReadOnlyNmheMap(self.dae,self.nk,xOpt) # # xsT = np.array([trajTrue.lookup('x',timestep=kk) for kk in range(self.nk+1)] ) # ysT = np.array([trajTrue.lookup('y',timestep=kk) for kk in range(self.nk+1)] ) # zsT = np.array([trajTrue.lookup('z',timestep=kk) for kk in range(self.nk+1)] ) # # xs = np.array([traj.lookup('x',timestep=kk) for kk in range(self.nk+1)] ) # ys = np.array([traj.lookup('y',timestep=kk) for kk 
in range(self.nk+1)] ) # zs = np.array([traj.lookup('z',timestep=kk) for kk in range(self.nk+1)] ) # # outputMap = nmheMaps.NmheOutputMap(self._outputMapGenerator, xOpt, U) # c = np.array([outputMap.lookup('c',timestep=kk) for kk in range(self.nk)]) # cdot = np.array([outputMap.lookup('cdot',timestep=kk) for kk in range(self.nk)]) # # figure() # title(str(float(k))) # subplot(3,2,1) # plot(xs) # plot(xsT) # ylabel('x '+str(k)) # # subplot(3,2,3) # plot(ys) # plot(ysT) # ylabel('y '+str(k)) # # subplot(3,2,5) # plot(zs) # plot(zsT) # ylabel('z '+str(k)) # ## subplot(2,2,2) ## plot(dxs,-dzs) ## ylabel('vel') ## axis('equal') # # subplot(3,2,2) # plot(c) # ylabel('c') # # subplot(3,2,4) # plot(cdot) # ylabel('cdot') # ########################################## self.masterFun.setInput(xk,0) self.masterFun.setInput(U,1) t0 = time.time() try: self.masterFun.evaluate() except RuntimeError as e: print "ERRRRRRRRRRRRROR" show() raise e t1 = time.time() masterFunTime = (t1-t0)*1000 hessL = self.masterFun.output(0) gradF = self.masterFun.output(1) g = self.masterFun.output(2) jacobG = self.masterFun.output(3) f = self.masterFun.output(4) self.qp.setInput(0, C.QP_X_INIT) self.qp.setInput(hessL, C.QP_H) self.qp.setInput(jacobG, C.QP_A) self.qp.setInput(gradF, C.QP_G) assert all((lbx-xk) <= 0), "lower bounds violation" assert all((ubx-xk) >= 0), "upper bounds violation" self.qp.setInput(lbx-xk,C.QP_LBX) self.qp.setInput(ubx-xk,C.QP_UBX) self.qp.setInput(self.glb-g, C.QP_LBA) self.qp.setInput(self.gub-g, C.QP_UBA) t0 = time.time() self.qp.evaluate() t1 = time.time() # print "gradF: ",gradF # print 'dim(jacobG): "gra # print "rank: ",np.linalg.matrix_rank(jacobG) print "masterFun delta time: %.3f ms" % masterFunTime print "f: ",f,'\tmax constraint: ',max(C.fabs(g)) print "qp delta time: %.3f ms" % ((t1-t0)*1000) print "" deltaX = self.qp.output(C.QP_PRIMAL) # import scipy.io # scipy.io.savemat('hessL.mat',{'hessL':np.array(hessL), # 'gradF':np.array(gradF), # 
'x0':0*np.array(deltaX), # 'xopt':np.array(deltaX), # 'lbx':np.array(lbx-xk), # 'ubx':np.array(ubx-xk), # 'jacobG':np.array(jacobG), # 'lba':np.array(self.glb-g), # 'uba':np.array(self.gub-g)}) # import sys; sys.exit() # print deltaX xk += deltaX # show()
ghorn/rawesome
rawe/newton/nmhe.py
Python
lgpl-3.0
10,700
/**************************************************************************** ** ** Copyright (C) 2015 The Qt Company Ltd. ** Contact: http://www.qt.io/licensing/ ** ** This file is part of the QtDeclarative module of the Qt Toolkit. ** ** $QT_BEGIN_LICENSE:LGPL21$ ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and The Qt Company. For licensing terms ** and conditions see http://www.qt.io/terms-conditions. For further ** information use the contact form at http://www.qt.io/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 2.1 or version 3 as published by the Free ** Software Foundation and appearing in the file LICENSE.LGPLv21 and ** LICENSE.LGPLv3 included in the packaging of this file. Please review the ** following information to ensure the GNU Lesser General Public License ** requirements will be met: https://www.gnu.org/licenses/lgpl.html and ** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** As a special exception, The Qt Company gives you certain additional ** rights. These rights are described in The Qt Company LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
** ** $QT_END_LICENSE$ ** ****************************************************************************/ #include "boundingrecthighlighter.h" #include "qdeclarativeviewinspector.h" #include "qmlinspectorconstants.h" #include <QtWidgets/QGraphicsPolygonItem> #include <QtCore/QTimer> #include <QtCore/QObject> #include <QtCore/QDebug> namespace QmlJSDebugger { namespace QtQuick1 { BoundingBox::BoundingBox(QGraphicsObject *itemToHighlight, QGraphicsItem *parentItem, QObject *parent) : QObject(parent), highlightedObject(itemToHighlight), highlightPolygon(0), highlightPolygonEdge(0) { highlightPolygon = new BoundingBoxPolygonItem(parentItem); highlightPolygonEdge = new BoundingBoxPolygonItem(parentItem); highlightPolygon->setPen(QPen(QColor(0, 22, 159))); highlightPolygonEdge->setPen(QPen(QColor(158, 199, 255))); highlightPolygon->setFlag(QGraphicsItem::ItemIsSelectable, false); highlightPolygonEdge->setFlag(QGraphicsItem::ItemIsSelectable, false); } BoundingBox::~BoundingBox() { highlightedObject.clear(); } BoundingBoxPolygonItem::BoundingBoxPolygonItem(QGraphicsItem *item) : QGraphicsPolygonItem(item) { QPen pen; pen.setColor(QColor(108, 141, 221)); pen.setWidth(1); setPen(pen); } int BoundingBoxPolygonItem::type() const { return Constants::EditorItemType; } BoundingRectHighlighter::BoundingRectHighlighter(QDeclarativeViewInspector *view) : LiveLayerItem(view->declarativeView()->scene()), m_view(view) { } BoundingRectHighlighter::~BoundingRectHighlighter() { } void BoundingRectHighlighter::clear() { foreach (BoundingBox *box, m_boxes) freeBoundingBox(box); } BoundingBox *BoundingRectHighlighter::boxFor(QGraphicsObject *item) const { foreach (BoundingBox *box, m_boxes) { if (box->highlightedObject.data() == item) return box; } return 0; } void BoundingRectHighlighter::highlight(QList<QGraphicsObject*> items) { if (items.isEmpty()) return; QList<BoundingBox *> newBoxes; foreach (QGraphicsObject *itemToHighlight, items) { BoundingBox *box = boxFor(itemToHighlight); if 
(!box) box = createBoundingBox(itemToHighlight); newBoxes << box; } qSort(newBoxes); if (newBoxes != m_boxes) { clear(); m_boxes << newBoxes; } highlightAll(); } void BoundingRectHighlighter::highlight(QGraphicsObject* itemToHighlight) { if (!itemToHighlight) return; BoundingBox *box = boxFor(itemToHighlight); if (!box) { box = createBoundingBox(itemToHighlight); m_boxes << box; qSort(m_boxes); } highlightAll(); } BoundingBox *BoundingRectHighlighter::createBoundingBox(QGraphicsObject *itemToHighlight) { if (!m_freeBoxes.isEmpty()) { BoundingBox *box = m_freeBoxes.last(); if (box->highlightedObject.isNull()) { box->highlightedObject = itemToHighlight; box->highlightPolygon->show(); box->highlightPolygonEdge->show(); m_freeBoxes.removeLast(); return box; } } BoundingBox *box = new BoundingBox(itemToHighlight, this, this); connect(itemToHighlight, SIGNAL(xChanged()), this, SLOT(refresh())); connect(itemToHighlight, SIGNAL(yChanged()), this, SLOT(refresh())); connect(itemToHighlight, SIGNAL(widthChanged()), this, SLOT(refresh())); connect(itemToHighlight, SIGNAL(heightChanged()), this, SLOT(refresh())); connect(itemToHighlight, SIGNAL(rotationChanged()), this, SLOT(refresh())); connect(itemToHighlight, SIGNAL(destroyed(QObject*)), this, SLOT(itemDestroyed(QObject*))); return box; } void BoundingRectHighlighter::removeBoundingBox(BoundingBox *box) { delete box; box = 0; } void BoundingRectHighlighter::freeBoundingBox(BoundingBox *box) { if (!box->highlightedObject.isNull()) { disconnect(box->highlightedObject.data(), SIGNAL(xChanged()), this, SLOT(refresh())); disconnect(box->highlightedObject.data(), SIGNAL(yChanged()), this, SLOT(refresh())); disconnect(box->highlightedObject.data(), SIGNAL(widthChanged()), this, SLOT(refresh())); disconnect(box->highlightedObject.data(), SIGNAL(heightChanged()), this, SLOT(refresh())); disconnect(box->highlightedObject.data(), SIGNAL(rotationChanged()), this, SLOT(refresh())); } box->highlightedObject.clear(); 
box->highlightPolygon->hide(); box->highlightPolygonEdge->hide(); m_boxes.removeOne(box); m_freeBoxes << box; } void BoundingRectHighlighter::itemDestroyed(QObject *obj) { foreach (BoundingBox *box, m_boxes) { if (box->highlightedObject.data() == obj) { freeBoundingBox(box); break; } } } void BoundingRectHighlighter::highlightAll() { foreach (BoundingBox *box, m_boxes) { if (box && box->highlightedObject.isNull()) { // clear all highlights clear(); return; } QGraphicsObject *item = box->highlightedObject.data(); QRectF boundingRectInSceneSpace(item->mapToScene(item->boundingRect()).boundingRect()); QRectF boundingRectInLayerItemSpace = mapRectFromScene(boundingRectInSceneSpace); QRectF bboxRect = m_view->adjustToScreenBoundaries(boundingRectInLayerItemSpace); QRectF edgeRect = bboxRect; edgeRect.adjust(-1, -1, 1, 1); box->highlightPolygon->setPolygon(QPolygonF(bboxRect)); box->highlightPolygonEdge->setPolygon(QPolygonF(edgeRect)); } } void BoundingRectHighlighter::refresh() { if (!m_boxes.isEmpty()) highlightAll(); } } // namespace QtQuick1 } // namespace QmlJSDebugger
qtproject/qtquick1
src/plugins/qmltooling/qmldbg_inspector/editor/boundingrecthighlighter.cpp
C++
lgpl-3.0
7,253
/** * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright (C) 2007 Matthias Braeuer (braeuer.matthias@web.de). * * All rights reserved. * * * * This work was done as a project at the Chair for Software Technology, * * Dresden University Of Technology, Germany (http://st.inf.tu-dresden.de). * * It is understood that any modification not identified as such is not * * covered by the preceding statement. * * * * This work is free software; you can redistribute it and/or modify it * * under the terms of the GNU Library General Public License as published * * by the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * * This work is distributed in the hope that it will be useful, but WITHOUT * * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public * * License for more details. * * * * You should have received a copy of the GNU Library General Public License * * along with this library; if not, you can view it online at * * http://www.fsf.org/licensing/licenses/gpl.html. * * * * To submit a bug report, send a comment, or get the latest news on this * * project, please visit the website: http://dresden-ocl.sourceforge.net. * * For more information on OCL and related projects visit the OCL Portal: * * http://st.inf.tu-dresden.de/ocl * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * $Id$ */ package org.dresdenocl.pivotmodel; import java.util.List; import org.eclipse.emf.ecore.EObject; /** * <!-- begin-user-doc --> A representation of the model object ' * <em><b>Named Element</b></em>'. <!-- end-user-doc --> * * <!-- begin-model-doc --> * <p> * A <code>NamedElement</code> represents elements that * may have a name. The name is used for identification of * the named element within the elements owned by its owner. 
* A named element also has a qualified name that allows it to * be unambiguously identified within a hierarchy of nested * named elements. <code>NamedElement</code> is an * abstract metaclass. * </p> * <!-- end-model-doc --> * * <p> * The following features are supported: * <ul> * <li>{@link org.dresdenocl.pivotmodel.NamedElement#getName <em>Name</em>}</li> * <li>{@link org.dresdenocl.pivotmodel.NamedElement#getQualifiedName <em>Qualified Name</em>}</li> * <li>{@link org.dresdenocl.pivotmodel.NamedElement#getOwner <em>Owner</em>}</li> * </ul> * </p> * * @see org.dresdenocl.pivotmodel.PivotModelPackage#getNamedElement() * @model abstract="true" * @generated */ public interface NamedElement extends EObject { /** * Returns the value of the '<em><b>Name</b></em>' attribute. * The default value is <code>""</code>. * <!-- begin-user-doc --> <!-- end-user-doc --> * <!-- begin-model-doc --> * <p> * Represents the name of the <code>NamedElement</code>. * </p> * <!-- end-model-doc --> * @return the value of the '<em>Name</em>' attribute. * @see #setName(String) * @see org.dresdenocl.pivotmodel.PivotModelPackage#getNamedElement_Name() * @model default="" dataType="org.dresdenocl.datatypes.String" required="true" ordered="false" * @generated */ String getName(); /** * Sets the value of the '{@link org.dresdenocl.pivotmodel.NamedElement#getName <em>Name</em>}' attribute. * <!-- begin-user-doc --> <!-- end-user-doc --> * @param value the new value of the '<em>Name</em>' attribute. * @see #getName() * @generated */ void setName(String value); /** * Returns the value of the '<em><b>Qualified Name</b></em>' attribute. <!-- * begin-user-doc --> <!-- end-user-doc --> <!-- begin-model-doc --> * <p> * A qualified name allows the <code>NamedElement to be * identified within a hierarchy of nested elements. It is * constructed from the names of the owners starting at the * root of the hierarchy and ending with the name of the * <code>NamedElement</code> itself. 
This is a derived attribute. * </p> * <!-- end-model-doc --> * * @return the value of the '<em>Qualified Name</em>' attribute. * @see org.dresdenocl.pivotmodel.PivotModelPackage#getNamedElement_QualifiedName() * @model dataType="org.dresdenocl.datatypes.String" transient="true" * changeable="false" volatile="true" derived="true" * @generated */ String getQualifiedName(); /** * Returns the value of the '<em><b>Owner</b></em>' reference. <!-- * begin-user-doc --> <!-- end-user-doc --> <!-- begin-model-doc --> * <p> * A <code>NamedElement</code> may have an owner whose name is used for * constructing the qualified name of the <code>NamedElement</code>. This is a * derived attribute. * * The concept of an owner was introduced in the Pivot Model to facilitate the * computation of qualified names which are not available in UML::Core::Basic. * However, the Pivot Model does not extend the concept of a {@link Namespace} * to {@link Type types} and {@link Operation operations} as in the complete * UML 2.0 specification. That's why arbitrary named elements are not * necessarily located in a namespace. * </p> * <!-- end-model-doc --> * * @return the value of the '<em>Owner</em>' reference. * @see org.dresdenocl.pivotmodel.PivotModelPackage#getNamedElement_Owner() * @model resolveProxies="false" transient="true" changeable="false" * volatile="true" * @generated */ NamedElement getOwner(); /** * <!-- begin-user-doc --> <!-- end-user-doc --> <!-- begin-model-doc --> * <p> * Creates a copy of this <code>NamedElement</code>. In the Pivot Model, all * clone operations are intended to perform deep cloning (as opposed to a * shallow clone). That means, that all contained elements (i.e., all elements * for which this <code>NamedElement</code> is the owner) have to be cloned as * well. Cloning support is necessary for binding {@link GenericElement}s * because such an element needs to be cloned first before its * {@link TypeParameter}s can be bound. 
* </p> * <!-- end-model-doc --> * * @model required="true" * exceptions="org.dresdenocl.pivotmodel.CloneNotSupportedException" * @generated */ NamedElement clone() throws CloneNotSupportedException; /** * <!-- begin-user-doc --> <!-- end-user-doc --> <!-- begin-model-doc --> * <p> * Returns the name of this {@link NamedElement} as a {@link java.util.List} * of {@link java.lang.String}s containing the name of the name spaces and of * this {@link NamedElement}. * </p> * <!-- end-model-doc --> * * @model kind="operation" dataType="org.dresdenocl.datatypes.String" * unique="false" * @generated */ List<String> getQualifiedNameList(); } // NamedElement
dresden-ocl/dresdenocl
plugins/org.dresdenocl.pivotmodel/src/org/dresdenocl/pivotmodel/NamedElement.java
Java
lgpl-3.0
7,696
package nota.oxygen.epub.notes; import java.awt.*; import java.awt.event.*; import javax.swing.*; import java.beans.*; import java.io.File; import java.util.HashSet; import java.util.Set; import nota.oxygen.epub.EpubUtils; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import de.schlichtherle.truezip.file.TArchiveDetector; import de.schlichtherle.truezip.file.TConfig; import de.schlichtherle.truezip.file.TFile; import de.schlichtherle.truezip.fs.archive.zip.JarDriver; import de.schlichtherle.truezip.socket.sl.IOPoolLocator; @SuppressWarnings("serial") public class NoteRefRemover extends JPanel implements ActionListener, PropertyChangeListener { private static JFrame frame; private JButton startButton; private JTextArea taskOutput; private Task task; private static String fileName = ""; public static boolean ERRORS_FOUND; class Task extends SwingWorker<Void, Void> { @Override protected Void doInBackground() throws Exception { if (!EpubUtils.start(taskOutput)) return null; if (!EpubUtils.unzip(taskOutput)) return null; if (!EpubUtils.backup(taskOutput)) return null; EpubUtils.outputProcess("REMOVING NOTEREFS FROM DOCUMENT", true, taskOutput); Document doc = EpubUtils.createDocument(new File(EpubUtils.EPUB_FOLDER + File.separator + fileName), taskOutput); if (doc == null) { return null; } NodeList liNodeList = doc.getDocumentElement().getElementsByTagName("li"); for (int i=0; i<liNodeList.getLength(); i++) { Element liElement = (Element) liNodeList.item(i); NodeList pNodeList = liElement.getElementsByTagName("p"); String id = liElement.getAttribute("id"); if (id == null || id.equals("")) { EpubUtils.outputMessage(taskOutput, "Cannot remove noteref from list element " + liElement.getTextContent() + ", id not found"); ERRORS_FOUND = true; continue; } String epubType = liElement.getAttribute("epub:type"); if (epubType == null || epubType.equals("")) { EpubUtils.outputMessage(taskOutput, "Cannot remove 
noteref from list element with id " + id + ", epub:type not found"); ERRORS_FOUND = true; continue; } if (!epubType.equals("footnote") && !epubType.equals("rearnote")) { EpubUtils.outputMessage(taskOutput, "Cannot remove noteref from list element with id " + id + ", epub:type should be either footnote or rearnote"); ERRORS_FOUND = true; continue; } if (pNodeList.getLength() == 0) { NodeList aNodeList = liElement.getElementsByTagName("a"); System.out.println("notes: " + aNodeList.getLength()); Set<Node> removeNodes = new HashSet<Node>(); for (int j = 0; j < aNodeList.getLength(); j++) { removeNodes.add(aNodeList.item(j)); } for (Node node : removeNodes) { System.out.println("removing node with href " + ((Element)node).getAttribute("href")); liElement.removeChild(node); } } else if (pNodeList.getLength() == 1) { Element pElement = (Element) pNodeList.item(0); NodeList aNodeList = pElement.getElementsByTagName("a"); System.out.println("notes: " + aNodeList.getLength()); Set<Node> removeNodes = new HashSet<Node>(); for (int j = 0; j < aNodeList.getLength(); j++) { removeNodes.add(aNodeList.item(j)); } for (Node node : removeNodes) { System.out.println("removing node with href " + ((Element)node).getAttribute("href")); pElement.removeChild(node); } } else { EpubUtils.outputMessage(taskOutput, "Cannot remove noteref from list element, too many paragraphs"); ERRORS_FOUND = true; continue; } } if (!EpubUtils.saveDocument(doc, new File(EpubUtils.EPUB_FOLDER + File.separator + fileName), taskOutput)) return null; EpubUtils.outputProcess("MODIFYING EPUB", true, taskOutput); // obtain the global configuration TConfig config = TConfig.get(); config.setArchiveDetector(new TArchiveDetector("epub", new JarDriver(IOPoolLocator.SINGLETON))); // get epub file destination String epubPath = EpubUtils.EPUB.getPath(); String epubFolder = EpubUtils.EPUB_FOLDER.substring(EpubUtils.EPUB_FOLDER.lastIndexOf(File.separator)).replace(File.separator, ""); TFile destination = new TFile(epubPath + 
File.separator + epubFolder); // modify epub file destination if (!EpubUtils.addFileToEpub(new TFile(EpubUtils.EPUB_FOLDER + File.separator + fileName), destination, taskOutput)) return null; // commit changes to epub file destination if (!EpubUtils.commitChanges(taskOutput)) return null; if (!EpubUtils.finish(taskOutput)) return null; return null; } @Override public void done() { Toolkit.getDefaultToolkit().beep(); startButton.setEnabled(true); setCursor(null); EpubUtils.outputMessage(taskOutput, "Done"); } } public NoteRefRemover() { super(new BorderLayout()); // Create the demo's UI. startButton = new JButton("Start"); startButton.setActionCommand("start"); startButton.addActionListener(this); startButton.setVisible(false); taskOutput = new JTextArea(30, 130); taskOutput.setMargin(new Insets(5, 5, 5, 5)); taskOutput.setEditable(false); JPanel panel = new JPanel(); panel.add(startButton); add(panel, BorderLayout.PAGE_START); add(new JScrollPane(taskOutput), BorderLayout.CENTER); setBorder(BorderFactory.createEmptyBorder(20, 20, 20, 20)); startButton.doClick(); } @Override public void propertyChange(PropertyChangeEvent evt) { // TODO Auto-generated method stub } @Override public void actionPerformed(ActionEvent e) { startButton.setEnabled(false); setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); task = new Task(); task.addPropertyChangeListener(this); task.execute(); } private static void createAndShowGUI() { JComponent newContentPane = new NoteRefRemover(); newContentPane.setOpaque(true); frame = new JFrame("Removing noterefs freom " + fileName); frame.setContentPane(newContentPane); frame.pack(); frame.setVisible(true); } public static void main(String[] args) { EpubUtils.EPUB = new File(args[0]); EpubUtils.EPUB_FOLDER = args[1]; EpubUtils.prepare("noterefremover", "noterefremove"); fileName = args[2]; // Schedule a job for the event-dispatching thread: creating and showing this application's GUI. 
javax.swing.SwingUtilities.invokeLater(new Runnable() { public void run() { createAndShowGUI(); } }); } }
ybk/nota.oxygen
addins/src/nota/oxygen/epub/notes/NoteRefRemover.java
Java
lgpl-3.0
6,848
<?php
/**
 * Assignment module ------ default controller for task assignments
 *
 * @link http://www.ibos.com.cn/
 * @copyright Copyright &copy; 2008-2013 IBOS Inc
 * @author gzhzh <gzhzh@ibos.com.cn>
 */
/**
 * Assignment module ------ default controller for task assignments, extends AssignmentBaseController
 * @package application.modules.assignment.controllers
 * @version $Id: DefaultController.php 3297 2014-04-29 06:40:54Z gzhzh $
 * @author gzhzh <gzhzh@ibos.com.cn>
 */

namespace application\modules\assignment\controllers;

use application\core\model\Log;
use application\core\utils\Attach;
use application\core\utils\Env;
use application\core\utils\Ibos;
use application\core\utils\StringUtil;
use application\modules\assignment\core\AssignmentOpApi;
use application\modules\assignment\model\Assignment;
use application\modules\assignment\model\AssignmentApply;
use application\modules\assignment\model\AssignmentLog;
use application\modules\assignment\model\AssignmentRemind;
use application\modules\assignment\utils\Assignment as AssignmentUtil;
use application\modules\calendar\model\Calendars;
use application\modules\dashboard\model\Stamp;
use application\modules\message\model\NotifyMessage;
use application\modules\user\model\User;
use CJSON;

class DefaultController extends BaseController {

    /**
     * Stamp ids (3 for now: 4 = "well done", 2 = "improving", 3 = "keep trying")
     * @var array
     */
    private $_stamps = array(4, 2, 3);

    /**
     * Create a new assignment (AJAX, POST only).
     */
    public function actionAdd() {
        // Only handle POST submissions: if this is not a POST request the &&
        // short-circuits and Env::submitCheck is never executed.
        // This avoids Env::submitCheck failing on $_SERVER['HTTP_REFERER']
        // when this page is opened directly; the same HTTP_REFERER problem
        // can also occur when other pages are accessed directly.
        if (Ibos::app()->request->isPostRequest && Env::submitCheck('addsubmit')) {
            $this->beforeSave($_POST); // validate required fields
            $_POST['uid'] = Ibos::app()->user->uid;
            $assignmentId = AssignmentOpApi::getInstance()->addAssignment($_POST);
            $assignment = Assignment::model()->fetchByPk($assignmentId);
            $returnData = array(
                'charge' => User::model()->fetchByUid($assignment['chargeuid']),
                'id' => $assignmentId,
                'subject' => $assignment['subject'],
                'time' => date('m月d日 H:i', $assignment['starttime']) . '--' . date('m月d日 H:i', $assignment['endtime'])
            );
            // Write an action log entry
            $log = array(
                'user' => Ibos::app()->user->username,
                'ip' => Ibos::app()->setting->get('clientip'),
                'isSuccess' => 1
            );
            Log::write($log, 'action', 'module.assignment.default.add');
            $this->ajaxReturn(array('isSuccess' => true, 'data' => $returnData));
        }
    }

    /**
     * Edit an assignment. GET renders the edit form partial; POST
     * (submitCheck('updatesubmit')) persists the changes.
     */
    public function actionEdit() {
        $uid = Ibos::app()->user->uid;
        if (!Env::submitCheck('updatesubmit')) {
            $assignmentId = intval(Env::getRequest('id'));
            $checkRes = $this->checkAvailableById($assignmentId);
            if (!$checkRes['isSuccess']) {
                $this->ajaxReturn($checkRes);
            }
            $assignment = Assignment::model()->fetchByPk($assignmentId);
            // Only the creator (designee) may edit the assignment
            if ($uid != $assignment['designeeuid']) {
                $this->ajaxReturn(array('isSuccess' => false, 'msg' => Ibos::lang('You donot have permission to edit')));
            }
            // Attachments
            if (!empty($assignment['attachmentid'])) {
                $assignment['attachs'] = Attach::getAttach($assignment['attachmentid']);
            }
            $assignment['starttime'] = empty($assignment['starttime']) ? '' : date('Y-m-d H:i', $assignment['starttime']);
            $assignment['endtime'] = empty($assignment['endtime']) ? '' : date('Y-m-d H:i', $assignment['endtime']);
            $assignment['chargeuid'] = StringUtil::wrapId($assignment['chargeuid']);
            $assignment['participantuid'] = StringUtil::wrapId($assignment['participantuid']);
            $assignment['lang'] = Ibos::getLangSource('assignment.default');
            $assignment['assetUrl'] = Ibos::app()->assetManager->getAssetsUrl('assignment');
            $editAlias = 'application.modules.assignment.views.default.edit';
            $editView = $this->renderPartial($editAlias, $assignment, true);
            echo $editView;
        } else {
            $assignmentId = intval(Env::getRequest('id'));
            $assignment = Assignment::model()->fetchByPk($assignmentId);
            $this->beforeSave($_POST); // validate required fields
            $data = AssignmentUtil::handlePostData($_POST);
            $data['updatetime'] = TIMESTAMP;
            $updateSuccess = Assignment::model()->updateByPk($assignmentId, $data);
            if ($updateSuccess) {
                $opApi = AssignmentOpApi::getInstance();
                // Update attachments
                Attach::updateAttach($data['attachmentid']);
                // If the person in charge changed, send a notification message
                if ($data['chargeuid'] != $assignment['chargeuid']) {
                    $chargeuid = StringUtil::getId($_POST['chargeuid']);
                    $participantuid = StringUtil::getId($_POST['participantuid']);
                    $uidArr = array_merge($participantuid, $chargeuid);
                    $opApi->sendNotify($uid, $assignmentId, $data['subject'], $uidArr, 'assignment_new_message');
                }
                // Post an "edited" step comment
                // NOTE(review): 'Eidt the assignment' looks like a typo, but it is
                // the lang key actually used by this module — verify before renaming.
                $opApi->addStepComment($uid, $assignmentId, Ibos::lang('Eidt the assignment'));
                // Write a log entry
                AssignmentLog::model()->addLog($uid, $assignmentId, 'edit', Ibos::lang('Eidt the assignment'));
                $this->ajaxReturn(array('isSuccess' => true, 'msg' => Ibos::lang('Update succeed', 'message')));
            } else {
                $this->ajaxReturn(array('isSuccess' => false, 'msg' => Ibos::lang('Update failed', 'message')));
            }
        }
    }

    /**
     * Delete an assignment (AJAX only). Also removes attachments, any linked
     * calendar reminders, and pending delay/cancel applications.
     */
    public function actionDel() {
        if (Ibos::app()->request->isAjaxRequest) {
            $assignmentId = intval(Env::getRequest('id'));
            $checkRes = $this->checkAvailableById($assignmentId);
            if (!$checkRes['isSuccess']) {
                $this->ajaxReturn($checkRes);
            }
            $assignment = Assignment::model()->fetchByPk($assignmentId);
            // Only the creator (designee) may delete the assignment
            $uid = Ibos::app()->user->uid;
            if ($uid != $assignment['designeeuid']) {
                $this->ajaxReturn(array('isSuccess' => false, 'msg' => Ibos::lang('You donot have permission to delete')));
            }
            // Delete attachments
            if (!empty($assignment['attachmentid'])) {
                Attach::delAttach($assignment['attachmentid']);
            }
            // If the calendar module is installed, delete the link-table rows
            // and any calendar entries created for the reminder time
            if ($this->getIsInstallCalendar() && !empty($assignment['remindtime'])) {
                Calendars::model()->deleteALL("`calendarid` IN(select `calendarid` from {{assignment_remind}} where assignmentid = {$assignmentId}) ");
                AssignmentRemind::model()->deleteAll("assignmentid = {$assignmentId}");
            }
            // Write a log entry
            AssignmentLog::model()->addLog($uid, $assignmentId, 'del', Ibos::lang('Delete the assignment'));
            // Delete the assignment itself
            Assignment::model()->deleteByPk($assignmentId);
            AssignmentApply::model()->deleteAll("assignmentid = {$assignmentId}");
            $this->ajaxReturn(array('isSuccess' => true, 'msg' => Ibos::lang('Del succeed', 'message')));
        }
    }

    /**
     * Assignment detail page. Without an 'op' request parameter it renders the
     * detail view; otherwise it dispatches to the method named by 'op'.
     */
    public function actionShow() {
        $op = Env::getRequest('op');
        $uid = Ibos::app()->user->uid;
        if (empty($op)) {
            $assignmentId = intval(Env::getRequest('assignmentId'));
            // Parameter check
            $checkRes = $this->checkAvailableById($assignmentId);
            if (!$checkRes['isSuccess']) {
                $this->error($checkRes['msg'], $this->createUrl('unfinished/index'));
            }
            $assignment = Assignment::model()->fetchByPk($assignmentId);
            // Permission check
            if (!$this->checkShowPermissions($assignment) && !$this->checkIsSup($assignment)) {
                $this->error(Ibos::lang('You donot have permission to view'), $this->createUrl('unfinished/index'));
            }
            // Fetch attachments
            if (!empty($assignment['attachmentid'])) {
                $assignment['attach'] = Attach::getAttach($assignment['attachmentid']);
            }
            // Stamp
            if (!empty($assignment['stamp'])) {
                $assignment['stampUrl'] = Stamp::model()->fetchStampById($assignment['stamp']);
            }
            // Fetch any delay/cancel application record
            $apply = AssignmentApply::model()->fetchByAttributes(array('assignmentid' => $assignmentId));
            $applyData = $this->handleApplyData($assignmentId, $apply);
            // Is the current user the designee (used for handling delay/cancel requests)?
            $isDesigneeuid = $this->checkIsDesigneeuid($assignment['designeeuid']);
            // Is the current user the person in charge?
            $isChargeuid = $this->checkIsChargeuid($assignment['chargeuid']);
            // If still unread (status 0), switch it to "in progress" (status 1)
            if ($isChargeuid && $assignment['status'] == 0) {
                Assignment::model()->modify($assignmentId, array('status' => 1));
                $assignment['status'] = 1;
            }
            // Write a log entry
            AssignmentLog::model()->addLog($uid, $assignmentId, 'view', Ibos::lang('View the assignment'));
            // Participants
            // NOTE(review): create_function is deprecated as of PHP 7.2 — a
            // closure would be the drop-in replacement here.
            $participantuidArr = explode(',', $assignment['participantuid']);
            $participantuid = array_filter($participantuidArr, create_function('$v', 'return !empty($v);'));
            $reminds = AssignmentRemind::model()->fetchAllByUid($uid);
            $assignment['remindtime'] = in_array($assignmentId, array_keys($reminds)) ? $reminds[$assignmentId] : 0;
            $params = array(
                'isDesigneeuid' => $isDesigneeuid,
                'isChargeuid' => $isChargeuid,
                'designee' => User::model()->fetchByUid($assignment['designeeuid']), // creator
                'charge' => User::model()->fetchByUid($assignment['chargeuid']), // person in charge
                'participantCount' => count($participantuid),
                'participant' => User::model()->fetchRealnamesByUids($participantuid, '、'),
                'assignment' => AssignmentUtil::handleShowData($assignment),
                'applyData' => CJSON::encode($applyData)
            );
            $this->setPageTitle(Ibos::lang('See the assignment details'));
            $this->setPageState('breadCrumbs', array(
                array('name' => Ibos::lang('Assignment'), 'url' => $this->createUrl('unfinished/index')),
                array('name' => Ibos::lang('Assignment details'))
            ));
            NotifyMessage::model()->setReadByUrl($uid, Ibos::app()->getRequest()->getUrl());
            $this->render('show', $params);
        } else {
            // NOTE(review): $op comes straight from the request and is invoked
            // as a method name without a whitelist — confirm upstream routing
            // restricts it, otherwise this is a dynamic-dispatch risk.
            $this->$op();
        }
    }

    /**
     * Validates that person in charge, subject and end time are present
     * before an add or update is persisted; redirects with an error otherwise.
     *
     * @param array $postData raw POST data
     */
    protected function beforeSave($postData) {
        if (empty($postData['chargeuid'])) {
            $this->error(Ibos::lang('Head cannot be empty'), $this->createUrl('unfinished/index'));
        }
        if (empty($postData['subject'])) {
            $this->error(Ibos::lang('Content cannot be empty'), $this->createUrl('unfinished/index'));
        }
        if (empty($postData['endtime'])) {
            $this->error(Ibos::lang('The end time cannot be empty'), $this->createUrl('unfinished/index'));
        }
    }

    /**
     * Shapes a cancel/delay application record for the front end.
     *
     * @param integer $assignmentId assignment id
     * @param array $apply application record (may be empty)
     * @return array front-end representation, empty when there is no application
     */
    protected function handleApplyData($assignmentId, $apply) {
        $applyData = array();
        if (!empty($apply)) {
            if ($apply['isdelay']) {
                // Delay application
                $applyData = array('id' => $assignmentId, 'uid' => $apply['uid'], 'reason' => $apply['delayreason'], 'startTime' => date('m月d日 H:i', $apply['delaystarttime']), 'endTime' => date('m月d日 H:i', $apply['delayendtime']));
            } else {
                // Cancel application
                $applyData = array('id' => $assignmentId, 'uid' => $apply['uid'], 'reason' => $apply['cancelreason']);
            }
        }
        return $applyData;
    }

    /**
     * Returns display data for the configured stamps.
     *
     * @return array list of stamp descriptors (icon path, stamp image, code, id)
     */
    public function getStamps() {
        $stamps = array();
        foreach ($this->_stamps as $id) {
            $stamp = Stamp::model()->fetchByPk($id);
            $stamps[] = array(
                'path' => $stamp['icon'],
                'stampPath' => $stamp['stamp'],
                'stamp' => $stamp['stamp'],
                'title' => $stamp['code'],
                'value' => $id
            );
        }
        return $stamps;
    }
}
vlinhd11/IBOS
system/modules/assignment/controllers/DefaultController.php
PHP
lgpl-3.0
13,474
/*
 * @BEGIN LICENSE
 *
 * Psi4: an open-source quantum chemistry software package
 *
 * Copyright (c) 2007-2022 The Psi4 Developers.
 *
 * The copyrights for code used from other parties are included in
 * the corresponding files.
 *
 * This file is part of Psi4.
 *
 * Psi4 is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, version 3.
 *
 * Psi4 is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License along
 * with Psi4; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * @END LICENSE
 */

#include "pointgrp.h"
#include "petitelist.h"
#include "sobasis.h"
#include "molecule.h"
#include "basisset.h"
#include "gshell.h"
#include "dimension.h"
#include "matrix.h"

#include "psi4/psi4-dec.h"
#include <cstdio>
#include "psi4/libpsi4util/PsiOutStream.h"
#include "psi4/liboptions/liboptions.h"
#include "psi4/libpsi4util/process.h"
;  // NOTE(review): stray semicolon, harmless leftover

using namespace psi;

///////////////////////////////////////////////////////////////////////////////

// Construct an empty AO->SO transform; storage is allocated by set_naoshell().
SOTransform::SOTransform() {
    naoshell_allocated = 0;
    naoshell = 0;
    aoshell = nullptr;
}

SOTransform::~SOTransform() { delete[] aoshell; }

// (Re)allocate space for n AO shells and reset the used-shell count.
void SOTransform::set_naoshell(int n) {
    naoshell = 0;
    delete[] aoshell;
    naoshell_allocated = n;
    aoshell = new SOTransformShell[n];
}

// Record one AO->SO coefficient, grouped by AO shell. Reuses an existing
// entry for aoshellnum if present, otherwise appends a new one.
void SOTransform::add_transform(int aoshellnum, int irrep, double coef, int aofunc, int sofunc) {
    //    outfile->Printf( "SOTransform::add_transform(aoshellnum = %d, irrep = %d, coef = %lf, aofunc = %d, sofunc = %d)\n", aoshellnum, irrep, coef, aofunc, sofunc);
    int i;
    // Linear search for an already-registered entry for this AO shell.
    for (i = 0; i < naoshell; i++) {
        if (aoshell[i].aoshell == aoshellnum) break;
    }
    if (i >= naoshell_allocated) {
        throw PSIEXCEPTION("SOTransform::add_transform: allocation too small");
    }
    aoshell[i].add_func(irrep, coef, aofunc, sofunc);
    aoshell[i].aoshell = aoshellnum;
    if (i == naoshell) naoshell++;
}

///////////////////////////////////////////////////////////////////////////////

// Per-irrep function counters start at zero (8 = max number of irreps).
AOTransform::AOTransform() {
    for (int h = 0; h < 8; h++) nfuncpi[h] = 0;
}

AOTransform::~AOTransform() {}

// Record one coefficient both in the flat list and in the per-irrep list.
void AOTransform::add_transform(int irrep, double coef, int aofunc, int sofunc) {
    soshell.push_back(AOTransformFunction(coef, aofunc, sofunc, irrep));
    soshellpi[irrep].push_back(AOTransformFunction(coef, aofunc, sofunc, irrep));
    nfuncpi[irrep]++;
}

///////////////////////////////////////////////////////////////////////////////

SOTransformShell::SOTransformShell() {
    nfunc = 0;
    func = nullptr;
}

SOTransformShell::~SOTransformShell() {
    if (func) delete[] func;
}

// Append one coefficient entry, growing the array by one each call
// (copy-and-replace; O(n) per insertion).
void SOTransformShell::add_func(int irrep, double coef, int aofunc, int sofunc) {
    auto *newfunc = new SOTransformFunction[nfunc + 1];
    for (int i = 0; i < nfunc; i++) newfunc[i] = func[i];
    delete[] func;
    func = newfunc;
    func[nfunc].irrep = irrep;
    func[nfunc].coef = coef;
    func[nfunc].aofunc = aofunc;
    func[nfunc].sofunc = sofunc;
    nfunc++;
}

///////////////////////////////////////////////////////////////////////////////

SOBasisSet::SOBasisSet(const std::shared_ptr<BasisSet> &basis, const IntegralFactory *integral)
    : basis_(basis), integral_(integral) {
    init();
}

SOBasisSet::SOBasisSet(const std::shared_ptr<BasisSet> &basis, const std::shared_ptr<IntegralFactory> &integral)
    : basis_(basis), integral_(integral.get()) {
    init();
}

// Builds all symmetry-orbital bookkeeping for the AO basis: the AO-shell ->
// SO-shell map, per-irrep function counts and offsets, the AO<->SO transform
// tables, and the shell list sorted by shell size.
void SOBasisSet::init() {
    int i, j, k;

    std::shared_ptr<Molecule> mol = basis_->molecule();

    CharacterTable ct = mol->point_group()->char_table();
    nirrep_ = ct.nirrep();

    // count the number of so shells
    nshell_ = 0;
    for (i = 0; i < mol->nunique(); i++) {
        nshell_ += basis_->nshell_on_center(mol->unique(i));
    }

    //=----- Begin debug printing -----=
    // outfile->Printf( "SOBasis:\n");
    // outfile->Printf( "nshell_ = %d\n", nshell_);
    //=----- End debug printing -----=

    // Allocate memory for unique shell to am
    ushell_am_ = new int[nshell_];

    // map each ao shell to an so shell
    auto *aoshell_to_soshell = new int[basis_->nshell()];
    int soshell = 0;
    for (i = 0; i < mol->nunique(); i++) {
        for (j = 0; j < basis_->nshell_on_center(mol->unique(i)); j++) {
            for (k = 0; k < mol->nequivalent(i); k++) {
                int aoshell = basis_->shell_on_center(mol->equivalent(i, k), j);
                aoshell_to_soshell[aoshell] = soshell;
                // outfile->Printf( "i = %d j = %d k = %d aoshell = %d soshell = %d, mol->equivalent = %d\n",
                //                  i, j, k, aoshell, soshell, mol->equivalent(i,k));
            }
            // For each so shell obtain its angular momentum
            ushell_am_[soshell] = basis_->shell(mol->unique(i), j).am();
            soshell++;
        }
    }

    //=----- Begin debug printing -----=
    // outfile->Printf( "Final aoshell_to_soshell:\n");
    // for (i = 0; i < basis_->nshell(); ++i) {
    //     outfile->Printf( "aoshell_to_soshell[%d] = %d\n", i, aoshell_to_soshell[i]);
    // }
    //=----- End debug printing -----=

    ncomp_ = new int[nirrep_];
    for (i = 0; i < nirrep_; i++) {
        ncomp_[i] = ct.gamma(i).degeneracy();
        if (ncomp_[i] != 1) {
            throw PSIEXCEPTION("SOBasis::SOBasis: not tested for degenerate point groups");
        }
    }

    naofunc_ = new int[nshell_];
    memset(naofunc_, 0, sizeof(int) * nshell_);

    nfunc_ = new int *[nshell_];
    funcoff_ = new int *[nshell_];
    for (i = 0; i < nshell_; i++) {
        nfunc_[i] = new int[nirrep_];
        funcoff_[i] = new int[nirrep_];
        for (j = 0; j < nirrep_; j++) {
            nfunc_[i][j] = 0;
        }
    }

    bool include_pure_transform = false;

    petite_ = std::make_shared<PetiteList>(basis_, integral_, include_pure_transform);
    // petite_->print();

    int nblocks = petite_->nblocks();
    SO_block *soblocks(petite_->compute_aotoso_info());

    // for (i=0; i<nblocks; ++i) {
    //     outfile->Printf( "soblock[%d]\n", i);
    //     soblocks[i].print("");
    // }

    // == Begin forming (A|S)OTransform array ==
    // First pass: size each SO transform by the number of symmetry-equivalent AO shells.
    sotrans_ = new SOTransform[nshell_];           // nshell_ is symmetry unique shells
    aotrans_ = new AOTransform[basis_->nshell()];  // we need the ao shell number here
    for (i = 0; i < nblocks; i++) {
        for (j = 0; j < soblocks[i].len; j++) {
            if (soblocks[i].so[j].length == 0) continue;
            int bfn0 = soblocks[i].so[j].cont[0].bfn;
            int aoshell0 = include_pure_transform ? basis_->ao_to_shell(bfn0) : basis_->function_to_shell(bfn0);
            int soshell0 = aoshell_to_soshell[aoshell0];
            int atom0 = basis_->shell_to_center(aoshell0);
            int nequiv0 = mol->nequivalent(mol->atom_to_unique(atom0));
            sotrans_[soshell0].set_naoshell(nequiv0);
            // outfile->Printf( "i = %d j = %d bfn0 = %d aoshell0 = %d soshell0 = %d atom0 = %d nequiv0 = %d\n",
            //                  i, j, bfn0, aoshell0, soshell0, atom0, nequiv0);
        }
    }

    // Second pass: fill in the coefficients and per-irrep function counts.
    int nfuncall = 0;
    for (i = 0; i < nblocks; i++) {
        int irrep = ct.which_irrep(i);
        for (j = 0; j < soblocks[i].len; j++) {
            if (soblocks[i].so[j].length == 0) continue;
            int bfn0 = soblocks[i].so[j].cont[0].bfn;
            int aoshell0 = include_pure_transform ? basis_->ao_to_shell(bfn0) : basis_->function_to_shell(bfn0);
            int soshell0 = aoshell_to_soshell[aoshell0];
            int sofunc = nfunc_[soshell0][irrep];

            int naofunc = include_pure_transform ? basis_->shell(aoshell0).ncartesian() : basis_->shell(aoshell0).nfunction();
            if (naofunc_[soshell0] && (naofunc_[soshell0] != naofunc)) {
                throw PSIEXCEPTION("SOBasis::SOBasis: mismatch in naofunc");
            }
            naofunc_[soshell0] = naofunc;

            nfunc_[soshell0][irrep]++;
            nfuncall++;

            for (k = 0; k < soblocks[i].so[j].length; k++) {
                int bfn = soblocks[i].so[j].cont[k].bfn;
                double coef = soblocks[i].so[j].cont[k].coef;
                int aoshell = include_pure_transform ? basis_->ao_to_shell(bfn) : basis_->function_to_shell(bfn);
                int aoshellfunc = bfn - (include_pure_transform ? basis_->shell_to_ao_function(aoshell)
                                                                : basis_->shell_to_basis_function(aoshell));
                int soshell = aoshell_to_soshell[aoshell];

                // All contributions to one SO must come from equivalent AO shells.
                if (soshell != soshell0) {
                    throw PSIEXCEPTION("SOBasis::SOBasis: shell changed");
                }

                sotrans_[soshell].add_transform(aoshell, irrep, coef, aoshellfunc, sofunc);
                aotrans_[aoshell].add_transform(irrep, coef, aoshellfunc, sofunc);
            }
        }
    }
    // == End forming (A|S)OTransform array ==

    // Sanity check: every basis function must have been assigned exactly once.
    if (nfuncall != basis_->nbf()) {
        throw PSIEXCEPTION("SOBasis::SOBasis: miscounted number of functions");
    }

    // Per-shell, per-irrep function offsets (prefix sums over nfunc_).
    for (i = 0; i < nshell_; i++) {
        funcoff_[i][0] = 0;
        for (j = 1; j < nirrep_; j++) {
            funcoff_[i][j] = funcoff_[i][j - 1] + nfunc_[i][j - 1];
            // outfile->Printf( "funcoff_[%d][%d] = %d\n", i, j, funcoff_[i][j]);
        }
    }

    for (int i = 0; i < basis_->nshell(); ++i) {
        int usoshell = aoshell_to_soshell[i];
        aotrans_[i].add_offsets(nirrep_, funcoff_[usoshell]);
    }

    delete[] aoshell_to_soshell;
    delete[] soblocks;

    // Flat per-function maps: first-function index per shell, irrep of each
    // function, and each function's index within its irrep.
    func_ = new int[nshell_];
    irrep_ = new int[basis_->nbf()];
    func_within_irrep_ = new int[basis_->nbf()];
    nfunc_in_irrep_ = new int[nirrep_];

    for (i = 0; i < nirrep_; i++) nfunc_in_irrep_[i] = 0;

    if (nshell_) {
        func_[0] = 0;
        for (i = 1; i < nshell_; i++) {
            func_[i] = func_[i - 1] + nfunction(i - 1);
            // outfile->Printf( "func_[%d] = %d\n", i, func_[i]);
        }
        int ibasis_ = 0;
        for (i = 0; i < nshell_; i++) {
            for (j = 0; j < nirrep_; j++) {
                for (k = 0; k < nfunc_[i][j]; k++, ibasis_++) {
                    irrep_[ibasis_] = j;
                    func_within_irrep_[ibasis_] = nfunc_in_irrep_[j]++;
                    // outfile->Printf( "irrep_[%d] = %d func_within_irrep_[%d] = %d\n", ibasis_, j,
                    //                  ibasis_, func_within_irrep_[ibasis_]);
                }
            }
        }
    }

    // Create a map that has a key/value pair
    // The key is the angular momentum function of the SO shell arranged in descending order
    // The value is the actual shell number
    typedef std::pair<int, int> am_to_so_shell_pair;
    std::multimap<int, int, std::less<int> > am_to_so_shell_list;
    for (int i = 0; i < nshell_; i++) {
        am_to_so_shell_list.insert(am_to_so_shell_pair(naofunction(i), i));
        // std::cout << "naofunctions(" << i << ") = " << naofunction(i) << std::endl;
    }

    // This puts the sorted SO shell values into the sorted_so_shell_list_ vector,
    // which can be used by the integral iterator to look up the value of the sorted shells
    std::multimap<int, int, std::less<int> >::iterator it;
    for (it = am_to_so_shell_list.begin(); it != am_to_so_shell_list.end(); it++) {
        // std::cout << "sorted shell size = " << it->first <<
        //         "\t, which belongs to shell number " << it->second << std::endl;
        sorted_so_shell_list_.push_back(it->second);
    }
    // print();
}

// Releases all arrays allocated in init().
SOBasisSet::~SOBasisSet() {
    for (int i = 0; i < nshell_; i++) {
        delete[] nfunc_[i];
        delete[] funcoff_[i];
    }
    delete[] nfunc_;
    delete[] funcoff_;
    delete[] naofunc_;
    delete[] ncomp_;
    delete[] sotrans_;
    delete[] aotrans_;
    delete[] func_;
    delete[] irrep_;
    delete[] func_within_irrep_;
    delete[] nfunc_in_irrep_;
    delete[] ushell_am_;
}

// Largest number of SO functions in any shell (summed over irreps).
int SOBasisSet::max_nfunction_in_shell() const {
    int maxn = 0;
    for (int i = 0; i < nshell_; i++) {
        int n = nfunction(i);
        if (n > maxn) maxn = n;
    }
    return maxn;
}

// Number of SO functions in shell ishell, summed over all irreps.
int SOBasisSet::nfunction(int ishell) const {
    int n = 0;
    for (int i = 0; i < nirrep_; i++) {
        n += nfunc_[ishell][i];
    }
    return n;
}

// Dumps all bookkeeping tables; out == "outfile" writes to the global output
// stream, any other string opens a stream of that name.
void SOBasisSet::print(std::string out) const {
    int i, j, k;
    std::shared_ptr<psi::PsiOutStream> printer = (out == "outfile" ? outfile : std::make_shared<PsiOutStream>(out));
    printer->Printf(" SOBasis:\n");
    printer->Printf(" nshell(SO) = %d\n", nshell_);
    printer->Printf(" nirrep = %d\n", nirrep_);

    printer->Printf(" ncomp = [");
    for (i = 0; i < nirrep_; i++) printer->Printf(" %3d", ncomp_[i]);
    printer->Printf(" ]\n");

    printer->Printf(" nfunc:\n");
    for (i = 0; i < nshell_; i++) {
        printer->Printf(" %3d:", i);
        for (j = 0; j < nirrep_; j++) printer->Printf(" %3d", nfunc_[i][j]);
        printer->Printf("\n");
    }

    printer->Printf(" irrep = [");
    for (i = 0; i < basis_->nbf(); ++i) {
        printer->Printf(" %4d", irrep_[i]);
    }
    printer->Printf("]\n");

    printer->Printf(" func = [");
    for (i = 0; i < nshell_; ++i) {
        printer->Printf(" %4d", func_[i]);
    }
    printer->Printf("]\n");

    printer->Printf(" func_within_irrep = [");
    for (i = 0; i < basis_->nbf(); ++i) {
        printer->Printf(" %4d", func_within_irrep_[i]);
    }
    printer->Printf("]\n");

    printer->Printf(" nfunc_in_irrep = [");
    for (i = 0; i < nirrep_; ++i) {
        printer->Printf(" %4d", nfunc_in_irrep_[i]);
    }
    printer->Printf("]\n");

    printer->Printf(" funcoff = [\n");
    for (i = 0; i < nshell_; i++) {
        printer->Printf(" %3d:", i);
        for (j = 0; j < nirrep_; j++) printer->Printf(" %3d", funcoff_[i][j]);
        printer->Printf("\n");
    }

    printer->Printf(" sotransform:\n");
    for (i = 0; i < nshell_; i++) {
        if (i > 0) printer->Printf("\n");
        for (j = 0; j < sotrans_[i].naoshell; j++) {
            for (k = 0; k < sotrans_[i].aoshell[j].nfunc; k++) {
                printer->Printf(" SO(%3d %2d %d [%2d]) += %12.8f * AO(%3d %2d)\n", i,
                                sotrans_[i].aoshell[j].func[k].sofunc, sotrans_[i].aoshell[j].func[k].irrep,
                                function_offset_within_shell(i, sotrans_[i].aoshell[j].func[k].irrep) +
                                    sotrans_[i].aoshell[j].func[k].sofunc,
                                sotrans_[i].aoshell[j].func[k].coef, sotrans_[i].aoshell[j].aoshell,
                                sotrans_[i].aoshell[j].func[k].aofunc);
            }
        }
    }

    printer->Printf(" aotransform:\n");
    for (i = 0; i < basis_->nshell(); ++i) {
        if (i > 0) printer->Printf("\n");
        for (j = 0; j < (int)aotrans_[i].soshell.size(); ++j) {
            printer->Printf(" AO(%3d) sofunc %d aofunc %d irrep %d coef %12.8f\n", i, aotrans_[i].soshell[j].sofunc,
                            aotrans_[i].soshell[j].aofunc, aotrans_[i].soshell[j].irrep, aotrans_[i].soshell[j].coef);
        }
    }
}

std::shared_ptr<BasisSet> SOBasisSet::basis() const { return basis_; }

// Per-irrep SO dimension, computed from a fresh PetiteList (with the pure
// transform included, unlike petite_ built in init()).
Dimension SOBasisSet::dimension() const {
    auto petite = std::make_shared<PetiteList>(basis_, integral_);
    return petite->SO_basisdim();
}

const std::shared_ptr<PetiteList> SOBasisSet::petite_list() const { return petite_; }
psi4/psi4
psi4/src/psi4/libmints/sobasis.cc
C++
lgpl-3.0
16,063
# # Copyright 2010 Red Hat, Inc. # # This is free software; you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation; either version 3 of # the License, or (at your option) any later version. # # This software is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this software; if not, write to the Free # Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA, or see the FSF site: http://www.fsf.org. Factory.define :account_request do |ar| ar.email "email@example.com" end
auser/steamcannon
spec/factories/account_request_factory.rb
Ruby
lgpl-3.0
868
<?php
/*************************************************************************************/
/*      This file is part of the Thelia package.                                     */
/*                                                                                   */
/*      Copyright (c) OpenStudio                                                     */
/*      email : dev@thelia.net                                                       */
/*      web : http://www.thelia.net                                                  */
/*                                                                                   */
/*      For the full copyright and license information, please view the LICENSE.txt  */
/*      file that was distributed with this source code.                             */
/*************************************************************************************/

namespace TNTFrance\WebService\Model;

use InvalidArgumentException;

/**
 * Class BaseModel
 *
 * Base class for web-service models. Provides magic getXxx()/setXxx()
 * accessors for declared properties and an array export via toArray().
 *
 * @author Julien Chanséaume <julien@thelia.net>
 */
abstract class BaseModel
{
    /**
     * Dispatches unknown method calls: forwards to an existing (e.g.
     * non-public) method, or resolves getXxx()/setXxx() against the
     * matching property.
     *
     * @param string $name the called method name
     * @param array $arguments the call arguments
     * @return mixed the property value for getters, $this for setters,
     *               or the forwarded method's return value
     * @throws \InvalidArgumentException when a getter receives arguments
     *                                   or a setter does not receive exactly one
     * @throws \BadFunctionCallException when nothing matches the call
     */
    public function __call($name, $arguments)
    {
        if (method_exists($this, $name)) {
            // Bug fix: the previous code did call_user_func($this->$name, $arguments),
            // which reads a *property* named $name (not the method) and passes the
            // whole argument array as a single parameter. Forward to the real
            // method and spread the arguments instead.
            return call_user_func_array(array($this, $name), $arguments);
        }

        if (substr($name, 0, 3) === "get") {
            if (!empty($arguments)) {
                throw new InvalidArgumentException("The function ".$name." in ".get_class($this)." doesn't take any argument.");
            }
            $realName = $this->getPropertyName($name);
            if (property_exists($this, $realName)) {
                return $this->$realName;
            }
            // Unknown property: fall through to the BadFunctionCallException below.
        } elseif (substr($name, 0, 3) === "set") {
            if (count($arguments) !== 1) {
                throw new InvalidArgumentException("The function ".$name." in ".get_class($this)." take only one argument.");
            }
            $realName = $this->getPropertyName($name);
            $this->$realName = $arguments[array_keys($arguments)[0]];

            return $this;
        }

        throw new \BadFunctionCallException("The function ".$name." doesn't exist in ".get_class($this));
    }

    /**
     * Exports the model's non-null properties as an associative array,
     * recursively converting nested BaseModel values.
     *
     * @param array $fields property names to include; empty means all
     * @return array the exported data
     */
    public function toArray($fields = [])
    {
        $vars = get_object_vars($this);

        $varsArray = [];
        foreach ($vars as $varName => $varValue) {
            if (null !== $varValue && (empty($fields) || in_array($varName, $fields))) {
                if ($varValue instanceof BaseModel) {
                    $varsArray[$varName] = $varValue->toArray();
                } else {
                    $varsArray[$varName] = $varValue;
                }
            }
        }

        return $varsArray;
    }

    /**
     * Derives the property name from an accessor name,
     * e.g. "getFooBar" -> "fooBar". (Renamed from the misspelled
     * getProprietyName; the method is private, so no caller outside
     * this class can depend on the old name.)
     *
     * @param string $name the accessor method name
     * @return string the property name
     */
    private function getPropertyName($name)
    {
        return strtolower(substr($name, 3, 1)).substr($name, 4);
    }
}
thelia-modules/TNTFrance
WebService/Model/BaseModel.php
PHP
lgpl-3.0
3,115
/* MOD_V2.0 * Copyright (c) 2012 OpenDA Association * All rights reserved. * * This file is part of OpenDA. * * OpenDA is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation, either version 3 of * the License, or (at your option) any later version. * * OpenDA is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with OpenDA. If not, see <http://www.gnu.org/licenses/>. */ package org.costa; import org.openda.application.ApplicationRunner; import java.io.IOException; /** * Test for COSTA Components, executable version */ public class CostaOpendaWAQUATestExe { public static void main(String [] args) throws IOException { ApplicationRunner.setRunningInTest(true); org.openda.application.OpenDaApplication.main(args); //CostaOpendaWAQUATest.testWaqua_EnKF(args); } }
OpenDA-Association/OpenDA
core/native/src/openda/org/costa/CostaOpendaWAQUATestExe.java
Java
lgpl-3.0
1,200
package archie.editor.commands; import org.eclipse.gef.commands.Command; import archie.model.Tim; import archie.model.shapes.CodeElement; import archie.monitoring.MonitoringManager; public class UnmarkCodeElementCommand extends Command { CodeElement ce = null; Tim tim = null; public UnmarkCodeElementCommand() { super(UnmarkCodeElementAction.UNMARK); } public void setShape(CodeElement ce) { this.ce = ce; } public void setTim(Tim inTim) { this.tim = inTim; } @Override public boolean canExecute() { return ce.isMarked(); } @Override public void execute() { // Unmark MonitoringManager.getIntance().unmarkAndClearWarnings(ce.getAssociatedPath()); } @Override public void undo() { // Remark MonitoringManager.getIntance().markAndGenerateWarnings(ce.getAssociatedPath()); } }
SoftwareEngineeringToolDemos/FSE-2014-Archie-Smart-IDE
src/archie/editor/commands/UnmarkCodeElementCommand.java
Java
lgpl-3.0
824
# unpack nominal resolution # Reader - ADIwg JSON to internal data structure # History: # Stan Smith 2019-09-22 original script require_relative 'module_measure' module ADIWG module Mdtranslator module Readers module MdJson module NominalResolution def self.unpack(hResolution, responseObj, inContext = nil) @MessagePath = ADIWG::Mdtranslator::Readers::MdJson::MdJson # return nil object if input is empty if hResolution.empty? @MessagePath.issueWarning(960, responseObj, inContext) return nil end # instance classes needed in script intMetadataClass = InternalMetadata.new intResolution = intMetadataClass.newNominalResolution outContext = 'nominal resolution' outContext = inContext + ' > ' + outContext unless inContext.nil? haveRequired = 0 # nominal resolution - scanning resolution (required if) if hResolution.has_key?('scanningResolution') hMeasure = hResolution['scanningResolution'] unless hMeasure.empty? hMeasure['type'] = 'distance' hReturn = Measure.unpack(hMeasure, responseObj, outContext) unless hReturn.nil? intResolution[:scanningResolution] = hReturn haveRequired += 1 end end end # nominal resolution - ground resolution (required if) if hResolution.has_key?('groundResolution') hMeasure = hResolution['groundResolution'] unless hMeasure.empty? hMeasure['type'] = 'distance' hReturn = Measure.unpack(hMeasure, responseObj, outContext) unless hReturn.nil? intResolution[:groundResolution] = hReturn haveRequired += 1 end end end unless haveRequired > 0 @MessagePath.issueError(961, responseObj, inContext) end if haveRequired == 2 @MessagePath.issueError(962, responseObj, inContext) end return intResolution end end end end end end
adiwg/mdTranslator
lib/adiwg/mdtranslator/readers/mdJson/modules/module_nominalResolution.rb
Ruby
unlicense
2,650
<?php /** * The template for displaying content in the page.php template * * @package WordPress * @subpackage P2C Plus * @since P2C Plus 1.0 */ get_header(); ?> <?php get_footer(); ?>
PowerToChange/gototheworld-theme
content-page.php
PHP
unlicense
191
#! /usr/bin/env ruby # -*- coding: UTF-8 -*- # require 'efl/elm/elm_object' require 'efl/native/elm/elm_win' # module Efl # module Elm # class ElmWin < ElmObject # search_prefixes 'elm_win_' # def initialize parent, title, type=:elm_win_basic, &block super Native.method(:elm_win_add), parent, title, type, &block end def inwin_add ElmInWin.new @ptr end def screen_position_get x = FFI::MemoryPointer.new :int y = FFI::MemoryPointer.new :int Native.elm_win_screen_position_get @ptr, x, y [ x.read_int, y.read_int ] end alias :screen_position :screen_position_get end # end end # # EOF
maikodaraine/EnlightenmentUbuntu
bindings/ruby/ffi-efl/lib/efl/elm/elm_win.rb
Ruby
unlicense
845
<?php define('EmpireCMSAdmin','1'); require("../../class/connect.php"); require("../../class/db_sql.php"); require("../../class/functions.php"); $link=db_connect(); $empire=new mysqlquery(); $editor=1; //验证用户 $lur=is_login(); $logininid=$lur['userid']; $loginin=$lur['username']; $loginrnd=$lur['rnd']; $loginlevel=$lur['groupid']; $loginadminstyleid=$lur['adminstyleid']; //验证权限 CheckLevel($logininid,$loginin,$classid,"memberf"); $enews=$_GET['enews']; $ftype=" checked"; $record="<!--record-->"; $field="<!--field--->"; $url="<a href='ListMemberForm.php'>管理会员表单</a>&nbsp;>&nbsp;增加会员表单"; $postword='增加会员表单'; if($enews=="AddMemberForm"&&$_GET['docopy']) { $fid=(int)$_GET['fid']; $ftype=""; $r=$empire->fetch1("select * from {$dbtbpre}enewsmemberform where fid='$fid'"); $url="<a href='ListMemberForm.php'>管理会员表单</a>&nbsp;>&nbsp;复制会员表单: ".$r['fname']; $postword='复制会员表单'; } //修改 if($enews=="EditMemberForm") { $fid=(int)$_GET['fid']; $ftype=""; $url="<a href='ListMemberForm.php'>管理会员表单</a>&nbsp;>&nbsp;修改会员表单"; $postword='修改会员表单'; $r=$empire->fetch1("select * from {$dbtbpre}enewsmemberform where fid='$fid'"); } //取得字段 $no=0; $fsql=$empire->query("select f,fname from {$dbtbpre}enewsmemberf order by myorder,fid"); while($fr=$empire->fetch($fsql)) { $no++; $bgcolor="ffffff"; if($no%2==0) { $bgcolor="#F8F8F8"; } $like=$field.$fr[f].$record; $slike=",".$fr[f].","; //录入项 $enterchecked=""; if(strstr($r[enter],$like)) { $enterchecked=" checked"; //取得字段标识 $dor=explode($like,$r[enter]); if(strstr($dor[0],$record)) { $dor1=explode($record,$dor[0]); $last=count($dor1)-1; $fr[fname]=$dor1[$last]; } else { $fr[fname]=$dor[0]; } } $entercheckbox="<input name=center[] type=checkbox value='".$fr[f]."'".$enterchecked.">"; //前台显示项 if(strstr($r[viewenter],$like)) { $viewenterchecked=" checked"; } else { $viewenterchecked=""; } $viewentercheckbox="<input name=venter[] type=checkbox value='".$fr[f]."'".$viewenterchecked.">"; //必填项 $mustfchecked=""; if(strstr($r[mustenter],$slike)) {$mustfchecked=" 
checked";} $mustfcheckbox="<input name=menter[] type=checkbox value='".$fr[f]."'".$mustfchecked.">"; //搜索项 $searchchecked=""; if(strstr($r[searchvar],$slike)) { $searchchecked=" checked"; } $searchcheckbox="<input name=schange[] type=checkbox value='".$fr[f]."'".$searchchecked.">"; //可增加 $canaddfchecked=""; if(strstr($r[canaddf],$slike)) { $canaddfchecked=" checked"; } if($enews=="AddMemberForm") { $canaddfchecked=" checked"; } $canaddfcheckbox="<input name=canadd[] type=checkbox value='".$fr[f]."'".$canaddfchecked.">"; //可修改 $caneditfchecked=""; if(strstr($r[caneditf],$slike)) { $caneditfchecked=" checked"; } if($enews=="AddMemberForm") { $caneditfchecked=" checked"; } $caneditfcheckbox="<input name=canedit[] type=checkbox value='".$fr[f]."'".$caneditfchecked.">"; $data.="<tr bgcolor='".$bgcolor."'> <td height=25> <div align=center> <input name=cname[".$fr[f]."] type=text value='".$fr[fname]."'> </div></td> <td> <div align=center> <input name=cfield type=text value='".$fr[f]."' readonly> </div></td> <td><div align=center> ".$entercheckbox." </div></td> <td><div align=center> ".$mustfcheckbox." </div></td> <td><div align=center> ".$canaddfcheckbox." </div></td> <td><div align=center> ".$caneditfcheckbox." </div></td> <td><div align=center> ".$searchcheckbox." </div></td> <td><div align=center> ".$viewentercheckbox." 
</div></td> </tr>"; } db_close(); $empire=null; ?> <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> <title>增加会员表单</title> <link href="../adminstyle/<?=$loginadminstyleid?>/adminstyle.css" rel="stylesheet" type="text/css"> </head> <body> <table width="100%" border="0" align="center" cellpadding="3" cellspacing="1"> <tr> <td>位置:<?=$url?></td> </tr> </table> <form name="form1" method="post" action="../ecmsmember.php"> <table width="100%" border="0" align="center" cellpadding="3" cellspacing="1" class="tableborder"> <tr> <td height="25" colspan="2" class="header"><?=$postword?> <input name="fid" type="hidden" id="fid" value="<?=$fid?>"> <input name="enews" type="hidden" id="enews" value="<?=$enews?>"> </td> </tr> <tr bgcolor="#FFFFFF"> <td width="19%" height="25">表单名称</td> <td width="81%" height="25"><input name="fname" type="text" id="fname" value="<?=$r[fname]?>" size="43"> <font color="#666666">(比如:个人注册) </font></td> </tr> <tr bgcolor="#FFFFFF"> <td height="25" valign="top">选择本表单的字段项<br> <br> <br> <input type="button" name="Submit3" value="字段管理" onclick="window.open('ListMemberF.php');"> </td> <td height="25" valign="top"><table width="100%" border="0" cellspacing="1" cellpadding="3"> <tr bgcolor="#DBEAF5"> <td width="26%" height="25"> <div align="center">字段标识</div></td> <td width="25%" height="25"> <div align="center">字段名</div></td> <td width="8%"> <div align="center">录入项</div></td> <td width="8%"> <div align="center">必填项</div></td> <td width="8%"><div align="center">可增加</div></td> <td width="8%"><div align="center">可修改</div></td> <td width="8%"><div align="center">搜索项</div></td> <td width="9%"><div align="center">前台显示</div></td> </tr> <?=$data?> </table></td> </tr> <tr bgcolor="#FFFFFF"> <td height="25" valign="top"><p>录入表单模板<br> <br> (<font color="#FF0000"> <input name="ftype" type="checkbox" id="ftype" value="1"<?=$ftype?>> 自动生成表单</font>)</p></td> <td 
height="25"><textarea name="ftemp" cols="75" rows="20" id="ftemp" style="WIDTH: 100%"><?=ehtmlspecialchars(stripSlashes($r[ftemp]))?></textarea></td> </tr> <tr bgcolor="#FFFFFF"> <td height="25" valign="top">注释:</td> <td height="25"><textarea name="fzs" cols="75" rows="10" id="fzs" style="WIDTH: 100%"><?=stripSlashes($r[fzs])?></textarea></td> </tr> <tr bgcolor="#FFFFFF"> <td height="25">&nbsp;</td> <td height="25"><input type="submit" name="Submit" value="提交"> <input type="reset" name="Submit2" value="重置"></td> </tr> </table> </form> </body> </html>
liangpz521/b2b
e/admin/member/AddMemberForm.php
PHP
unlicense
6,995
//Sample code for SmartStore // This file assumes that all of the javascript and css files required // as well as the required DOM objects are specified in the index.html file. var SAMPLE_SOUP_NAME = "myPeopleSoup"; var lastSoupCursor = null; var sfSmartstore = cordova.require("salesforce/plugin/smartstore"); function regLinkClickHandlers() { var logToConsole = cordova.require("salesforce/util/logger").logToConsole; logToConsole("regLinkClickHandlers"); $('#link_fetch_sfdc_contacts').click(function() { logToConsole("link_fetch_sfdc_contacts clicked"); forcetkClient.query("SELECT Name,Id FROM Contact", onSuccessSfdcContacts, onErrorSfdc); }); $('#link_reset').click(function() { logToConsole("link_reset clicked"); $("#div_device_contact_list").html(""); $("#div_sfdc_contact_list").html(""); $("#div_sfdc_account_list").html(""); $("#div_sfdc_soup_entry_list").html(""); $("#console").html(""); }); $('#link_logout').click(function() { logToConsole("link_logout clicked"); var sfOAuthPlugin = cordova.require("salesforce/plugin/oauth"); sfOAuthPlugin.logout(); }); $('#link_reg_soup').click(function() { logToConsole("link_reg_soup clicked"); var indexes = [ {path:"Name",type:"string"}, {path:"Id",type:"string"} ]; sfSmartstore.registerSoup(SAMPLE_SOUP_NAME, indexes, onSuccessRegSoup, onErrorRegSoup ); }); $('#link_stuff_soup').click(function() { logToConsole("link_stuff_soup clicked"); runStuffSoup(); }); $('#link_remove_soup').click(function() { sfSmartstore.removeSoup(SAMPLE_SOUP_NAME, onSuccessRemoveSoup, onErrorRemoveSoup); }); $('#link_soup_exists').click(function() { sfSmartstore.soupExists(SAMPLE_SOUP_NAME, onSoupExistsDone, onSoupExistsDone); }); $('#link_query_soup').click(function() { runQuerySoup(); }); $('#link_retrieve_entries').click(function() { runRetrieveEntries(); }); $('#link_cursor_page_zero').click(function() { logToConsole("link_cursor_page_zero clicked"); sfSmartstore.moveCursorToPageIndex(lastSoupCursor,0, onSuccessQuerySoup,onErrorQuerySoup); }); 
$('#link_cursor_page_prev').click(function() { logToConsole("link_cursor_page_prev clicked"); sfSmartstore.moveCursorToPreviousPage(lastSoupCursor,onSuccessQuerySoup,onErrorQuerySoup); }); $('#link_cursor_page_next').click(function() { logToConsole("link_cursor_page_next clicked"); sfSmartstore.moveCursorToNextPage(lastSoupCursor,onSuccessQuerySoup,onErrorQuerySoup); }); } function addEntriesToTestSoup(entries,cb) { var logToConsole = cordova.require("salesforce/util/logger").logToConsole; sfSmartstore.upsertSoupEntries(SAMPLE_SOUP_NAME,entries, function(items) { logToConsole("added entries: " + items.length); $("#div_soup_status_line").html("Soup upsert OK"); if (typeof cb !== "undefined") { cb(items); } }, function(err) { logToConsole("onErrorUpsert: " + err); $("#div_soup_status_line").html("Soup upsert ERROR"); if (typeof cb !== "undefined") { cb(null); } } ); } function addGeneratedEntriesToTestSoup(nEntries, cb) { cordova.require("salesforce/util/logger").logToConsole("addGeneratedEntriesToTestSoup " + nEntries); var entries = []; for (var i = 0; i < nEntries; i++) { var myEntry = { Name: "Todd Stellanova" + i, Id: "00300" + i, attributes:{type:"Contact"} }; entries.push(myEntry); } addEntriesToTestSoup(entries,cb); } function runStuffSoup() { var inputStr = $('#input_stuff_soup_count').val(); if (inputStr.length === 0) { inputStr = null; } var inputVal = 1; if (inputStr !== null) { inputVal = parseInt(inputStr); } addGeneratedEntriesToTestSoup(inputVal); } function runQuerySoup() { var indexPath = $('#input_indexPath').val(); if (indexPath.length === 0) { indexPath = null; } var beginKey = $('#input_querySoup_beginKey').val(); if (beginKey.length === 0) { beginKey = null; } var endKey = $('#input_querySoup_endKey').val(); if (endKey.length === 0) { endKey = null; } var queryType = $('#select_querySoup_type').val(); var pageSizeStr = $('#input_querySoup_pageSize').val(); if (pageSizeStr.length === 0) { pageSizeStr = null; } var pageSizeVal = 25; if 
(pageSizeStr !== null) { pageSizeVal = parseInt(pageSizeStr); } cordova.require("salesforce/util/logger").logToConsole("querySoup path: '"+ indexPath + "' begin: '" + beginKey + "' end: '" + endKey + "' [" + pageSizeVal + ']'); var querySpec; if ("range" == queryType) { querySpec = sfSmartstore.buildRangeQuerySpec(indexPath,beginKey,endKey,null,pageSizeVal); } else if ("like" == queryType) { querySpec = sfSmartstore.buildLikeQuerySpec(indexPath,beginKey,null,pageSizeVal); } else if ("all" == queryType) { querySpec = sfSmartstore.buildAllQuerySpec(indexPath, null, pageSizeVal) ; } else { //"exact" querySpec = sfSmartstore.buildExactQuerySpec(indexPath,beginKey,null,pageSizeVal); } sfSmartstore.querySoup(SAMPLE_SOUP_NAME,querySpec, onSuccessQuerySoup, onErrorQuerySoup ); } function runRetrieveEntries() { var inputStr = $('#input_retrieve_entries').val(); if (inputStr.length === 0) { inputStr = null; } cordova.require("salesforce/util/logger").logToConsole("runRetrieveEntries: " + inputStr ); var entryIds = eval(inputStr); sfSmartstore.retrieveSoupEntries(SAMPLE_SOUP_NAME, entryIds, onSuccessRetrieveEntries, onErrorRetrieveEntries ); } function onSuccessRegSoup(param) { cordova.require("salesforce/util/logger").logToConsole("onSuccessRegSoup: " + param); $("#div_soup_status_line").html("Soup registered: " + SAMPLE_SOUP_NAME); } function onErrorRegSoup(param) { cordova.require("salesforce/util/logger").logToConsole("onErrorRegSoup: " + param); $("#div_soup_status_line").html("registerSoup ERROR"); } function onSuccessUpsert(param) { cordova.require("salesforce/util/logger").logToConsole("onSuccessUpsert: " + param); $("#div_soup_status_line").html("Soup upsert OK"); } function onErrorUpsert(param) { cordova.require("salesforce/util/logger").logToConsole("onErrorUpsert: " + param); $("#div_soup_status_line").html("Soup upsert ERROR"); } function onSuccessQuerySoup(cursor) { cordova.require("salesforce/util/logger").logToConsole("onSuccessQuerySoup totalPages: " + 
cursor.totalPages); lastSoupCursor = cursor; $("#div_sfdc_soup_entry_list").html(""); var ul = $('<ul data-role="listview" data-inset="true" data-theme="a" data-dividertheme="a">Page ' + (cursor.currentPageIndex+1) + '/' + cursor.totalPages + ' Entries: ' + cursor.currentPageOrderedEntries.length + ' </ul>'); $("#div_sfdc_soup_entry_list").append(ul); var curPageEntries = cursor.currentPageOrderedEntries; $.each(curPageEntries, function(i,entry) { var formattedName = entry.name; var entryId = entry._soupEntryId; var phatName = entry.Name; if (phatName) { formattedName = phatName; } var newLi = $("<li><a href='#'>" + entryId + " - " + formattedName + "</a></li>"); ul.append(newLi); }); $("#div_sfdc_soup_entry_list").trigger( "create" ); } function onErrorQuerySoup(param) { cordova.require("salesforce/util/logger").logToConsole("onErrorQuerySoup: " + param); } function onSuccessRetrieveEntries(entries ) { cordova.require("salesforce/util/logger").logToConsole("onSuccessRetrieveEntries : " + entries.length); $("#div_sfdc_soup_entry_list").html(""); var ul = $('<ul data-role="listview" data-inset="true" data-theme="a" data-dividertheme="a"> ' + ' Entries: ' + entries.length + ' </ul>'); $("#div_sfdc_soup_entry_list").append(ul); $.each(entries, function(i,entry) { var formattedName = entry.name; var entryId = entry._soupEntryId; var phatName = entry.Name; if (phatName) { formattedName = phatName; } var newLi = $("<li><a href='#'>" + entryId + " - " + formattedName + "</a></li>"); ul.append(newLi); }); $("#div_sfdc_soup_entry_list").trigger( "create" ); } function onErrorRetrieveEntries(param) { cordova.require("salesforce/util/logger").logToConsole("onErrorRetrieveEntries: " + param); } function onSuccessRemoveSoup(param) { cordova.require("salesforce/util/logger").logToConsole("onSuccessRemoveSoup: " + param); $("#div_soup_status_line").html("Soup removed: " + SAMPLE_SOUP_NAME); } function onErrorRemoveSoup(param) { 
cordova.require("salesforce/util/logger").logToConsole("onErrorRemoveSoup: " + param); $("#div_soup_status_line").html("removeSoup ERROR"); } function onSoupExistsDone(param) { cordova.require("salesforce/util/logger").logToConsole("onSoupExistsDone: " + param); $("#div_soup_status_line").html("Soup exists: " + param); } function onSuccessSfdcContacts(response) { var logToConsole = cordova.require("salesforce/util/logger").logToConsole; logToConsole("onSuccessSfdcContacts: received " + response.totalSize + " contacts"); var entries = []; $("#div_sfdc_contact_list").html(""); var ul = $('<ul data-role="listview" data-inset="true" data-theme="a" data-dividertheme="a"></ul>'); $("#div_sfdc_contact_list").append(ul); ul.append($('<li data-role="list-divider">Salesforce Contacts: ' + response.totalSize + '</li>')); $.each(response.records, function(i, contact) { entries.push(contact); logToConsole("name: " + contact.Name); var newLi = $("<li><a href='#'>" + (i+1) + " - " + contact.Name + "</a></li>"); ul.append(newLi); }); if (entries.length > 0) { sfSmartstore.upsertSoupEntries(SAMPLE_SOUP_NAME, entries, function(items) { var statusTxt = "upserted: " + items.length + " contacts"; logToConsole(statusTxt); $("#div_soup_status_line").html(statusTxt); $("#div_sfdc_contact_list").trigger( "create" ); }, onErrorUpsert); } } function onErrorSfdc(error) { cordova.require("salesforce/util/logger").logToConsole("onErrorSfdc: " + JSON.stringify(error)); alert('Error getting sfdc contacts!'); }
marcus-bessa/MobileCampaign
forcedroid/hybrid/SampleApps/SmartStoreExplorer/assets/www/smartstoreexplorer.js
JavaScript
unlicense
13,221
// // ControlPrinter.cpp // Tonic // // Created by Morgan Packard on 4/28/13. // Copyright (c) 2013 Nick Donaldson. All rights reserved. // #include "ControlPrinter.h" namespace Tonic { namespace Tonic_{ ControlPrinter_::ControlPrinter_() :message("%f\n"), hasPrinted(false){ } void ControlPrinter_::setMessage(string messageArg){ message = messageArg + "\n"; } } // Namespace Tonic_ } // Namespace Tonic
TonicAudio/ofxTonic
src/Tonic/ControlPrinter.cpp
C++
unlicense
450
<?php //发表评论 function AddPl($username,$password,$nomember,$key,$saytext,$id,$classid,$repid,$add){ global $empire,$dbtbpre,$public_r,$class_r,$level_r; //验证本时间允许操作 eCheckTimeCloseDo('pl'); //验证IP eCheckAccessDoIp('pl'); $id=(int)$id; $repid=(int)$repid; $classid=(int)$classid; //验证码 $keyvname='checkplkey'; if($public_r['plkey_ok']) { ecmsCheckShowKey($keyvname,$key,1); } $username=RepPostVar($username); $password=RepPostVar($password); $muserid=(int)getcvar('mluserid'); $musername=RepPostVar(getcvar('mlusername')); $mgroupid=(int)getcvar('mlgroupid'); if($muserid)//已登陆 { $cklgr=qCheckLoginAuthstr(); if($cklgr['islogin']) { $username=$musername; } else { $muserid=0; } } else { if(empty($nomember))//非匿名 { if(!$username||!$password) { printerror("FailPassword","history.go(-1)",1); } $ur=$empire->fetch1("select ".eReturnSelectMemberF('userid,salt,password,checked,groupid')." from ".eReturnMemberTable()." where ".egetmf('username')."='$username' limit 1"); if(empty($ur['userid'])) { printerror("FailPassword","history.go(-1)",1); } if(!eDoCkMemberPw($password,$ur['password'],$ur['salt'])) { printerror("FailPassword","history.go(-1)",1); } if($ur['checked']==0) { printerror("NotCheckedUser",'',1); } $muserid=$ur['userid']; $mgroupid=$ur['groupid']; } else { $muserid=0; } } if($public_r['plgroupid']) { if(!$muserid) { printerror("GuestNotToPl","history.go(-1)",1); } if($level_r[$mgroupid][level]<$level_r[$public_r['plgroupid']][level]) { printerror("NotLevelToPl","history.go(-1)",1); } } //专题 $doaction=$add['doaction']; if($doaction=='dozt') { if(!trim($saytext)||!$classid) { printerror("EmptyPl","history.go(-1)",1); } //是否关闭评论 $r=$empire->fetch1("select ztid,closepl,checkpl,restb from {$dbtbpre}enewszt where ztid='$classid'"); if(!$r['ztid']) { printerror("ErrorUrl","history.go(-1)",1); } if($r['closepl']) { printerror("CloseClassPl","history.go(-1)",1); } //审核 if($r['checkpl']) {$checked=1;} else {$checked=0;} $restb=$r['restb']; $pubid='-'.$classid; $id=0; 
$returl=$public_r['plurl']."?doaction=dozt&classid=$classid"; } else//信息 { if(!trim($saytext)||!$id||!$classid) { printerror("EmptyPl","history.go(-1)",1); } //表存在 if(empty($class_r[$classid][tbname])) { printerror("ErrorUrl","history.go(-1)",1); } //是否关闭评论 $r=$empire->fetch1("select classid,stb,restb from {$dbtbpre}ecms_".$class_r[$classid][tbname]." where id='$id' limit 1"); if(!$r['classid']||$r['classid']!=$classid) { printerror("ErrorUrl","history.go(-1)",1); } if($class_r[$r[classid]][openpl]) { printerror("CloseClassPl","history.go(-1)",1); } //单信息关闭评论 $pubid=ReturnInfoPubid($classid,$id); $finfor=$empire->fetch1("select closepl from {$dbtbpre}ecms_".$class_r[$classid][tbname]."_data_".$r['stb']." where id='$id' limit 1"); if($finfor['closepl']) { printerror("CloseInfoPl","history.go(-1)",1); } //审核 if($class_r[$classid][checkpl]) {$checked=1;} else {$checked=0;} $restb=$r['restb']; $returl=$public_r['plurl']."?classid=$classid&id=$id"; } //设置参数 $plsetr=$empire->fetch1("select pltime,plsize,plincludesize,plclosewords,plmustf,plf,plmaxfloor,plquotetemp from {$dbtbpre}enewspl_set limit 1"); if(strlen($saytext)>$plsetr['plsize']) { $GLOBALS['setplsize']=$plsetr['plsize']; printerror("PlSizeTobig","history.go(-1)",1); } $time=time(); $saytime=$time; $pltime=getcvar('lastpltime'); if($pltime) { if($time-$pltime<$plsetr['pltime']) { $GLOBALS['setpltime']=$plsetr['pltime']; printerror("PlOutTime","history.go(-1)",1); } } $sayip=egetip(); $username=RepPostStr($username); $username=str_replace("\r\n","",$username); $saytext=nl2br(RepFieldtextNbsp(RepPostStr($saytext))); if($repid) { CkPlQuoteFloor($plsetr['plmaxfloor'],$saytext);//验证楼层 $saytext=RepPlTextQuote($repid,$saytext,$plsetr,$restb); } //过滤字符 $saytext=ReplacePlWord($plsetr['plclosewords'],$saytext); if($level_r[$mgroupid]['plchecked']) { $checked=0; } $ret_r=ReturnPlAddF($add,$plsetr,0); //主表 $sql=$empire->query("insert into 
{$dbtbpre}enewspl_".$restb."(pubid,username,sayip,saytime,id,classid,checked,zcnum,fdnum,userid,isgood,saytext".$ret_r['fields'].") values('$pubid','".$username."','$sayip','$saytime','$id','$classid','$checked',0,0,'$muserid',0,'".addslashes($saytext)."'".$ret_r['values'].");"); $plid=$empire->lastid(); if($doaction!='dozt') { //信息表加1 $usql=$empire->query("update {$dbtbpre}ecms_".$class_r[$classid][tbname]." set plnum=plnum+1 where id='$id' limit 1"); } //更新新评论数 DoUpdateAddDataNum('pl',$restb,1); //设置最后发表时间 $set1=esetcookie("lastpltime",time(),time()+3600*24); ecmsEmptyShowKey($keyvname);//清空验证码 if($sql) { $reurl=DoingReturnUrl($returl,$_POST['ecmsfrom']); printerror("AddPlSuccess",$reurl,1); } else {printerror("DbError","history.go(-1)",1);} } //替换回复 function RepPlTextQuote($repid,$saytext,$pr,$restb){ global $public_r,$empire,$dbtbpre,$fun_r; $quotetemp=stripSlashes($pr['plquotetemp']); $r=$empire->fetch1("select userid,username,saytime,saytext from {$dbtbpre}enewspl_".$restb." where plid='$repid'"); if(empty($r['username'])) { $r['username']=$fun_r['nomember']; } if($r['userid']) { $r['username']="<a href=\"$public_r[newsurl]e/space/?userid=$r[userid]\" target=\"_blank\">$r[username]</a>"; } $quotetemp=str_replace('[!--plid--]',$repid,$quotetemp); $quotetemp=str_replace('[!--pltime--]',date('Y-m-d H:i:s',$r['saytime']),$quotetemp); $quotetemp=str_replace('[!--username--]',$r['username'],$quotetemp); $quotetemp=str_replace('[!--pltext--]',$r['saytext'],$quotetemp); $restr=$quotetemp.$saytext; return $restr; } //去掉原引用 function RepYPlQuote($text){ $preg_str="/<div (.+?)<\/div>/is"; $text=preg_replace($preg_str,"",$text); return $text; } //验证引用楼数 function CkPlQuoteFloor($plmaxfloor,$saytext){ if(!$plmaxfloor) { return ''; } $fr=explode('<div',$saytext); $fcount=count($fr)-1; if($fcount>$plmaxfloor) { printerror('PlOutMaxFloor','history.go(-1)',1); } } //禁用字符 function ReplacePlWord($plclosewords,$text){ global $empire,$dbtbpre; if(empty($text)) { return $text; } 
toCheckCloseWord($text,$plclosewords,'HavePlCloseWords'); return $text; } //返回字段 function ReturnPlAddF($add,$pr,$ecms=0){ global $empire,$dbtbpre; $fr=explode(',',$pr['plf']); $count=count($fr)-1; $ret_r['fields']=''; $ret_r['values']=''; for($i=1;$i<$count;$i++) { $f=$fr[$i]; $fval=RepPostStr($add[$f]); //必填 if(strstr($pr[plmustf],','.$f.',')) { if(!trim($fval)) { $chfr=$empire->fetch1("select fname from {$dbtbpre}enewsplf where f='$f' limit 1"); $GLOBALS['msgmustf']=$chfr['fname']; printerror('EmptyPlMustF','',1); } } $fval=nl2br(RepFieldtextNbsp($fval)); $ret_r['fields'].=",".$f; $ret_r['values'].=",'".addslashes($fval)."'"; } return $ret_r; } //支持/反对评论 function DoForPl($add){ global $empire,$dbtbpre,$class_r; $classid=(int)$add['classid']; $id=(int)$add['id']; $plid=(int)$add['plid']; $dopl=(int)$add['dopl']; $doajax=(int)$add['doajax']; //专题 $doaction=$add['doaction']; if($doaction=='dozt') { if(!$classid||!$plid) { $doajax==1?ajax_printerror('','','ErrorUrl',1):printerror('ErrorUrl','',1); } $infor=$empire->fetch1("select ztid,restb from {$dbtbpre}enewszt where ztid='$classid'"); if(!$infor['ztid']) { $doajax==1?ajax_printerror('','','ErrorUrl',1):printerror('ErrorUrl','',1); } $pubid='-'.$classid; } else//信息 { if(!$classid||!$id||!$plid||!$class_r[$classid][tbname]) { $doajax==1?ajax_printerror('','','ErrorUrl',1):printerror('ErrorUrl','',1); } $infor=$empire->fetch1("select classid,restb from {$dbtbpre}ecms_".$class_r[$classid][tbname]." where id='$id' limit 1"); if(!$infor['classid']) { $doajax==1?ajax_printerror('','','ErrorUrl',1):printerror('ErrorUrl','',1); } $pubid=ReturnInfoPubid($classid,$id); } //连续发表 if(getcvar('lastforplid'.$plid)) { $doajax==1?ajax_printerror('','','ReDoForPl',1):printerror('ReDoForPl','',1); } if($dopl==1) { $f='zcnum'; $msg='DoForPlGSuccess'; } else { $f='fdnum'; $msg='DoForPlBSuccess'; } $sql=$empire->query("update {$dbtbpre}enewspl_".$infor['restb']." 
set ".$f."=".$f."+1 where plid='$plid' and pubid='$pubid'"); if($sql) { esetcookie('lastforplid'.$plid,$plid,time()+30*24*3600); //最后发布 if($doajax==1) { $nr=$empire->fetch1("select ".$f." from {$dbtbpre}enewspl_".$infor['restb']." where plid='$plid' and pubid='$pubid'"); ajax_printerror($nr[$f],$add['ajaxarea'],$msg,1); } else { printerror($msg,$_SERVER['HTTP_REFERER'],1); } } else { $doajax==1?ajax_printerror('','','DbError',1):printerror('DbError','',1); } } ?>
liangpz521/b2b
e/pl/plfun.php
PHP
unlicense
9,103
//===------- ItaniumCXXABI.cpp - Emit LLVM Code from ASTs for a Module ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This provides C++ code generation targeting the Itanium C++ ABI. The class
// in this file generates structures that follow the Itanium C++ ABI, which is
// documented at:
//  https://itanium-cxx-abi.github.io/cxx-abi/abi.html
//  https://itanium-cxx-abi.github.io/cxx-abi/abi-eh.html
//
// It also supports the closely-related ARM ABI, documented at:
// https://developer.arm.com/documentation/ihi0041/g/
//
//===----------------------------------------------------------------------===//

#include "CGCXXABI.h"
#include "CGCleanup.h"
#include "CGRecordLayout.h"
#include "CGVTables.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "TargetInfo.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Mangle.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/Type.h"
#include "clang/CodeGen/ConstantInitBuilder.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/ScopedPrinter.h"

using namespace clang;
using namespace CodeGen;

namespace {
class ItaniumCXXABI : public CodeGen::CGCXXABI {
  /// VTables - All the vtables which have been defined.
  llvm::DenseMap<const CXXRecordDecl *, llvm::GlobalVariable *> VTables;

  /// All the thread wrapper functions that have been used.
  llvm::SmallVector<std::pair<const VarDecl *, llvm::Function *>, 8>
      ThreadWrappers;

protected:
  // ABI-variant knobs; the ARM/iOS64/WebAssembly subclasses below flip these
  // via the constructor arguments.
  bool UseARMMethodPtrABI;
  bool UseARMGuardVarABI;
  bool Use32BitVTableOffsetABI;

  ItaniumMangleContext &getMangleContext() {
    return cast<ItaniumMangleContext>(CodeGen::CGCXXABI::getMangleContext());
  }

public:
  ItaniumCXXABI(CodeGen::CodeGenModule &CGM,
                bool UseARMMethodPtrABI = false,
                bool UseARMGuardVarABI = false) :
    CGCXXABI(CGM), UseARMMethodPtrABI(UseARMMethodPtrABI),
    UseARMGuardVarABI(UseARMGuardVarABI),
    Use32BitVTableOffsetABI(false) { }

  bool classifyReturnType(CGFunctionInfo &FI) const override;

  RecordArgABI getRecordArgABI(const CXXRecordDecl *RD) const override {
    // If C++ prohibits us from making a copy, pass by address.
    if (!RD->canPassInRegisters())
      return RAA_Indirect;
    return RAA_Default;
  }

  bool isThisCompleteObject(GlobalDecl GD) const override {
    // The Itanium ABI has separate complete-object vs. base-object
    // variants of both constructors and destructors.
    if (isa<CXXDestructorDecl>(GD.getDecl())) {
      switch (GD.getDtorType()) {
      case Dtor_Complete:
      case Dtor_Deleting:
        return true;

      case Dtor_Base:
        return false;

      case Dtor_Comdat:
        llvm_unreachable("emitting dtor comdat as function?");
      }
      llvm_unreachable("bad dtor kind");
    }
    if (isa<CXXConstructorDecl>(GD.getDecl())) {
      switch (GD.getCtorType()) {
      case Ctor_Complete:
        return true;

      case Ctor_Base:
        return false;

      case Ctor_CopyingClosure:
      case Ctor_DefaultClosure:
        llvm_unreachable("closure ctors in Itanium ABI?");

      case Ctor_Comdat:
        llvm_unreachable("emitting ctor comdat as function?");
      }
      // NOTE(review): message says "dtor" in the ctor branch — preserved
      // as-is to keep this a comment-only change.
      llvm_unreachable("bad dtor kind");
    }

    // No other kinds.
    return false;
  }

  bool isZeroInitializable(const MemberPointerType *MPT) override;

  llvm::Type *ConvertMemberPointerType(const MemberPointerType *MPT) override;

  CGCallee EmitLoadOfMemberFunctionPointer(CodeGenFunction &CGF,
                                           const Expr *E,
                                           Address This,
                                           llvm::Value *&ThisPtrForCall,
                                           llvm::Value *MemFnPtr,
                                           const MemberPointerType *MPT) override;

  llvm::Value *
    EmitMemberDataPointerAddress(CodeGenFunction &CGF, const Expr *E,
                                 Address Base,
                                 llvm::Value *MemPtr,
                                 const MemberPointerType *MPT) override;

  llvm::Value *EmitMemberPointerConversion(CodeGenFunction &CGF,
                                           const CastExpr *E,
                                           llvm::Value *Src) override;
  llvm::Constant *EmitMemberPointerConversion(const CastExpr *E,
                                              llvm::Constant *Src) override;

  llvm::Constant *EmitNullMemberPointer(const MemberPointerType *MPT) override;

  llvm::Constant *EmitMemberFunctionPointer(const CXXMethodDecl *MD) override;
  llvm::Constant *EmitMemberDataPointer(const MemberPointerType *MPT,
                                        CharUnits offset) override;
  llvm::Constant *EmitMemberPointer(const APValue &MP, QualType MPT) override;
  llvm::Constant *BuildMemberPointer(const CXXMethodDecl *MD,
                                     CharUnits ThisAdjustment);

  llvm::Value *EmitMemberPointerComparison(CodeGenFunction &CGF,
                                           llvm::Value *L, llvm::Value *R,
                                           const MemberPointerType *MPT,
                                           bool Inequality) override;

  llvm::Value *EmitMemberPointerIsNotNull(CodeGenFunction &CGF,
                                          llvm::Value *Addr,
                                          const MemberPointerType *MPT) override;

  void emitVirtualObjectDelete(CodeGenFunction &CGF, const CXXDeleteExpr *DE,
                               Address Ptr, QualType ElementType,
                               const CXXDestructorDecl *Dtor) override;

  void emitRethrow(CodeGenFunction &CGF, bool isNoReturn) override;
  void emitThrow(CodeGenFunction &CGF, const CXXThrowExpr *E) override;

  void emitBeginCatch(CodeGenFunction &CGF, const CXXCatchStmt *C) override;

  llvm::CallInst *
  emitTerminateForUnexpectedException(CodeGenFunction &CGF,
                                      llvm::Value *Exn) override;

  void EmitFundamentalRTTIDescriptors(const CXXRecordDecl *RD);
  llvm::Constant *getAddrOfRTTIDescriptor(QualType Ty) override;
  CatchTypeInfo
  getAddrOfCXXCatchHandlerType(QualType Ty,
                               QualType CatchHandlerType) override {
    return CatchTypeInfo{getAddrOfRTTIDescriptor(Ty), 0};
  }

  bool shouldTypeidBeNullChecked(bool IsDeref, QualType SrcRecordTy) override;
  void EmitBadTypeidCall(CodeGenFunction &CGF) override;
  llvm::Value *EmitTypeid(CodeGenFunction &CGF, QualType SrcRecordTy,
                          Address ThisPtr,
                          llvm::Type *StdTypeInfoPtrTy) override;

  bool shouldDynamicCastCallBeNullChecked(bool SrcIsPtr,
                                          QualType SrcRecordTy) override;

  llvm::Value *EmitDynamicCastCall(CodeGenFunction &CGF, Address Value,
                                   QualType SrcRecordTy, QualType DestTy,
                                   QualType DestRecordTy,
                                   llvm::BasicBlock *CastEnd) override;

  llvm::Value *EmitDynamicCastToVoid(CodeGenFunction &CGF, Address Value,
                                     QualType SrcRecordTy,
                                     QualType DestTy) override;

  bool EmitBadCastCall(CodeGenFunction &CGF) override;

  llvm::Value *
    GetVirtualBaseClassOffset(CodeGenFunction &CGF, Address This,
                              const CXXRecordDecl *ClassDecl,
                              const CXXRecordDecl *BaseClassDecl) override;

  void EmitCXXConstructors(const CXXConstructorDecl *D) override;

  AddedStructorArgCounts
  buildStructorSignature(GlobalDecl GD,
                         SmallVectorImpl<CanQualType> &ArgTys) override;

  bool useThunkForDtorVariant(const CXXDestructorDecl *Dtor,
                              CXXDtorType DT) const override {
    // Itanium does not emit any destructor variant as an inline thunk.
    // Delegating may occur as an optimization, but all variants are either
    // emitted with external linkage or as linkonce if they are inline and used.
    return false;
  }

  void EmitCXXDestructors(const CXXDestructorDecl *D) override;

  void addImplicitStructorParams(CodeGenFunction &CGF, QualType &ResTy,
                                 FunctionArgList &Params) override;

  void EmitInstanceFunctionProlog(CodeGenFunction &CGF) override;

  AddedStructorArgs getImplicitConstructorArgs(CodeGenFunction &CGF,
                                               const CXXConstructorDecl *D,
                                               CXXCtorType Type,
                                               bool ForVirtualBase,
                                               bool Delegating) override;

  llvm::Value *getCXXDestructorImplicitParam(CodeGenFunction &CGF,
                                             const CXXDestructorDecl *DD,
                                             CXXDtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating) override;

  void EmitDestructorCall(CodeGenFunction &CGF, const CXXDestructorDecl *DD,
                          CXXDtorType Type, bool ForVirtualBase,
                          bool Delegating, Address This,
                          QualType ThisTy) override;

  void emitVTableDefinitions(CodeGenVTables &CGVT,
                             const CXXRecordDecl *RD) override;

  bool isVirtualOffsetNeededForVTableField(CodeGenFunction &CGF,
                                           CodeGenFunction::VPtr Vptr) override;

  bool doStructorsInitializeVPtrs(const CXXRecordDecl *VTableClass) override {
    return true;
  }

  llvm::Constant *
  getVTableAddressPoint(BaseSubobject Base,
                        const CXXRecordDecl *VTableClass) override;

  llvm::Value *getVTableAddressPointInStructor(
      CodeGenFunction &CGF, const CXXRecordDecl *VTableClass,
      BaseSubobject Base, const CXXRecordDecl *NearestVBase) override;

  llvm::Value *getVTableAddressPointInStructorWithVTT(
      CodeGenFunction &CGF, const CXXRecordDecl *VTableClass,
      BaseSubobject Base, const CXXRecordDecl *NearestVBase);

  llvm::Constant *
  getVTableAddressPointForConstExpr(BaseSubobject Base,
                                    const CXXRecordDecl *VTableClass) override;

  llvm::GlobalVariable *getAddrOfVTable(const CXXRecordDecl *RD,
                                        CharUnits VPtrOffset) override;

  CGCallee getVirtualFunctionPointer(CodeGenFunction &CGF, GlobalDecl GD,
                                     Address This, llvm::Type *Ty,
                                     SourceLocation Loc) override;

  llvm::Value *EmitVirtualDestructorCall(CodeGenFunction &CGF,
                                         const CXXDestructorDecl *Dtor,
                                         CXXDtorType DtorType, Address This,
                                         DeleteOrMemberCallExpr E) override;

  void emitVirtualInheritanceTables(const CXXRecordDecl *RD) override;

  bool canSpeculativelyEmitVTable(const CXXRecordDecl *RD) const override;
  bool canSpeculativelyEmitVTableAsBaseClass(const CXXRecordDecl *RD) const;

  void setThunkLinkage(llvm::Function *Thunk, bool ForVTable, GlobalDecl GD,
                       bool ReturnAdjustment) override {
    // Allow inlining of thunks by emitting them with available_externally
    // linkage together with vtables when needed.
    if (ForVTable && !Thunk->hasLocalLinkage())
      Thunk->setLinkage(llvm::GlobalValue::AvailableExternallyLinkage);
    CGM.setGVProperties(Thunk, GD);
  }

  bool exportThunk() override { return true; }

  llvm::Value *performThisAdjustment(CodeGenFunction &CGF, Address This,
                                     const ThisAdjustment &TA) override;

  llvm::Value *performReturnAdjustment(CodeGenFunction &CGF, Address Ret,
                                       const ReturnAdjustment &RA) override;

  size_t getSrcArgforCopyCtor(const CXXConstructorDecl *,
                              FunctionArgList &Args) const override {
    assert(!Args.empty() && "expected the arglist to not be empty!");
    return Args.size() - 1;
  }

  StringRef GetPureVirtualCallName() override { return "__cxa_pure_virtual"; }
  StringRef GetDeletedVirtualCallName() override {
    return "__cxa_deleted_virtual";
  }

  CharUnits getArrayCookieSizeImpl(QualType elementType) override;
  Address InitializeArrayCookie(CodeGenFunction &CGF,
                                Address NewPtr,
                                llvm::Value *NumElements,
                                const CXXNewExpr *expr,
                                QualType ElementType) override;
  llvm::Value *readArrayCookieImpl(CodeGenFunction &CGF,
                                   Address allocPtr,
                                   CharUnits cookieSize) override;

  void EmitGuardedInit(CodeGenFunction &CGF, const VarDecl &D,
                       llvm::GlobalVariable *DeclPtr,
                       bool PerformInit) override;
  void registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
                          llvm::FunctionCallee dtor,
                          llvm::Constant *addr) override;

  llvm::Function *getOrCreateThreadLocalWrapper(const VarDecl *VD,
                                                llvm::Value *Val);
  void EmitThreadLocalInitFuncs(
      CodeGenModule &CGM,
      ArrayRef<const VarDecl *> CXXThreadLocals,
      ArrayRef<llvm::Function *> CXXThreadLocalInits,
      ArrayRef<const VarDecl *> CXXThreadLocalInitVars) override;

  /// Determine whether we will definitely emit this variable with a constant
  /// initializer, either because the language semantics demand it or because
  /// we know that the initializer is a constant.
  bool isEmittedWithConstantInitializer(const VarDecl *VD) const {
    VD = VD->getMostRecentDecl();
    if (VD->hasAttr<ConstInitAttr>())
      return true;

    // All later checks examine the initializer specified on the variable. If
    // the variable is weak, such examination would not be correct.
    if (VD->isWeak() || VD->hasAttr<SelectAnyAttr>())
      return false;

    const VarDecl *InitDecl = VD->getInitializingDeclaration();
    if (!InitDecl)
      return false;

    // If there's no initializer to run, this is constant initialization.
    if (!InitDecl->hasInit())
      return true;

    // If we have the only definition, we don't need a thread wrapper if we
    // will emit the value as a constant.
    if (isUniqueGVALinkage(getContext().GetGVALinkageForVariable(VD)))
      return !VD->needsDestruction(getContext()) && InitDecl->evaluateValue();

    // Otherwise, we need a thread wrapper unless we know that every
    // translation unit will emit the value as a constant. We rely on
    // ICE-ness not varying between translation units, which isn't actually
    // guaranteed by the standard but is necessary for sanity.
    return InitDecl->isInitKnownICE() && InitDecl->isInitICE();
  }

  bool usesThreadWrapperFunction(const VarDecl *VD) const override {
    return !isEmittedWithConstantInitializer(VD) ||
           VD->needsDestruction(getContext());
  }
  LValue EmitThreadLocalVarDeclLValue(CodeGenFunction &CGF, const VarDecl *VD,
                                      QualType LValType) override;

  bool NeedsVTTParameter(GlobalDecl GD) override;

  /**************************** RTTI Uniqueness ******************************/

protected:
  /// Returns true if the ABI requires RTTI type_info objects to be unique
  /// across a program.
  virtual bool shouldRTTIBeUnique() const { return true; }

public:
  /// What sort of unique-RTTI behavior should we use?
  enum RTTIUniquenessKind {
    /// We are guaranteeing, or need to guarantee, that the RTTI string
    /// is unique.
    RUK_Unique,

    /// We are not guaranteeing uniqueness for the RTTI string, so we
    /// can demote to hidden visibility but must use string comparisons.
    RUK_NonUniqueHidden,

    /// We are not guaranteeing uniqueness for the RTTI string, so we
    /// have to use string comparisons, but we also have to emit it with
    /// non-hidden visibility.
    RUK_NonUniqueVisible
  };

  /// Return the required visibility status for the given type and linkage in
  /// the current ABI.
  RTTIUniquenessKind
  classifyRTTIUniqueness(QualType CanTy,
                         llvm::GlobalValue::LinkageTypes Linkage) const;
  friend class ItaniumRTTIBuilder;

  void emitCXXStructor(GlobalDecl GD) override;

  std::pair<llvm::Value *, const CXXRecordDecl *>
  LoadVTablePtr(CodeGenFunction &CGF, Address This,
                const CXXRecordDecl *RD) override;

private:
  bool hasAnyUnusedVirtualInlineFunction(const CXXRecordDecl *RD) const {
    const auto &VtableLayout =
        CGM.getItaniumVTableContext().getVTableLayout(RD);

    for (const auto &VtableComponent : VtableLayout.vtable_components()) {
      // Skip empty slot.
      if (!VtableComponent.isUsedFunctionPointerKind())
        continue;

      const CXXMethodDecl *Method = VtableComponent.getFunctionDecl();
      if (!Method->getCanonicalDecl()->isInlined())
        continue;

      StringRef Name = CGM.getMangledName(VtableComponent.getGlobalDecl());
      auto *Entry = CGM.GetGlobalValue(Name);
      // This checks if virtual inline function has already been emitted.
      // Note that it is possible that this inline function would be emitted
      // after trying to emit vtable speculatively. Because of this we do
      // an extra pass after emitting all deferred vtables to find and emit
      // these vtables opportunistically.
      if (!Entry || Entry->isDeclaration())
        return true;
    }
    return false;
  }

  bool isVTableHidden(const CXXRecordDecl *RD) const {
    const auto &VtableLayout =
        CGM.getItaniumVTableContext().getVTableLayout(RD);

    for (const auto &VtableComponent : VtableLayout.vtable_components()) {
      if (VtableComponent.isRTTIKind()) {
        const CXXRecordDecl *RTTIDecl = VtableComponent.getRTTIDecl();
        if (RTTIDecl->getVisibility() == Visibility::HiddenVisibility)
          return true;
      } else if (VtableComponent.isUsedFunctionPointerKind()) {
        const CXXMethodDecl *Method = VtableComponent.getFunctionDecl();
        if (Method->getVisibility() == Visibility::HiddenVisibility &&
            !Method->isDefined())
          return true;
      }
    }
    return false;
  }
};

/// ARM flavour of the Itanium ABI: enables the ARM member-pointer and
/// guard-variable encodings, and constructors/destructors (other than
/// deleting destructors) return 'this'.
class ARMCXXABI : public ItaniumCXXABI {
public:
  ARMCXXABI(CodeGen::CodeGenModule &CGM) :
    ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
                  /*UseARMGuardVarABI=*/true) {}

  bool HasThisReturn(GlobalDecl GD) const override {
    return (isa<CXXConstructorDecl>(GD.getDecl()) || (
              isa<CXXDestructorDecl>(GD.getDecl()) &&
              GD.getDtorType() != Dtor_Deleting));
  }

  void EmitReturnFromThunk(CodeGenFunction &CGF, RValue RV,
                           QualType ResTy) override;

  CharUnits getArrayCookieSizeImpl(QualType elementType) override;
  Address InitializeArrayCookie(CodeGenFunction &CGF,
                                Address NewPtr,
                                llvm::Value *NumElements,
                                const CXXNewExpr *expr,
                                QualType ElementType) override;
  llvm::Value *readArrayCookieImpl(CodeGenFunction &CGF, Address allocPtr,
                                   CharUnits cookieSize) override;
};

/// arm64 Apple variant: vtable offsets in member function pointers are
/// truncated to 32 bits (Use32BitVTableOffsetABI).
class iOS64CXXABI : public ARMCXXABI {
public:
  iOS64CXXABI(CodeGen::CodeGenModule &CGM) : ARMCXXABI(CGM) {
    Use32BitVTableOffsetABI = true;
  }

  // ARM64 libraries are prepared for non-unique RTTI.
  bool shouldRTTIBeUnique() const override { return false; }
};

/// Fuchsia: generic Itanium, except constructors/destructors (other than
/// deleting destructors) return 'this', as on ARM.
class FuchsiaCXXABI final : public ItaniumCXXABI {
public:
  explicit FuchsiaCXXABI(CodeGen::CodeGenModule &CGM)
      : ItaniumCXXABI(CGM) {}

private:
  bool HasThisReturn(GlobalDecl GD) const override {
    return isa<CXXConstructorDecl>(GD.getDecl()) ||
           (isa<CXXDestructorDecl>(GD.getDecl()) &&
            GD.getDtorType() != Dtor_Deleting);
  }
};

/// WebAssembly: ARM-style member pointers and guard variables,
/// 'this'-returning structors, and calls through mismatched function types
/// are reported as unsupported.
class WebAssemblyCXXABI final : public ItaniumCXXABI {
public:
  explicit WebAssemblyCXXABI(CodeGen::CodeGenModule &CGM)
      : ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
                      /*UseARMGuardVarABI=*/true) {}
  void emitBeginCatch(CodeGenFunction &CGF, const CXXCatchStmt *C) override;

private:
  bool HasThisReturn(GlobalDecl GD) const override {
    return isa<CXXConstructorDecl>(GD.getDecl()) ||
           (isa<CXXDestructorDecl>(GD.getDecl()) &&
            GD.getDtorType() != Dtor_Deleting);
  }
  bool canCallMismatchedFunctionType() const override { return false; }
};

/// AIX (XL) variant: uses sinit/sterm for static initialization and
/// termination.
class XLCXXABI final : public ItaniumCXXABI {
public:
  explicit XLCXXABI(CodeGen::CodeGenModule &CGM)
      : ItaniumCXXABI(CGM) {}

  void registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
                          llvm::FunctionCallee dtor,
                          llvm::Constant *addr) override;

  bool useSinitAndSterm() const override { return true; }

private:
  void emitCXXStermFinalizer(const VarDecl &D, llvm::Function *dtorStub,
                             llvm::Constant *addr);
};
}

/// Factory: select the Itanium-family ABI variant matching the target.
CodeGen::CGCXXABI *CodeGen::CreateItaniumCXXABI(CodeGenModule &CGM) {
  switch (CGM.getTarget().getCXXABI().getKind()) {
  // For IR-generation purposes, there's no significant difference
  // between the ARM and iOS ABIs.
  case TargetCXXABI::GenericARM:
  case TargetCXXABI::iOS:
  case TargetCXXABI::WatchOS:
    return new ARMCXXABI(CGM);

  case TargetCXXABI::iOS64:
    return new iOS64CXXABI(CGM);

  case TargetCXXABI::Fuchsia:
    return new FuchsiaCXXABI(CGM);

  // Note that AArch64 uses the generic ItaniumCXXABI class since it doesn't
  // include the other 32-bit ARM oddities: constructor/destructor return values
  // and array cookies.
  case TargetCXXABI::GenericAArch64:
    return new ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
                             /*UseARMGuardVarABI=*/true);

  case TargetCXXABI::GenericMIPS:
    return new ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true);

  case TargetCXXABI::WebAssembly:
    return new WebAssemblyCXXABI(CGM);

  case TargetCXXABI::XL:
    return new XLCXXABI(CGM);

  case TargetCXXABI::GenericItanium:
    if (CGM.getContext().getTargetInfo().getTriple().getArch()
        == llvm::Triple::le32) {
      // For PNaCl, use ARM-style method pointers so that PNaCl code
      // does not assume anything about the alignment of function
      // pointers.
      return new ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true);
    }
    return new ItaniumCXXABI(CGM);

  case TargetCXXABI::Microsoft:
    llvm_unreachable("Microsoft ABI is not Itanium-based");
  }
  llvm_unreachable("bad ABI kind");
}

llvm::Type *
ItaniumCXXABI::ConvertMemberPointerType(const MemberPointerType *MPT) {
  // Data member pointers are a bare ptrdiff_t; function member pointers are
  // the { ptr, adj } pair described below.
  if (MPT->isMemberDataPointer())
    return CGM.PtrDiffTy;
  return llvm::StructType::get(CGM.PtrDiffTy, CGM.PtrDiffTy);
}

/// In the Itanium and ARM ABIs, method pointers have the form:
///   struct { ptrdiff_t ptr; ptrdiff_t adj; } memptr;
///
/// In the Itanium ABI:
///  - method pointers are virtual if (memptr.ptr & 1) is nonzero
///  - the this-adjustment is (memptr.adj)
///  - the virtual offset is (memptr.ptr - 1)
///
/// In the ARM ABI:
///  - method pointers are virtual if (memptr.adj & 1) is nonzero
///  - the this-adjustment is (memptr.adj >> 1)
///  - the virtual offset is (memptr.ptr)
/// ARM uses 'adj' for the virtual flag because Thumb functions
/// may be only single-byte aligned.
///
/// If the member is virtual, the adjusted 'this' pointer points
/// to a vtable pointer from which the virtual offset is applied.
///
/// If the member is non-virtual, memptr.ptr is the address of
/// the function to call.
/// Turn a member function pointer plus an object address into a callable
/// function pointer, emitting the virtual/non-virtual branch described in
/// the representation comment above. On return, ThisPtrForCall holds the
/// adjusted 'this' pointer to pass to the call.
CGCallee ItaniumCXXABI::EmitLoadOfMemberFunctionPointer(
    CodeGenFunction &CGF, const Expr *E, Address ThisAddr,
    llvm::Value *&ThisPtrForCall,
    llvm::Value *MemFnPtr, const MemberPointerType *MPT) {
  CGBuilderTy &Builder = CGF.Builder;

  const FunctionProtoType *FPT =
    MPT->getPointeeType()->getAs<FunctionProtoType>();
  auto *RD =
      cast<CXXRecordDecl>(MPT->getClass()->castAs<RecordType>()->getDecl());

  llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(
      CGM.getTypes().arrangeCXXMethodType(RD, FPT, /*FD=*/nullptr));

  llvm::Constant *ptrdiff_1 = llvm::ConstantInt::get(CGM.PtrDiffTy, 1);

  llvm::BasicBlock *FnVirtual = CGF.createBasicBlock("memptr.virtual");
  llvm::BasicBlock *FnNonVirtual = CGF.createBasicBlock("memptr.nonvirtual");
  llvm::BasicBlock *FnEnd = CGF.createBasicBlock("memptr.end");

  // Extract memptr.adj, which is in the second field.
  llvm::Value *RawAdj = Builder.CreateExtractValue(MemFnPtr, 1, "memptr.adj");

  // Compute the true adjustment: on ARM the low bit of adj is the virtual
  // flag, so the real this-adjustment is adj >> 1.
  llvm::Value *Adj = RawAdj;
  if (UseARMMethodPtrABI)
    Adj = Builder.CreateAShr(Adj, ptrdiff_1, "memptr.adj.shifted");

  // Apply the adjustment and cast back to the original struct type
  // for consistency.
  llvm::Value *This = ThisAddr.getPointer();
  llvm::Value *Ptr = Builder.CreateBitCast(This, Builder.getInt8PtrTy());
  Ptr = Builder.CreateInBoundsGEP(Ptr, Adj);
  This = Builder.CreateBitCast(Ptr, This->getType(), "this.adjusted");
  ThisPtrForCall = This;

  // Load the function pointer.
  llvm::Value *FnAsInt = Builder.CreateExtractValue(MemFnPtr, 0, "memptr.ptr");

  // If the LSB in the function pointer is 1, the function pointer points to
  // a virtual function. (On ARM the flag lives in adj instead.)
  llvm::Value *IsVirtual;
  if (UseARMMethodPtrABI)
    IsVirtual = Builder.CreateAnd(RawAdj, ptrdiff_1);
  else
    IsVirtual = Builder.CreateAnd(FnAsInt, ptrdiff_1);
  IsVirtual = Builder.CreateIsNotNull(IsVirtual, "memptr.isvirtual");
  Builder.CreateCondBr(IsVirtual, FnVirtual, FnNonVirtual);

  // In the virtual path, the adjustment left 'This' pointing to the
  // vtable of the correct base subobject. The "function pointer" is an
  // offset within the vtable (+1 for the virtual flag on non-ARM).
  CGF.EmitBlock(FnVirtual);

  // Cast the adjusted this to a pointer to vtable pointer and load.
  llvm::Type *VTableTy = Builder.getInt8PtrTy();
  CharUnits VTablePtrAlign =
    CGF.CGM.getDynamicOffsetAlignment(ThisAddr.getAlignment(), RD,
                                      CGF.getPointerAlign());
  llvm::Value *VTable =
    CGF.GetVTablePtr(Address(This, VTablePtrAlign), VTableTy, RD);

  // Apply the offset.
  // On ARM64, to reserve extra space in virtual member function pointers,
  // we only pay attention to the low 32 bits of the offset.
  llvm::Value *VTableOffset = FnAsInt;
  if (!UseARMMethodPtrABI)
    VTableOffset = Builder.CreateSub(VTableOffset, ptrdiff_1);

  if (Use32BitVTableOffsetABI) {
    VTableOffset = Builder.CreateTrunc(VTableOffset, CGF.Int32Ty);
    VTableOffset = Builder.CreateZExt(VTableOffset, CGM.PtrDiffTy);
  }

  // Check the address of the function pointer if CFI on member function
  // pointers is enabled.
  llvm::Constant *CheckSourceLocation;
  llvm::Constant *CheckTypeDesc;
  bool ShouldEmitCFICheck = CGF.SanOpts.has(SanitizerKind::CFIMFCall) &&
                            CGM.HasHiddenLTOVisibility(RD);
  bool ShouldEmitVFEInfo = CGM.getCodeGenOpts().VirtualFunctionElimination &&
                           CGM.HasHiddenLTOVisibility(RD);
  bool ShouldEmitWPDInfo =
      CGM.getCodeGenOpts().WholeProgramVTables &&
      // Don't insert type tests if we are forcing public std visibility.
      !CGM.HasLTOVisibilityPublicStd(RD);
  llvm::Value *VirtualFn = nullptr;

  {
    CodeGenFunction::SanitizerScope SanScope(&CGF);
    llvm::Value *TypeId = nullptr;
    llvm::Value *CheckResult = nullptr;

    if (ShouldEmitCFICheck || ShouldEmitVFEInfo || ShouldEmitWPDInfo) {
      // If doing CFI, VFE or WPD, we will need the metadata node to check
      // against.
      llvm::Metadata *MD =
          CGM.CreateMetadataIdentifierForVirtualMemPtrType(QualType(MPT, 0));
      TypeId = llvm::MetadataAsValue::get(CGF.getLLVMContext(), MD);
    }

    if (ShouldEmitVFEInfo) {
      llvm::Value *VFPAddr = Builder.CreateGEP(VTable, VTableOffset);

      // If doing VFE, load from the vtable with a type.checked.load intrinsic
      // call. Note that we use the GEP to calculate the address to load from
      // and pass 0 as the offset to the intrinsic. This is because every
      // vtable slot of the correct type is marked with matching metadata, and
      // we know that the load must be from one of these slots.
      llvm::Value *CheckedLoad = Builder.CreateCall(
          CGM.getIntrinsic(llvm::Intrinsic::type_checked_load),
          {VFPAddr, llvm::ConstantInt::get(CGM.Int32Ty, 0), TypeId});
      CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);
      VirtualFn = Builder.CreateExtractValue(CheckedLoad, 0);
      VirtualFn = Builder.CreateBitCast(VirtualFn, FTy->getPointerTo(),
                                        "memptr.virtualfn");
    } else {
      // When not doing VFE, emit a normal load, as it allows more
      // optimisations than type.checked.load.
      if (ShouldEmitCFICheck || ShouldEmitWPDInfo) {
        llvm::Value *VFPAddr = Builder.CreateGEP(VTable, VTableOffset);
        CheckResult = Builder.CreateCall(
            CGM.getIntrinsic(llvm::Intrinsic::type_test),
            {Builder.CreateBitCast(VFPAddr, CGF.Int8PtrTy), TypeId});
      }

      if (CGM.getItaniumVTableContext().isRelativeLayout()) {
        // Relative vtable layout: slots hold 32-bit offsets resolved via
        // llvm.load.relative.
        VirtualFn = CGF.Builder.CreateCall(
            CGM.getIntrinsic(llvm::Intrinsic::load_relative,
                             {VTableOffset->getType()}),
            {VTable, VTableOffset});
        VirtualFn = CGF.Builder.CreateBitCast(VirtualFn, FTy->getPointerTo());
      } else {
        llvm::Value *VFPAddr = CGF.Builder.CreateGEP(VTable, VTableOffset);
        VFPAddr = CGF.Builder.CreateBitCast(
            VFPAddr, FTy->getPointerTo()->getPointerTo());
        VirtualFn = CGF.Builder.CreateAlignedLoad(VFPAddr,
                                                  CGF.getPointerAlign(),
                                                  "memptr.virtualfn");
      }
    }
    // (sic: "fuction" typo in this assert message predates this review;
    // preserved to keep the change comment-only.)
    assert(VirtualFn && "Virtual fuction pointer not created!");
    assert((!ShouldEmitCFICheck || !ShouldEmitVFEInfo || !ShouldEmitWPDInfo ||
            CheckResult) &&
           "Check result required but not created!");

    if (ShouldEmitCFICheck) {
      // If doing CFI, emit the check.
      CheckSourceLocation = CGF.EmitCheckSourceLocation(E->getBeginLoc());
      CheckTypeDesc = CGF.EmitCheckTypeDescriptor(QualType(MPT, 0));
      llvm::Constant *StaticData[] = {
          llvm::ConstantInt::get(CGF.Int8Ty, CodeGenFunction::CFITCK_VMFCall),
          CheckSourceLocation,
          CheckTypeDesc,
      };

      if (CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIMFCall)) {
        CGF.EmitTrapCheck(CheckResult);
      } else {
        llvm::Value *AllVtables = llvm::MetadataAsValue::get(
            CGM.getLLVMContext(),
            llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
        llvm::Value *ValidVtable = Builder.CreateCall(
            CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, AllVtables});
        CGF.EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIMFCall),
                      SanitizerHandler::CFICheckFail, StaticData,
                      {VTable, ValidVtable});
      }

      FnVirtual = Builder.GetInsertBlock();
    }
  } // End of sanitizer scope

  CGF.EmitBranch(FnEnd);

  // In the non-virtual path, the function pointer is actually a
  // function pointer.
  CGF.EmitBlock(FnNonVirtual);
  llvm::Value *NonVirtualFn =
    Builder.CreateIntToPtr(FnAsInt, FTy->getPointerTo(),
                           "memptr.nonvirtualfn");

  // Check the function pointer if CFI on member function pointers is enabled.
  if (ShouldEmitCFICheck) {
    CXXRecordDecl *RD = MPT->getClass()->getAsCXXRecordDecl();
    if (RD->hasDefinition()) {
      CodeGenFunction::SanitizerScope SanScope(&CGF);

      llvm::Constant *StaticData[] = {
          llvm::ConstantInt::get(CGF.Int8Ty, CodeGenFunction::CFITCK_NVMFCall),
          CheckSourceLocation,
          CheckTypeDesc,
      };

      // The pointer is valid if its type matches any of RD's most-derived
      // base classes; OR the individual type tests together.
      llvm::Value *Bit = Builder.getFalse();
      llvm::Value *CastedNonVirtualFn =
          Builder.CreateBitCast(NonVirtualFn, CGF.Int8PtrTy);
      for (const CXXRecordDecl *Base : CGM.getMostBaseClasses(RD)) {
        llvm::Metadata *MD = CGM.CreateMetadataIdentifierForType(
            getContext().getMemberPointerType(
                MPT->getPointeeType(),
                getContext().getRecordType(Base).getTypePtr()));
        llvm::Value *TypeId =
            llvm::MetadataAsValue::get(CGF.getLLVMContext(), MD);

        llvm::Value *TypeTest =
            Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::type_test),
                               {CastedNonVirtualFn, TypeId});
        Bit = Builder.CreateOr(Bit, TypeTest);
      }

      CGF.EmitCheck(std::make_pair(Bit, SanitizerKind::CFIMFCall),
                    SanitizerHandler::CFICheckFail, StaticData,
                    {CastedNonVirtualFn, llvm::UndefValue::get(CGF.IntPtrTy)});

      FnNonVirtual = Builder.GetInsertBlock();
    }
  }

  // We're done.
  CGF.EmitBlock(FnEnd);
  llvm::PHINode *CalleePtr = Builder.CreatePHI(FTy->getPointerTo(), 2);
  CalleePtr->addIncoming(VirtualFn, FnVirtual);
  CalleePtr->addIncoming(NonVirtualFn, FnNonVirtual);

  CGCallee Callee(FPT, CalleePtr);
  return Callee;
}

/// Compute an l-value by applying the given pointer-to-member to a
/// base object.
llvm::Value *ItaniumCXXABI::EmitMemberDataPointerAddress(
    CodeGenFunction &CGF, const Expr *E, Address Base, llvm::Value *MemPtr,
    const MemberPointerType *MPT) {
  assert(MemPtr->getType() == CGM.PtrDiffTy);

  CGBuilderTy &Builder = CGF.Builder;

  // Cast to char*.
Base = Builder.CreateElementBitCast(Base, CGF.Int8Ty); // Apply the offset, which we assume is non-null. llvm::Value *Addr = Builder.CreateInBoundsGEP(Base.getPointer(), MemPtr, "memptr.offset"); // Cast the address to the appropriate pointer type, adopting the // address space of the base pointer. llvm::Type *PType = CGF.ConvertTypeForMem(MPT->getPointeeType()) ->getPointerTo(Base.getAddressSpace()); return Builder.CreateBitCast(Addr, PType); } /// Perform a bitcast, derived-to-base, or base-to-derived member pointer /// conversion. /// /// Bitcast conversions are always a no-op under Itanium. /// /// Obligatory offset/adjustment diagram: /// <-- offset --> <-- adjustment --> /// |--------------------------|----------------------|--------------------| /// ^Derived address point ^Base address point ^Member address point /// /// So when converting a base member pointer to a derived member pointer, /// we add the offset to the adjustment because the address point has /// decreased; and conversely, when converting a derived MP to a base MP /// we subtract the offset from the adjustment because the address point /// has increased. /// /// The standard forbids (at compile time) conversion to and from /// virtual bases, which is why we don't have to consider them here. /// /// The standard forbids (at run time) casting a derived MP to a base /// MP when the derived MP does not point to a member of the base. /// This is why -1 is a reasonable choice for null data member /// pointers. llvm::Value * ItaniumCXXABI::EmitMemberPointerConversion(CodeGenFunction &CGF, const CastExpr *E, llvm::Value *src) { assert(E->getCastKind() == CK_DerivedToBaseMemberPointer || E->getCastKind() == CK_BaseToDerivedMemberPointer || E->getCastKind() == CK_ReinterpretMemberPointer); // Under Itanium, reinterprets don't require any additional processing. if (E->getCastKind() == CK_ReinterpretMemberPointer) return src; // Use constant emission if we can. 
if (isa<llvm::Constant>(src)) return EmitMemberPointerConversion(E, cast<llvm::Constant>(src)); llvm::Constant *adj = getMemberPointerAdjustment(E); if (!adj) return src; CGBuilderTy &Builder = CGF.Builder; bool isDerivedToBase = (E->getCastKind() == CK_DerivedToBaseMemberPointer); const MemberPointerType *destTy = E->getType()->castAs<MemberPointerType>(); // For member data pointers, this is just a matter of adding the // offset if the source is non-null. if (destTy->isMemberDataPointer()) { llvm::Value *dst; if (isDerivedToBase) dst = Builder.CreateNSWSub(src, adj, "adj"); else dst = Builder.CreateNSWAdd(src, adj, "adj"); // Null check. llvm::Value *null = llvm::Constant::getAllOnesValue(src->getType()); llvm::Value *isNull = Builder.CreateICmpEQ(src, null, "memptr.isnull"); return Builder.CreateSelect(isNull, src, dst); } // The this-adjustment is left-shifted by 1 on ARM. if (UseARMMethodPtrABI) { uint64_t offset = cast<llvm::ConstantInt>(adj)->getZExtValue(); offset <<= 1; adj = llvm::ConstantInt::get(adj->getType(), offset); } llvm::Value *srcAdj = Builder.CreateExtractValue(src, 1, "src.adj"); llvm::Value *dstAdj; if (isDerivedToBase) dstAdj = Builder.CreateNSWSub(srcAdj, adj, "adj"); else dstAdj = Builder.CreateNSWAdd(srcAdj, adj, "adj"); return Builder.CreateInsertValue(src, dstAdj, 1); } llvm::Constant * ItaniumCXXABI::EmitMemberPointerConversion(const CastExpr *E, llvm::Constant *src) { assert(E->getCastKind() == CK_DerivedToBaseMemberPointer || E->getCastKind() == CK_BaseToDerivedMemberPointer || E->getCastKind() == CK_ReinterpretMemberPointer); // Under Itanium, reinterprets don't require any additional processing. if (E->getCastKind() == CK_ReinterpretMemberPointer) return src; // If the adjustment is trivial, we don't need to do anything. 
llvm::Constant *adj = getMemberPointerAdjustment(E); if (!adj) return src; bool isDerivedToBase = (E->getCastKind() == CK_DerivedToBaseMemberPointer); const MemberPointerType *destTy = E->getType()->castAs<MemberPointerType>(); // For member data pointers, this is just a matter of adding the // offset if the source is non-null. if (destTy->isMemberDataPointer()) { // null maps to null. if (src->isAllOnesValue()) return src; if (isDerivedToBase) return llvm::ConstantExpr::getNSWSub(src, adj); else return llvm::ConstantExpr::getNSWAdd(src, adj); } // The this-adjustment is left-shifted by 1 on ARM. if (UseARMMethodPtrABI) { uint64_t offset = cast<llvm::ConstantInt>(adj)->getZExtValue(); offset <<= 1; adj = llvm::ConstantInt::get(adj->getType(), offset); } llvm::Constant *srcAdj = llvm::ConstantExpr::getExtractValue(src, 1); llvm::Constant *dstAdj; if (isDerivedToBase) dstAdj = llvm::ConstantExpr::getNSWSub(srcAdj, adj); else dstAdj = llvm::ConstantExpr::getNSWAdd(srcAdj, adj); return llvm::ConstantExpr::getInsertValue(src, dstAdj, 1); } llvm::Constant * ItaniumCXXABI::EmitNullMemberPointer(const MemberPointerType *MPT) { // Itanium C++ ABI 2.3: // A NULL pointer is represented as -1. 
  if (MPT->isMemberDataPointer())
    return llvm::ConstantInt::get(CGM.PtrDiffTy, -1ULL, /*isSigned=*/true);

  // Null member *function* pointer: a {ptr, adj} pair with ptr == 0,
  // which is the null discriminator for function pointers under Itanium.
  llvm::Constant *Zero = llvm::ConstantInt::get(CGM.PtrDiffTy, 0);
  llvm::Constant *Values[2] = { Zero, Zero };
  return llvm::ConstantStruct::getAnon(Values);
}

/// Emit the constant form of a pointer to data member: the byte offset
/// of the member within the class, as a ptrdiff_t.
llvm::Constant *
ItaniumCXXABI::EmitMemberDataPointer(const MemberPointerType *MPT,
                                     CharUnits offset) {
  // Itanium C++ ABI 2.3:
  //   A pointer to data member is an offset from the base address of
  //   the class object containing it, represented as a ptrdiff_t
  return llvm::ConstantInt::get(CGM.PtrDiffTy, offset.getQuantity());
}

/// Emit the constant form of a pointer to member function with no
/// this-adjustment (the common, non-derived-to-base case).
llvm::Constant *
ItaniumCXXABI::EmitMemberFunctionPointer(const CXXMethodDecl *MD) {
  return BuildMemberPointer(MD, CharUnits::Zero());
}

/// Build the two-field {ptr, adj} constant representing a pointer to
/// member function MD, folding ThisAdjustment (in bytes) into the
/// adjustment field.  The encoding of the two fields differs between
/// the generic Itanium ABI and the ARM variant (see inline comments).
llvm::Constant *ItaniumCXXABI::BuildMemberPointer(const CXXMethodDecl *MD,
                                                  CharUnits ThisAdjustment) {
  assert(MD->isInstance() && "Member function must not be static!");

  CodeGenTypes &Types = CGM.getTypes();

  // Get the function pointer (or index if this is a virtual function).
  llvm::Constant *MemPtr[2];
  if (MD->isVirtual()) {
    uint64_t Index = CGM.getItaniumVTableContext().getMethodVTableIndex(MD);

    // Scale the vtable slot index to a byte offset within the vtable.
    uint64_t VTableOffset;
    if (CGM.getItaniumVTableContext().isRelativeLayout()) {
      // Multiply by 4-byte relative offsets.
      VTableOffset = Index * 4;
    } else {
      const ASTContext &Context = getContext();
      CharUnits PointerWidth = Context.toCharUnitsFromBits(
          Context.getTargetInfo().getPointerWidth(0));
      VTableOffset = Index * PointerWidth.getQuantity();
    }

    if (UseARMMethodPtrABI) {
      // ARM C++ ABI 3.2.1:
      //   This ABI specifies that adj contains twice the this
      //   adjustment, plus 1 if the member function is virtual. The
      //   least significant bit of adj then makes exactly the same
      //   discrimination as the least significant bit of ptr does for
      //   Itanium.
MemPtr[0] = llvm::ConstantInt::get(CGM.PtrDiffTy, VTableOffset); MemPtr[1] = llvm::ConstantInt::get(CGM.PtrDiffTy, 2 * ThisAdjustment.getQuantity() + 1); } else { // Itanium C++ ABI 2.3: // For a virtual function, [the pointer field] is 1 plus the // virtual table offset (in bytes) of the function, // represented as a ptrdiff_t. MemPtr[0] = llvm::ConstantInt::get(CGM.PtrDiffTy, VTableOffset + 1); MemPtr[1] = llvm::ConstantInt::get(CGM.PtrDiffTy, ThisAdjustment.getQuantity()); } } else { const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>(); llvm::Type *Ty; // Check whether the function has a computable LLVM signature. if (Types.isFuncTypeConvertible(FPT)) { // The function has a computable LLVM signature; use the correct type. Ty = Types.GetFunctionType(Types.arrangeCXXMethodDeclaration(MD)); } else { // Use an arbitrary non-function type to tell GetAddrOfFunction that the // function type is incomplete. Ty = CGM.PtrDiffTy; } llvm::Constant *addr = CGM.GetAddrOfFunction(MD, Ty); MemPtr[0] = llvm::ConstantExpr::getPtrToInt(addr, CGM.PtrDiffTy); MemPtr[1] = llvm::ConstantInt::get(CGM.PtrDiffTy, (UseARMMethodPtrABI ? 2 : 1) * ThisAdjustment.getQuantity()); } return llvm::ConstantStruct::getAnon(MemPtr); } llvm::Constant *ItaniumCXXABI::EmitMemberPointer(const APValue &MP, QualType MPType) { const MemberPointerType *MPT = MPType->castAs<MemberPointerType>(); const ValueDecl *MPD = MP.getMemberPointerDecl(); if (!MPD) return EmitNullMemberPointer(MPT); CharUnits ThisAdjustment = getMemberPointerPathAdjustment(MP); if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MPD)) return BuildMemberPointer(MD, ThisAdjustment); CharUnits FieldOffset = getContext().toCharUnitsFromBits(getContext().getFieldOffset(MPD)); return EmitMemberDataPointer(MPT, ThisAdjustment + FieldOffset); } /// The comparison algorithm is pretty easy: the member pointers are /// the same if they're either bitwise identical *or* both null. 
///
/// ARM is different here only because null-ness is more complicated.
///
/// \param L, R   the two member pointers, as LLVM values (a ptrdiff_t for
///               data members, a {ptr, adj} aggregate for functions).
/// \param Inequality  true to emit `!=` instead of `==`; the same circuit
///               is emitted with predicates/connectives swapped per
///               De Morgan's laws.
llvm::Value *
ItaniumCXXABI::EmitMemberPointerComparison(CodeGenFunction &CGF,
                                           llvm::Value *L,
                                           llvm::Value *R,
                                           const MemberPointerType *MPT,
                                           bool Inequality) {
  CGBuilderTy &Builder = CGF.Builder;

  // Select predicate and connectives once; for inequality the roles of
  // And/Or are exchanged so the rest of the function is shared.
  llvm::ICmpInst::Predicate Eq;
  llvm::Instruction::BinaryOps And, Or;
  if (Inequality) {
    Eq = llvm::ICmpInst::ICMP_NE;
    And = llvm::Instruction::Or;
    Or = llvm::Instruction::And;
  } else {
    Eq = llvm::ICmpInst::ICMP_EQ;
    And = llvm::Instruction::And;
    Or = llvm::Instruction::Or;
  }

  // Member data pointers are easy because there's a unique null
  // value, so it just comes down to bitwise equality.
  if (MPT->isMemberDataPointer())
    return Builder.CreateICmp(Eq, L, R);

  // For member function pointers, the tautologies are more complex.
  // The Itanium tautology is:
  //   (L == R) <==> (L.ptr == R.ptr && (L.ptr == 0 || L.adj == R.adj))
  // The ARM tautology is:
  //   (L == R) <==> (L.ptr == R.ptr &&
  //                  (L.adj == R.adj ||
  //                   (L.ptr == 0 && ((L.adj|R.adj) & 1) == 0)))
  // The inequality tautologies have exactly the same structure, except
  // applying De Morgan's laws.

  llvm::Value *LPtr = Builder.CreateExtractValue(L, 0, "lhs.memptr.ptr");
  llvm::Value *RPtr = Builder.CreateExtractValue(R, 0, "rhs.memptr.ptr");

  // This condition tests whether L.ptr == R.ptr.  This must always be
  // true for equality to hold.
  llvm::Value *PtrEq = Builder.CreateICmp(Eq, LPtr, RPtr, "cmp.ptr");

  // This condition, together with the assumption that L.ptr == R.ptr,
  // tests whether the pointers are both null.  ARM imposes an extra
  // condition.
  llvm::Value *Zero = llvm::Constant::getNullValue(LPtr->getType());
  llvm::Value *EqZero = Builder.CreateICmp(Eq, LPtr, Zero, "cmp.ptr.null");

  // This condition tests whether L.adj == R.adj.  If this isn't
  // true, the pointers are unequal unless they're both null.
  llvm::Value *LAdj = Builder.CreateExtractValue(L, 1, "lhs.memptr.adj");
  llvm::Value *RAdj = Builder.CreateExtractValue(R, 1, "rhs.memptr.adj");
  llvm::Value *AdjEq = Builder.CreateICmp(Eq, LAdj, RAdj, "cmp.adj");

  // Null member function pointers on ARM clear the low bit of Adj,
  // so the zero condition has to check that neither low bit is set.
  if (UseARMMethodPtrABI) {
    llvm::Value *One = llvm::ConstantInt::get(LPtr->getType(), 1);

    // Compute (l.adj | r.adj) & 1 and test it against zero.
    llvm::Value *OrAdj = Builder.CreateOr(LAdj, RAdj, "or.adj");
    llvm::Value *OrAdjAnd1 = Builder.CreateAnd(OrAdj, One);
    llvm::Value *OrAdjAnd1EqZero =
      Builder.CreateICmp(Eq, OrAdjAnd1, Zero, "cmp.or.adj");
    EqZero = Builder.CreateBinOp(And, EqZero, OrAdjAnd1EqZero);
  }

  // Tie together all our conditions.
  llvm::Value *Result = Builder.CreateBinOp(Or, EqZero, AdjEq);
  Result = Builder.CreateBinOp(And, PtrEq, Result,
                               Inequality ? "memptr.ne" : "memptr.eq");
  return Result;
}

/// Emit the boolean test used when a member pointer appears in a
/// boolean context: "is this member pointer non-null?"
llvm::Value *
ItaniumCXXABI::EmitMemberPointerIsNotNull(CodeGenFunction &CGF,
                                          llvm::Value *MemPtr,
                                          const MemberPointerType *MPT) {
  CGBuilderTy &Builder = CGF.Builder;

  /// For member data pointers, this is just a check against -1.
  if (MPT->isMemberDataPointer()) {
    assert(MemPtr->getType() == CGM.PtrDiffTy);
    llvm::Value *NegativeOne =
      llvm::Constant::getAllOnesValue(MemPtr->getType());
    return Builder.CreateICmpNE(MemPtr, NegativeOne, "memptr.tobool");
  }

  // In Itanium, a member function pointer is not null if 'ptr' is not null.
  llvm::Value *Ptr = Builder.CreateExtractValue(MemPtr, 0, "memptr.ptr");
  llvm::Constant *Zero = llvm::ConstantInt::get(Ptr->getType(), 0);
  llvm::Value *Result = Builder.CreateICmpNE(Ptr, Zero, "memptr.tobool");

  // On ARM, a member function pointer is also non-null if the low bit of 'adj'
  // (the virtual bit) is set.
if (UseARMMethodPtrABI) { llvm::Constant *One = llvm::ConstantInt::get(Ptr->getType(), 1); llvm::Value *Adj = Builder.CreateExtractValue(MemPtr, 1, "memptr.adj"); llvm::Value *VirtualBit = Builder.CreateAnd(Adj, One, "memptr.virtualbit"); llvm::Value *IsVirtual = Builder.CreateICmpNE(VirtualBit, Zero, "memptr.isvirtual"); Result = Builder.CreateOr(Result, IsVirtual); } return Result; } bool ItaniumCXXABI::classifyReturnType(CGFunctionInfo &FI) const { const CXXRecordDecl *RD = FI.getReturnType()->getAsCXXRecordDecl(); if (!RD) return false; // If C++ prohibits us from making a copy, return by address. if (!RD->canPassInRegisters()) { auto Align = CGM.getContext().getTypeAlignInChars(FI.getReturnType()); FI.getReturnInfo() = ABIArgInfo::getIndirect(Align, /*ByVal=*/false); return true; } return false; } /// The Itanium ABI requires non-zero initialization only for data /// member pointers, for which '0' is a valid offset. bool ItaniumCXXABI::isZeroInitializable(const MemberPointerType *MPT) { return MPT->isMemberFunctionPointer(); } /// The Itanium ABI always places an offset to the complete object /// at entry -2 in the vtable. void ItaniumCXXABI::emitVirtualObjectDelete(CodeGenFunction &CGF, const CXXDeleteExpr *DE, Address Ptr, QualType ElementType, const CXXDestructorDecl *Dtor) { bool UseGlobalDelete = DE->isGlobalDelete(); if (UseGlobalDelete) { // Derive the complete-object pointer, which is what we need // to pass to the deallocation function. // Grab the vtable pointer as an intptr_t*. auto *ClassDecl = cast<CXXRecordDecl>(ElementType->castAs<RecordType>()->getDecl()); llvm::Value *VTable = CGF.GetVTablePtr(Ptr, CGF.IntPtrTy->getPointerTo(), ClassDecl); // Track back to entry -2 and pull out the offset there. llvm::Value *OffsetPtr = CGF.Builder.CreateConstInBoundsGEP1_64( VTable, -2, "complete-offset.ptr"); llvm::Value *Offset = CGF.Builder.CreateAlignedLoad(OffsetPtr, CGF.getPointerAlign()); // Apply the offset. 
llvm::Value *CompletePtr = CGF.Builder.CreateBitCast(Ptr.getPointer(), CGF.Int8PtrTy); CompletePtr = CGF.Builder.CreateInBoundsGEP(CompletePtr, Offset); // If we're supposed to call the global delete, make sure we do so // even if the destructor throws. CGF.pushCallObjectDeleteCleanup(DE->getOperatorDelete(), CompletePtr, ElementType); } // FIXME: Provide a source location here even though there's no // CXXMemberCallExpr for dtor call. CXXDtorType DtorType = UseGlobalDelete ? Dtor_Complete : Dtor_Deleting; EmitVirtualDestructorCall(CGF, Dtor, DtorType, Ptr, DE); if (UseGlobalDelete) CGF.PopCleanupBlock(); } void ItaniumCXXABI::emitRethrow(CodeGenFunction &CGF, bool isNoReturn) { // void __cxa_rethrow(); llvm::FunctionType *FTy = llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false); llvm::FunctionCallee Fn = CGM.CreateRuntimeFunction(FTy, "__cxa_rethrow"); if (isNoReturn) CGF.EmitNoreturnRuntimeCallOrInvoke(Fn, None); else CGF.EmitRuntimeCallOrInvoke(Fn); } static llvm::FunctionCallee getAllocateExceptionFn(CodeGenModule &CGM) { // void *__cxa_allocate_exception(size_t thrown_size); llvm::FunctionType *FTy = llvm::FunctionType::get(CGM.Int8PtrTy, CGM.SizeTy, /*isVarArg=*/false); return CGM.CreateRuntimeFunction(FTy, "__cxa_allocate_exception"); } static llvm::FunctionCallee getThrowFn(CodeGenModule &CGM) { // void __cxa_throw(void *thrown_exception, std::type_info *tinfo, // void (*dest) (void *)); llvm::Type *Args[3] = { CGM.Int8PtrTy, CGM.Int8PtrTy, CGM.Int8PtrTy }; llvm::FunctionType *FTy = llvm::FunctionType::get(CGM.VoidTy, Args, /*isVarArg=*/false); return CGM.CreateRuntimeFunction(FTy, "__cxa_throw"); } void ItaniumCXXABI::emitThrow(CodeGenFunction &CGF, const CXXThrowExpr *E) { QualType ThrowType = E->getSubExpr()->getType(); // Now allocate the exception object. 
llvm::Type *SizeTy = CGF.ConvertType(getContext().getSizeType()); uint64_t TypeSize = getContext().getTypeSizeInChars(ThrowType).getQuantity(); llvm::FunctionCallee AllocExceptionFn = getAllocateExceptionFn(CGM); llvm::CallInst *ExceptionPtr = CGF.EmitNounwindRuntimeCall( AllocExceptionFn, llvm::ConstantInt::get(SizeTy, TypeSize), "exception"); CharUnits ExnAlign = CGF.getContext().getExnObjectAlignment(); CGF.EmitAnyExprToExn(E->getSubExpr(), Address(ExceptionPtr, ExnAlign)); // Now throw the exception. llvm::Constant *TypeInfo = CGM.GetAddrOfRTTIDescriptor(ThrowType, /*ForEH=*/true); // The address of the destructor. If the exception type has a // trivial destructor (or isn't a record), we just pass null. llvm::Constant *Dtor = nullptr; if (const RecordType *RecordTy = ThrowType->getAs<RecordType>()) { CXXRecordDecl *Record = cast<CXXRecordDecl>(RecordTy->getDecl()); if (!Record->hasTrivialDestructor()) { CXXDestructorDecl *DtorD = Record->getDestructor(); Dtor = CGM.getAddrOfCXXStructor(GlobalDecl(DtorD, Dtor_Complete)); Dtor = llvm::ConstantExpr::getBitCast(Dtor, CGM.Int8PtrTy); } } if (!Dtor) Dtor = llvm::Constant::getNullValue(CGM.Int8PtrTy); llvm::Value *args[] = { ExceptionPtr, TypeInfo, Dtor }; CGF.EmitNoreturnRuntimeCallOrInvoke(getThrowFn(CGM), args); } static llvm::FunctionCallee getItaniumDynamicCastFn(CodeGenFunction &CGF) { // void *__dynamic_cast(const void *sub, // const abi::__class_type_info *src, // const abi::__class_type_info *dst, // std::ptrdiff_t src2dst_offset); llvm::Type *Int8PtrTy = CGF.Int8PtrTy; llvm::Type *PtrDiffTy = CGF.ConvertType(CGF.getContext().getPointerDiffType()); llvm::Type *Args[4] = { Int8PtrTy, Int8PtrTy, Int8PtrTy, PtrDiffTy }; llvm::FunctionType *FTy = llvm::FunctionType::get(Int8PtrTy, Args, false); // Mark the function as nounwind readonly. 
llvm::Attribute::AttrKind FuncAttrs[] = { llvm::Attribute::NoUnwind, llvm::Attribute::ReadOnly }; llvm::AttributeList Attrs = llvm::AttributeList::get( CGF.getLLVMContext(), llvm::AttributeList::FunctionIndex, FuncAttrs); return CGF.CGM.CreateRuntimeFunction(FTy, "__dynamic_cast", Attrs); } static llvm::FunctionCallee getBadCastFn(CodeGenFunction &CGF) { // void __cxa_bad_cast(); llvm::FunctionType *FTy = llvm::FunctionType::get(CGF.VoidTy, false); return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_bad_cast"); } /// Compute the src2dst_offset hint as described in the /// Itanium C++ ABI [2.9.7] static CharUnits computeOffsetHint(ASTContext &Context, const CXXRecordDecl *Src, const CXXRecordDecl *Dst) { CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true, /*DetectVirtual=*/false); // If Dst is not derived from Src we can skip the whole computation below and // return that Src is not a public base of Dst. Record all inheritance paths. if (!Dst->isDerivedFrom(Src, Paths)) return CharUnits::fromQuantity(-2ULL); unsigned NumPublicPaths = 0; CharUnits Offset; // Now walk all possible inheritance paths. for (const CXXBasePath &Path : Paths) { if (Path.Access != AS_public) // Ignore non-public inheritance. continue; ++NumPublicPaths; for (const CXXBasePathElement &PathElement : Path) { // If the path contains a virtual base class we can't give any hint. // -1: no hint. if (PathElement.Base->isVirtual()) return CharUnits::fromQuantity(-1ULL); if (NumPublicPaths > 1) // Won't use offsets, skip computation. continue; // Accumulate the base class offsets. const ASTRecordLayout &L = Context.getASTRecordLayout(PathElement.Class); Offset += L.getBaseClassOffset( PathElement.Base->getType()->getAsCXXRecordDecl()); } } // -2: Src is not a public base of Dst. if (NumPublicPaths == 0) return CharUnits::fromQuantity(-2ULL); // -3: Src is a multiple public base type but never a virtual base type. 
if (NumPublicPaths > 1) return CharUnits::fromQuantity(-3ULL); // Otherwise, the Src type is a unique public nonvirtual base type of Dst. // Return the offset of Src from the origin of Dst. return Offset; } static llvm::FunctionCallee getBadTypeidFn(CodeGenFunction &CGF) { // void __cxa_bad_typeid(); llvm::FunctionType *FTy = llvm::FunctionType::get(CGF.VoidTy, false); return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_bad_typeid"); } bool ItaniumCXXABI::shouldTypeidBeNullChecked(bool IsDeref, QualType SrcRecordTy) { return IsDeref; } void ItaniumCXXABI::EmitBadTypeidCall(CodeGenFunction &CGF) { llvm::FunctionCallee Fn = getBadTypeidFn(CGF); llvm::CallBase *Call = CGF.EmitRuntimeCallOrInvoke(Fn); Call->setDoesNotReturn(); CGF.Builder.CreateUnreachable(); } llvm::Value *ItaniumCXXABI::EmitTypeid(CodeGenFunction &CGF, QualType SrcRecordTy, Address ThisPtr, llvm::Type *StdTypeInfoPtrTy) { auto *ClassDecl = cast<CXXRecordDecl>(SrcRecordTy->castAs<RecordType>()->getDecl()); llvm::Value *Value = CGF.GetVTablePtr(ThisPtr, StdTypeInfoPtrTy->getPointerTo(), ClassDecl); if (CGM.getItaniumVTableContext().isRelativeLayout()) { // Load the type info. Value = CGF.Builder.CreateBitCast(Value, CGM.Int8PtrTy); Value = CGF.Builder.CreateCall( CGM.getIntrinsic(llvm::Intrinsic::load_relative, {CGM.Int32Ty}), {Value, llvm::ConstantInt::get(CGM.Int32Ty, -4)}); // Setup to dereference again since this is a proxy we accessed. Value = CGF.Builder.CreateBitCast(Value, StdTypeInfoPtrTy->getPointerTo()); } else { // Load the type info. 
    // Non-relative layout: the type_info pointer sits one slot before
    // the vtable's address point.
    Value = CGF.Builder.CreateConstInBoundsGEP1_64(Value, -1ULL);
  }
  return CGF.Builder.CreateAlignedLoad(Value, CGF.getPointerAlign());
}

/// A dynamic_cast on a pointer operand must tolerate a null source;
/// a cast on a glvalue (reference) operand never sees null.
bool ItaniumCXXABI::shouldDynamicCastCallBeNullChecked(bool SrcIsPtr,
                                                       QualType SrcRecordTy) {
  return SrcIsPtr;
}

/// Emit a call to the __dynamic_cast runtime function, passing the
/// source and destination type_info pointers plus a src-to-dst offset
/// hint (see computeOffsetHint).  For reference casts, a null result
/// branches to a block that calls __cxa_bad_cast.
llvm::Value *ItaniumCXXABI::EmitDynamicCastCall(
    CodeGenFunction &CGF, Address ThisAddr, QualType SrcRecordTy,
    QualType DestTy, QualType DestRecordTy, llvm::BasicBlock *CastEnd) {
  llvm::Type *PtrDiffLTy =
      CGF.ConvertType(CGF.getContext().getPointerDiffType());
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);

  llvm::Value *SrcRTTI =
      CGF.CGM.GetAddrOfRTTIDescriptor(SrcRecordTy.getUnqualifiedType());
  llvm::Value *DestRTTI =
      CGF.CGM.GetAddrOfRTTIDescriptor(DestRecordTy.getUnqualifiedType());

  // Compute the offset hint.
  const CXXRecordDecl *SrcDecl = SrcRecordTy->getAsCXXRecordDecl();
  const CXXRecordDecl *DestDecl = DestRecordTy->getAsCXXRecordDecl();
  llvm::Value *OffsetHint = llvm::ConstantInt::get(
      PtrDiffLTy,
      computeOffsetHint(CGF.getContext(), SrcDecl, DestDecl).getQuantity());

  // Emit the call to __dynamic_cast.
llvm::Value *Value = ThisAddr.getPointer(); Value = CGF.EmitCastToVoidPtr(Value); llvm::Value *args[] = {Value, SrcRTTI, DestRTTI, OffsetHint}; Value = CGF.EmitNounwindRuntimeCall(getItaniumDynamicCastFn(CGF), args); Value = CGF.Builder.CreateBitCast(Value, DestLTy); /// C++ [expr.dynamic.cast]p9: /// A failed cast to reference type throws std::bad_cast if (DestTy->isReferenceType()) { llvm::BasicBlock *BadCastBlock = CGF.createBasicBlock("dynamic_cast.bad_cast"); llvm::Value *IsNull = CGF.Builder.CreateIsNull(Value); CGF.Builder.CreateCondBr(IsNull, BadCastBlock, CastEnd); CGF.EmitBlock(BadCastBlock); EmitBadCastCall(CGF); } return Value; } llvm::Value *ItaniumCXXABI::EmitDynamicCastToVoid(CodeGenFunction &CGF, Address ThisAddr, QualType SrcRecordTy, QualType DestTy) { llvm::Type *DestLTy = CGF.ConvertType(DestTy); auto *ClassDecl = cast<CXXRecordDecl>(SrcRecordTy->castAs<RecordType>()->getDecl()); llvm::Value *OffsetToTop; if (CGM.getItaniumVTableContext().isRelativeLayout()) { // Get the vtable pointer. llvm::Value *VTable = CGF.GetVTablePtr(ThisAddr, CGM.Int32Ty->getPointerTo(), ClassDecl); // Get the offset-to-top from the vtable. OffsetToTop = CGF.Builder.CreateConstInBoundsGEP1_32(/*Type=*/nullptr, VTable, -2U); OffsetToTop = CGF.Builder.CreateAlignedLoad( OffsetToTop, CharUnits::fromQuantity(4), "offset.to.top"); } else { llvm::Type *PtrDiffLTy = CGF.ConvertType(CGF.getContext().getPointerDiffType()); // Get the vtable pointer. llvm::Value *VTable = CGF.GetVTablePtr(ThisAddr, PtrDiffLTy->getPointerTo(), ClassDecl); // Get the offset-to-top from the vtable. OffsetToTop = CGF.Builder.CreateConstInBoundsGEP1_64(VTable, -2ULL); OffsetToTop = CGF.Builder.CreateAlignedLoad( OffsetToTop, CGF.getPointerAlign(), "offset.to.top"); } // Finally, add the offset to the pointer. 
llvm::Value *Value = ThisAddr.getPointer(); Value = CGF.EmitCastToVoidPtr(Value); Value = CGF.Builder.CreateInBoundsGEP(Value, OffsetToTop); return CGF.Builder.CreateBitCast(Value, DestLTy); } bool ItaniumCXXABI::EmitBadCastCall(CodeGenFunction &CGF) { llvm::FunctionCallee Fn = getBadCastFn(CGF); llvm::CallBase *Call = CGF.EmitRuntimeCallOrInvoke(Fn); Call->setDoesNotReturn(); CGF.Builder.CreateUnreachable(); return true; } llvm::Value * ItaniumCXXABI::GetVirtualBaseClassOffset(CodeGenFunction &CGF, Address This, const CXXRecordDecl *ClassDecl, const CXXRecordDecl *BaseClassDecl) { llvm::Value *VTablePtr = CGF.GetVTablePtr(This, CGM.Int8PtrTy, ClassDecl); CharUnits VBaseOffsetOffset = CGM.getItaniumVTableContext().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl); llvm::Value *VBaseOffsetPtr = CGF.Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset.getQuantity(), "vbase.offset.ptr"); llvm::Value *VBaseOffset; if (CGM.getItaniumVTableContext().isRelativeLayout()) { VBaseOffsetPtr = CGF.Builder.CreateBitCast(VBaseOffsetPtr, CGF.Int32Ty->getPointerTo()); VBaseOffset = CGF.Builder.CreateAlignedLoad( VBaseOffsetPtr, CharUnits::fromQuantity(4), "vbase.offset"); } else { VBaseOffsetPtr = CGF.Builder.CreateBitCast(VBaseOffsetPtr, CGM.PtrDiffTy->getPointerTo()); VBaseOffset = CGF.Builder.CreateAlignedLoad( VBaseOffsetPtr, CGF.getPointerAlign(), "vbase.offset"); } return VBaseOffset; } void ItaniumCXXABI::EmitCXXConstructors(const CXXConstructorDecl *D) { // Just make sure we're in sync with TargetCXXABI. assert(CGM.getTarget().getCXXABI().hasConstructorVariants()); // The constructor used for constructing this as a base class; // ignores virtual bases. CGM.EmitGlobal(GlobalDecl(D, Ctor_Base)); // The constructor used for constructing this as a complete class; // constructs the virtual bases, then calls the base constructor. if (!D->getParent()->isAbstract()) { // We don't need to emit the complete ctor if the class is abstract. 
    CGM.EmitGlobal(GlobalDecl(D, Ctor_Complete));
  }
}

/// Add any ABI-specific implicit arguments to a constructor/destructor
/// signature.  Under Itanium the only such argument is the VTT (a
/// void**), inserted immediately after 'this' for base-object variants
/// of classes with virtual bases.
CGCXXABI::AddedStructorArgCounts
ItaniumCXXABI::buildStructorSignature(GlobalDecl GD,
                                      SmallVectorImpl<CanQualType> &ArgTys) {
  ASTContext &Context = getContext();

  // All parameters are already in place except VTT, which goes after 'this'.
  // These are Clang types, so we don't need to worry about sret yet.

  // Check if we need to add a VTT parameter (which has type void **).
  if ((isa<CXXConstructorDecl>(GD.getDecl()) ? GD.getCtorType() == Ctor_Base
                                             : GD.getDtorType() == Dtor_Base) &&
      cast<CXXMethodDecl>(GD.getDecl())->getParent()->getNumVBases() != 0) {
    ArgTys.insert(ArgTys.begin() + 1,
                  Context.getPointerType(Context.VoidPtrTy));
    return AddedStructorArgCounts::prefix(1);
  }
  return AddedStructorArgCounts{};
}

/// Emit the destructor variants required by the Itanium ABI.
void ItaniumCXXABI::EmitCXXDestructors(const CXXDestructorDecl *D) {
  // The destructor used for destructing this as a base class; ignores
  // virtual bases.
  CGM.EmitGlobal(GlobalDecl(D, Dtor_Base));

  // The destructor used for destructing this as a most-derived class;
  // calls the base destructor and then destroys any virtual bases.
  CGM.EmitGlobal(GlobalDecl(D, Dtor_Complete));

  // The destructor in a virtual table is always a 'deleting'
  // destructor, which calls the complete destructor and then uses the
  // appropriate operator delete.
  if (D->isVirtual())
    CGM.EmitGlobal(GlobalDecl(D, Dtor_Deleting));
}

/// Add the implicit VTT parameter to the in-progress parameter list of
/// the structor currently being emitted (CGF.CurGD), mirroring
/// buildStructorSignature's signature change.
void ItaniumCXXABI::addImplicitStructorParams(CodeGenFunction &CGF,
                                              QualType &ResTy,
                                              FunctionArgList &Params) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(CGF.CurGD.getDecl());
  assert(isa<CXXConstructorDecl>(MD) || isa<CXXDestructorDecl>(MD));

  // Check if we need a VTT parameter as well.
if (NeedsVTTParameter(CGF.CurGD)) { ASTContext &Context = getContext(); // FIXME: avoid the fake decl QualType T = Context.getPointerType(Context.VoidPtrTy); auto *VTTDecl = ImplicitParamDecl::Create( Context, /*DC=*/nullptr, MD->getLocation(), &Context.Idents.get("vtt"), T, ImplicitParamDecl::CXXVTT); Params.insert(Params.begin() + 1, VTTDecl); getStructorImplicitParamDecl(CGF) = VTTDecl; } } void ItaniumCXXABI::EmitInstanceFunctionProlog(CodeGenFunction &CGF) { // Naked functions have no prolog. if (CGF.CurFuncDecl && CGF.CurFuncDecl->hasAttr<NakedAttr>()) return; /// Initialize the 'this' slot. In the Itanium C++ ABI, no prologue /// adjustments are required, because they are all handled by thunks. setCXXABIThisValue(CGF, loadIncomingCXXThis(CGF)); /// Initialize the 'vtt' slot if needed. if (getStructorImplicitParamDecl(CGF)) { getStructorImplicitParamValue(CGF) = CGF.Builder.CreateLoad( CGF.GetAddrOfLocalVar(getStructorImplicitParamDecl(CGF)), "vtt"); } /// If this is a function that the ABI specifies returns 'this', initialize /// the return slot to 'this' at the start of the function. /// /// Unlike the setting of return types, this is done within the ABI /// implementation instead of by clients of CGCXXABI because: /// 1) getThisValue is currently protected /// 2) in theory, an ABI could implement 'this' returns some other way; /// HasThisReturn only specifies a contract, not the implementation if (HasThisReturn(CGF.CurGD)) CGF.Builder.CreateStore(getThisValue(CGF), CGF.ReturnValue); } CGCXXABI::AddedStructorArgs ItaniumCXXABI::getImplicitConstructorArgs( CodeGenFunction &CGF, const CXXConstructorDecl *D, CXXCtorType Type, bool ForVirtualBase, bool Delegating) { if (!NeedsVTTParameter(GlobalDecl(D, Type))) return AddedStructorArgs{}; // Insert the implicit 'vtt' argument as the second argument. 
llvm::Value *VTT = CGF.GetVTTParameter(GlobalDecl(D, Type), ForVirtualBase, Delegating); QualType VTTTy = getContext().getPointerType(getContext().VoidPtrTy); return AddedStructorArgs::prefix({{VTT, VTTTy}}); } llvm::Value *ItaniumCXXABI::getCXXDestructorImplicitParam( CodeGenFunction &CGF, const CXXDestructorDecl *DD, CXXDtorType Type, bool ForVirtualBase, bool Delegating) { GlobalDecl GD(DD, Type); return CGF.GetVTTParameter(GD, ForVirtualBase, Delegating); } void ItaniumCXXABI::EmitDestructorCall(CodeGenFunction &CGF, const CXXDestructorDecl *DD, CXXDtorType Type, bool ForVirtualBase, bool Delegating, Address This, QualType ThisTy) { GlobalDecl GD(DD, Type); llvm::Value *VTT = getCXXDestructorImplicitParam(CGF, DD, Type, ForVirtualBase, Delegating); QualType VTTTy = getContext().getPointerType(getContext().VoidPtrTy); CGCallee Callee; if (getContext().getLangOpts().AppleKext && Type != Dtor_Base && DD->isVirtual()) Callee = CGF.BuildAppleKextVirtualDestructorCall(DD, Type, DD->getParent()); else Callee = CGCallee::forDirect(CGM.getAddrOfCXXStructor(GD), GD); CGF.EmitCXXDestructorCall(GD, Callee, This.getPointer(), ThisTy, VTT, VTTTy, nullptr); } void ItaniumCXXABI::emitVTableDefinitions(CodeGenVTables &CGVT, const CXXRecordDecl *RD) { llvm::GlobalVariable *VTable = getAddrOfVTable(RD, CharUnits()); if (VTable->hasInitializer()) return; ItaniumVTableContext &VTContext = CGM.getItaniumVTableContext(); const VTableLayout &VTLayout = VTContext.getVTableLayout(RD); llvm::GlobalVariable::LinkageTypes Linkage = CGM.getVTableLinkage(RD); llvm::Constant *RTTI = CGM.GetAddrOfRTTIDescriptor(CGM.getContext().getTagDeclType(RD)); // Create and set the initializer. ConstantInitBuilder builder(CGM); auto components = builder.beginStruct(); CGVT.createVTableInitializer(components, VTLayout, RTTI, llvm::GlobalValue::isLocalLinkage(Linkage)); components.finishAndSetAsInitializer(VTable); // Set the correct linkage. 
VTable->setLinkage(Linkage); if (CGM.supportsCOMDAT() && VTable->isWeakForLinker()) VTable->setComdat(CGM.getModule().getOrInsertComdat(VTable->getName())); // Set the right visibility. CGM.setGVProperties(VTable, RD); // If this is the magic class __cxxabiv1::__fundamental_type_info, // we will emit the typeinfo for the fundamental types. This is the // same behaviour as GCC. const DeclContext *DC = RD->getDeclContext(); if (RD->getIdentifier() && RD->getIdentifier()->isStr("__fundamental_type_info") && isa<NamespaceDecl>(DC) && cast<NamespaceDecl>(DC)->getIdentifier() && cast<NamespaceDecl>(DC)->getIdentifier()->isStr("__cxxabiv1") && DC->getParent()->isTranslationUnit()) EmitFundamentalRTTIDescriptors(RD); if (!VTable->isDeclarationForLinker()) CGM.EmitVTableTypeMetadata(RD, VTable, VTLayout); if (VTContext.isRelativeLayout() && !VTable->isDSOLocal()) CGVT.GenerateRelativeVTableAlias(VTable, VTable->getName()); } bool ItaniumCXXABI::isVirtualOffsetNeededForVTableField( CodeGenFunction &CGF, CodeGenFunction::VPtr Vptr) { if (Vptr.NearestVBase == nullptr) return false; return NeedsVTTParameter(CGF.CurGD); } llvm::Value *ItaniumCXXABI::getVTableAddressPointInStructor( CodeGenFunction &CGF, const CXXRecordDecl *VTableClass, BaseSubobject Base, const CXXRecordDecl *NearestVBase) { if ((Base.getBase()->getNumVBases() || NearestVBase != nullptr) && NeedsVTTParameter(CGF.CurGD)) { return getVTableAddressPointInStructorWithVTT(CGF, VTableClass, Base, NearestVBase); } return getVTableAddressPoint(Base, VTableClass); } llvm::Constant * ItaniumCXXABI::getVTableAddressPoint(BaseSubobject Base, const CXXRecordDecl *VTableClass) { llvm::GlobalValue *VTable = getAddrOfVTable(VTableClass, CharUnits()); // Find the appropriate vtable within the vtable group, and the address point // within that vtable. 
VTableLayout::AddressPointLocation AddressPoint = CGM.getItaniumVTableContext() .getVTableLayout(VTableClass) .getAddressPoint(Base); llvm::Value *Indices[] = { llvm::ConstantInt::get(CGM.Int32Ty, 0), llvm::ConstantInt::get(CGM.Int32Ty, AddressPoint.VTableIndex), llvm::ConstantInt::get(CGM.Int32Ty, AddressPoint.AddressPointIndex), }; return llvm::ConstantExpr::getGetElementPtr(VTable->getValueType(), VTable, Indices, /*InBounds=*/true, /*InRangeIndex=*/1); } llvm::Value *ItaniumCXXABI::getVTableAddressPointInStructorWithVTT( CodeGenFunction &CGF, const CXXRecordDecl *VTableClass, BaseSubobject Base, const CXXRecordDecl *NearestVBase) { assert((Base.getBase()->getNumVBases() || NearestVBase != nullptr) && NeedsVTTParameter(CGF.CurGD) && "This class doesn't have VTT"); // Get the secondary vpointer index. uint64_t VirtualPointerIndex = CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base); /// Load the VTT. llvm::Value *VTT = CGF.LoadCXXVTT(); if (VirtualPointerIndex) VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex); // And load the address point from the VTT. return CGF.Builder.CreateAlignedLoad(VTT, CGF.getPointerAlign()); } llvm::Constant *ItaniumCXXABI::getVTableAddressPointForConstExpr( BaseSubobject Base, const CXXRecordDecl *VTableClass) { return getVTableAddressPoint(Base, VTableClass); } llvm::GlobalVariable *ItaniumCXXABI::getAddrOfVTable(const CXXRecordDecl *RD, CharUnits VPtrOffset) { assert(VPtrOffset.isZero() && "Itanium ABI only supports zero vptr offsets"); llvm::GlobalVariable *&VTable = VTables[RD]; if (VTable) return VTable; // Queue up this vtable for possible deferred emission. CGM.addDeferredVTable(RD); SmallString<256> Name; llvm::raw_svector_ostream Out(Name); getMangleContext().mangleCXXVTable(RD, Out); const VTableLayout &VTLayout = CGM.getItaniumVTableContext().getVTableLayout(RD); llvm::Type *VTableType = CGM.getVTables().getVTableType(VTLayout); // Use pointer alignment for the vtable. 
Otherwise we would align them based // on the size of the initializer which doesn't make sense as only single // values are read. unsigned PAlign = CGM.getItaniumVTableContext().isRelativeLayout() ? 32 : CGM.getTarget().getPointerAlign(0); VTable = CGM.CreateOrReplaceCXXRuntimeVariable( Name, VTableType, llvm::GlobalValue::ExternalLinkage, getContext().toCharUnitsFromBits(PAlign).getQuantity()); VTable->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global); CGM.setGVProperties(VTable, RD); return VTable; } CGCallee ItaniumCXXABI::getVirtualFunctionPointer(CodeGenFunction &CGF, GlobalDecl GD, Address This, llvm::Type *Ty, SourceLocation Loc) { auto *MethodDecl = cast<CXXMethodDecl>(GD.getDecl()); llvm::Value *VTable = CGF.GetVTablePtr( This, Ty->getPointerTo()->getPointerTo(), MethodDecl->getParent()); uint64_t VTableIndex = CGM.getItaniumVTableContext().getMethodVTableIndex(GD); llvm::Value *VFunc; if (CGF.ShouldEmitVTableTypeCheckedLoad(MethodDecl->getParent())) { VFunc = CGF.EmitVTableTypeCheckedLoad( MethodDecl->getParent(), VTable, VTableIndex * CGM.getContext().getTargetInfo().getPointerWidth(0) / 8); } else { CGF.EmitTypeMetadataCodeForVCall(MethodDecl->getParent(), VTable, Loc); llvm::Value *VFuncLoad; if (CGM.getItaniumVTableContext().isRelativeLayout()) { VTable = CGF.Builder.CreateBitCast(VTable, CGM.Int8PtrTy); llvm::Value *Load = CGF.Builder.CreateCall( CGM.getIntrinsic(llvm::Intrinsic::load_relative, {CGM.Int32Ty}), {VTable, llvm::ConstantInt::get(CGM.Int32Ty, 4 * VTableIndex)}); VFuncLoad = CGF.Builder.CreateBitCast(Load, Ty->getPointerTo()); } else { VTable = CGF.Builder.CreateBitCast(VTable, Ty->getPointerTo()->getPointerTo()); llvm::Value *VTableSlotPtr = CGF.Builder.CreateConstInBoundsGEP1_64(VTable, VTableIndex, "vfn"); VFuncLoad = CGF.Builder.CreateAlignedLoad(VTableSlotPtr, CGF.getPointerAlign()); } // Add !invariant.load md to virtual function load to indicate that // function didn't change inside vtable. 
// It's safe to add it without -fstrict-vtable-pointers, but it would not // help in devirtualization because it will only matter if we will have 2 // the same virtual function loads from the same vtable load, which won't // happen without enabled devirtualization with -fstrict-vtable-pointers. if (CGM.getCodeGenOpts().OptimizationLevel > 0 && CGM.getCodeGenOpts().StrictVTablePointers) { if (auto *VFuncLoadInstr = dyn_cast<llvm::Instruction>(VFuncLoad)) { VFuncLoadInstr->setMetadata( llvm::LLVMContext::MD_invariant_load, llvm::MDNode::get(CGM.getLLVMContext(), llvm::ArrayRef<llvm::Metadata *>())); } } VFunc = VFuncLoad; } CGCallee Callee(GD, VFunc); return Callee; } llvm::Value *ItaniumCXXABI::EmitVirtualDestructorCall( CodeGenFunction &CGF, const CXXDestructorDecl *Dtor, CXXDtorType DtorType, Address This, DeleteOrMemberCallExpr E) { auto *CE = E.dyn_cast<const CXXMemberCallExpr *>(); auto *D = E.dyn_cast<const CXXDeleteExpr *>(); assert((CE != nullptr) ^ (D != nullptr)); assert(CE == nullptr || CE->arg_begin() == CE->arg_end()); assert(DtorType == Dtor_Deleting || DtorType == Dtor_Complete); GlobalDecl GD(Dtor, DtorType); const CGFunctionInfo *FInfo = &CGM.getTypes().arrangeCXXStructorDeclaration(GD); llvm::FunctionType *Ty = CGF.CGM.getTypes().GetFunctionType(*FInfo); CGCallee Callee = CGCallee::forVirtual(CE, GD, This, Ty); QualType ThisTy; if (CE) { ThisTy = CE->getObjectType(); } else { ThisTy = D->getDestroyedType(); } CGF.EmitCXXDestructorCall(GD, Callee, This.getPointer(), ThisTy, nullptr, QualType(), nullptr); return nullptr; } void ItaniumCXXABI::emitVirtualInheritanceTables(const CXXRecordDecl *RD) { CodeGenVTables &VTables = CGM.getVTables(); llvm::GlobalVariable *VTT = VTables.GetAddrOfVTT(RD); VTables.EmitVTTDefinition(VTT, CGM.getVTableLinkage(RD), RD); } bool ItaniumCXXABI::canSpeculativelyEmitVTableAsBaseClass( const CXXRecordDecl *RD) const { // We don't emit available_externally vtables if we are in -fapple-kext mode // because kext mode does 
  // not permit devirtualization.
  if (CGM.getLangOpts().AppleKext)
    return false;

  // If the vtable is hidden then it is not safe to emit an
  // available_externally copy of vtable.
  if (isVTableHidden(RD))
    return false;

  if (CGM.getCodeGenOpts().ForceEmitVTables)
    return true;

  // If we don't have any not emitted inline virtual function then we are safe
  // to emit an available_externally copy of vtable.
  // FIXME we can still emit a copy of the vtable if we
  // can emit definition of the inline functions.
  if (hasAnyUnusedVirtualInlineFunction(RD))
    return false;

  // For a class with virtual bases, we must also be able to speculatively
  // emit the VTT, because CodeGen doesn't have separate notions of "can emit
  // the vtable" and "can emit the VTT". For a base subobject, this means we
  // need to be able to emit non-virtual base vtables.
  if (RD->getNumVBases()) {
    for (const auto &B : RD->bases()) {
      auto *BRD = B.getType()->getAsCXXRecordDecl();
      assert(BRD && "no class for base specifier");
      // Virtual bases and non-dynamic bases have no base vtable to emit.
      if (B.isVirtual() || !BRD->isDynamicClass())
        continue;
      if (!canSpeculativelyEmitVTableAsBaseClass(BRD))
        return false;
    }
  }

  return true;
}

// Whether a complete-object vtable for RD may be speculatively emitted:
// RD itself must qualify, and so must all of its dynamic virtual bases.
bool ItaniumCXXABI::canSpeculativelyEmitVTable(const CXXRecordDecl *RD) const {
  if (!canSpeculativelyEmitVTableAsBaseClass(RD))
    return false;

  // For a complete-object vtable (or more specifically, for the VTT), we need
  // to be able to speculatively emit the vtables of all dynamic virtual bases.
  for (const auto &B : RD->vbases()) {
    auto *BRD = B.getType()->getAsCXXRecordDecl();
    assert(BRD && "no class for base specifier");
    if (!BRD->isDynamicClass())
      continue;
    if (!canSpeculativelyEmitVTableAsBaseClass(BRD))
      return false;
  }

  return true;
}

// Apply a this/return pointer adjustment consisting of a fixed non-virtual
// byte offset and/or a virtual offset looked up through the object's vtable.
// The order of the two parts depends on the direction of the conversion
// (IsReturnAdjustment).
static llvm::Value *performTypeAdjustment(CodeGenFunction &CGF,
                                          Address InitialPtr,
                                          int64_t NonVirtualAdjustment,
                                          int64_t VirtualAdjustment,
                                          bool IsReturnAdjustment) {
  // Nothing to do: return the pointer unchanged.
  if (!NonVirtualAdjustment && !VirtualAdjustment)
    return InitialPtr.getPointer();

  Address V = CGF.Builder.CreateElementBitCast(InitialPtr, CGF.Int8Ty);

  // In a base-to-derived cast, the non-virtual adjustment is applied first.
  if (NonVirtualAdjustment && !IsReturnAdjustment) {
    V = CGF.Builder.CreateConstInBoundsByteGEP(
        V, CharUnits::fromQuantity(NonVirtualAdjustment));
  }

  // Perform the virtual adjustment if we have one.
  llvm::Value *ResultPtr;
  if (VirtualAdjustment) {
    // Read the vptr, then load the adjustment offset stored in the vtable at
    // VirtualAdjustment bytes.
    Address VTablePtrPtr = CGF.Builder.CreateElementBitCast(V, CGF.Int8PtrTy);
    llvm::Value *VTablePtr = CGF.Builder.CreateLoad(VTablePtrPtr);

    llvm::Value *Offset;
    llvm::Value *OffsetPtr =
        CGF.Builder.CreateConstInBoundsGEP1_64(VTablePtr, VirtualAdjustment);
    if (CGF.CGM.getItaniumVTableContext().isRelativeLayout()) {
      // Load the adjustment offset from the vtable as a 32-bit int.
      OffsetPtr =
          CGF.Builder.CreateBitCast(OffsetPtr, CGF.Int32Ty->getPointerTo());
      Offset = CGF.Builder.CreateAlignedLoad(OffsetPtr,
                                             CharUnits::fromQuantity(4));
    } else {
      llvm::Type *PtrDiffTy =
          CGF.ConvertType(CGF.getContext().getPointerDiffType());

      OffsetPtr = CGF.Builder.CreateBitCast(OffsetPtr,
                                            PtrDiffTy->getPointerTo());

      // Load the adjustment offset from the vtable.
      Offset = CGF.Builder.CreateAlignedLoad(OffsetPtr, CGF.getPointerAlign());
    }
    // Adjust our pointer.
    ResultPtr = CGF.Builder.CreateInBoundsGEP(V.getPointer(), Offset);
  } else {
    ResultPtr = V.getPointer();
  }

  // In a derived-to-base conversion, the non-virtual adjustment is
  // applied second.
  if (NonVirtualAdjustment && IsReturnAdjustment) {
    ResultPtr = CGF.Builder.CreateConstInBoundsGEP1_64(ResultPtr,
                                                       NonVirtualAdjustment);
  }

  // Cast back to the original type.
  return CGF.Builder.CreateBitCast(ResultPtr, InitialPtr.getType());
}

// Adjust 'this' inside a thunk (base-to-derived direction).
llvm::Value *ItaniumCXXABI::performThisAdjustment(CodeGenFunction &CGF,
                                                  Address This,
                                                  const ThisAdjustment &TA) {
  return performTypeAdjustment(CGF, This, TA.NonVirtual,
                               TA.Virtual.Itanium.VCallOffsetOffset,
                               /*IsReturnAdjustment=*/false);
}

// Adjust a covariant return value inside a thunk (derived-to-base direction).
llvm::Value *
ItaniumCXXABI::performReturnAdjustment(CodeGenFunction &CGF, Address Ret,
                                       const ReturnAdjustment &RA) {
  return performTypeAdjustment(CGF, Ret, RA.NonVirtual,
                               RA.Virtual.Itanium.VBaseOffsetOffset,
                               /*IsReturnAdjustment=*/true);
}

void ARMCXXABI::EmitReturnFromThunk(CodeGenFunction &CGF,
                                    RValue RV, QualType ResultType) {
  if (!isa<CXXDestructorDecl>(CGF.CurGD.getDecl()))
    return ItaniumCXXABI::EmitReturnFromThunk(CGF, RV, ResultType);

  // Destructor thunks in the ARM ABI have indeterminate results.
  llvm::Type *T = CGF.ReturnValue.getElementType();
  RValue Undef = RValue::get(llvm::UndefValue::get(T));
  return ItaniumCXXABI::EmitReturnFromThunk(CGF, Undef, ResultType);
}

/************************** Array allocation cookies **************************/

CharUnits ItaniumCXXABI::getArrayCookieSizeImpl(QualType elementType) {
  // The array cookie is a size_t; pad that up to the element alignment.
  // The cookie is actually right-justified in that space.
  return std::max(CharUnits::fromQuantity(CGM.SizeSizeInBytes),
                  CGM.getContext().getTypeAlignInChars(elementType));
}

// Write the element count into the array-new cookie and return a pointer to
// the start of the actual element storage (just past the cookie).
Address ItaniumCXXABI::InitializeArrayCookie(CodeGenFunction &CGF,
                                             Address NewPtr,
                                             llvm::Value *NumElements,
                                             const CXXNewExpr *expr,
                                             QualType ElementType) {
  assert(requiresArrayCookie(expr));

  unsigned AS = NewPtr.getAddressSpace();

  ASTContext &Ctx = getContext();
  CharUnits SizeSize = CGF.getSizeSize();

  // The size of the cookie.
  CharUnits CookieSize =
      std::max(SizeSize, Ctx.getTypeAlignInChars(ElementType));
  assert(CookieSize == getArrayCookieSizeImpl(ElementType));

  // Compute an offset to the cookie.
  // The count is right-justified: skip any padding before it.
  Address CookiePtr = NewPtr;
  CharUnits CookieOffset = CookieSize - SizeSize;
  if (!CookieOffset.isZero())
    CookiePtr = CGF.Builder.CreateConstInBoundsByteGEP(CookiePtr,
                                                       CookieOffset);

  // Write the number of elements into the appropriate slot.
  Address NumElementsPtr =
      CGF.Builder.CreateElementBitCast(CookiePtr, CGF.SizeTy);
  llvm::Instruction *SI = CGF.Builder.CreateStore(NumElements, NumElementsPtr);

  // Handle the array cookie specially in ASan.
  if (CGM.getLangOpts().Sanitize.has(SanitizerKind::Address) && AS == 0 &&
      (expr->getOperatorNew()->isReplaceableGlobalAllocationFunction() ||
       CGM.getCodeGenOpts().SanitizeAddressPoisonCustomArrayCookie)) {
    // The store to the CookiePtr does not need to be instrumented.
    CGM.getSanitizerMetadata()->disableSanitizerForInstruction(SI);
    llvm::FunctionType *FTy =
        llvm::FunctionType::get(CGM.VoidTy, NumElementsPtr.getType(), false);
    llvm::FunctionCallee F =
        CGM.CreateRuntimeFunction(FTy, "__asan_poison_cxx_array_cookie");
    CGF.Builder.CreateCall(F, NumElementsPtr.getPointer());
  }

  // Finally, compute a pointer to the actual data buffer by skipping
  // over the cookie completely.
  return CGF.Builder.CreateConstInBoundsByteGEP(NewPtr, CookieSize);
}

// Read the element count back out of an array-new cookie (e.g. for delete[]).
llvm::Value *ItaniumCXXABI::readArrayCookieImpl(CodeGenFunction &CGF,
                                                Address allocPtr,
                                                CharUnits cookieSize) {
  // The element size is right-justified in the cookie.
  Address numElementsPtr = allocPtr;
  CharUnits numElementsOffset = cookieSize - CGF.getSizeSize();
  if (!numElementsOffset.isZero())
    numElementsPtr =
        CGF.Builder.CreateConstInBoundsByteGEP(numElementsPtr,
                                               numElementsOffset);

  unsigned AS = allocPtr.getAddressSpace();
  numElementsPtr = CGF.Builder.CreateElementBitCast(numElementsPtr, CGF.SizeTy);
  if (!CGM.getLangOpts().Sanitize.has(SanitizerKind::Address) || AS != 0)
    return CGF.Builder.CreateLoad(numElementsPtr);
  // In asan mode emit a function call instead of a regular load and let the
  // run-time deal with it: if the shadow is properly poisoned return the
  // cookie, otherwise return 0 to avoid an infinite loop calling DTORs.
  // We can't simply ignore this load using nosanitize metadata because
  // the metadata may be lost.
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGF.SizeTy, CGF.SizeTy->getPointerTo(0), false);
  llvm::FunctionCallee F =
      CGM.CreateRuntimeFunction(FTy, "__asan_load_cxx_array_cookie");
  return CGF.Builder.CreateCall(F, numElementsPtr.getPointer());
}

CharUnits ARMCXXABI::getArrayCookieSizeImpl(QualType elementType) {
  // ARM says that the cookie is always:
  //   struct array_cookie {
  //     std::size_t element_size; // element_size != 0
  //     std::size_t element_count;
  //   };
  // But the base ABI doesn't give anything an alignment greater than
  // 8, so we can dismiss this as typical ABI-author blindness to
  // actual language complexity and round up to the element alignment.
  return std::max(CharUnits::fromQuantity(2 * CGM.SizeSizeInBytes),
                  CGM.getContext().getTypeAlignInChars(elementType));
}

// ARM variant: the cookie stores {element_size, element_count} at the start
// of the allocation.
Address ARMCXXABI::InitializeArrayCookie(CodeGenFunction &CGF,
                                         Address newPtr,
                                         llvm::Value *numElements,
                                         const CXXNewExpr *expr,
                                         QualType elementType) {
  assert(requiresArrayCookie(expr));

  // The cookie is always at the start of the buffer.
  Address cookie = newPtr;

  // The first element is the element size.
  cookie = CGF.Builder.CreateElementBitCast(cookie, CGF.SizeTy);
  llvm::Value *elementSize = llvm::ConstantInt::get(CGF.SizeTy,
                 getContext().getTypeSizeInChars(elementType).getQuantity());
  CGF.Builder.CreateStore(elementSize, cookie);

  // The second element is the element count.
  cookie = CGF.Builder.CreateConstInBoundsGEP(cookie, 1);
  CGF.Builder.CreateStore(numElements, cookie);

  // Finally, compute a pointer to the actual data buffer by skipping
  // over the cookie completely.
  CharUnits cookieSize = ARMCXXABI::getArrayCookieSizeImpl(elementType);
  return CGF.Builder.CreateConstInBoundsByteGEP(newPtr, cookieSize);
}

// ARM variant: element count lives one size_t past the allocation start.
llvm::Value *ARMCXXABI::readArrayCookieImpl(CodeGenFunction &CGF,
                                            Address allocPtr,
                                            CharUnits cookieSize) {
  // The number of elements is at offset sizeof(size_t) relative to
  // the allocated pointer.
  Address numElementsPtr
    = CGF.Builder.CreateConstInBoundsByteGEP(allocPtr, CGF.getSizeSize());

  numElementsPtr = CGF.Builder.CreateElementBitCast(numElementsPtr, CGF.SizeTy);
  return CGF.Builder.CreateLoad(numElementsPtr);
}

/*********************** Static local initialization **************************/

// Declare the Itanium runtime entry points that guard thread-safe static
// local initialization. All three are nounwind.
static llvm::FunctionCallee getGuardAcquireFn(CodeGenModule &CGM,
                                              llvm::PointerType *GuardPtrTy) {
  // int __cxa_guard_acquire(__guard *guard_object);
  llvm::FunctionType *FTy =
    llvm::FunctionType::get(CGM.getTypes().ConvertType(CGM.getContext().IntTy),
                            GuardPtrTy, /*isVarArg=*/false);
  return CGM.CreateRuntimeFunction(
      FTy, "__cxa_guard_acquire",
      llvm::AttributeList::get(CGM.getLLVMContext(),
                               llvm::AttributeList::FunctionIndex,
                               llvm::Attribute::NoUnwind));
}

static llvm::FunctionCallee getGuardReleaseFn(CodeGenModule &CGM,
                                              llvm::PointerType *GuardPtrTy) {
  // void __cxa_guard_release(__guard *guard_object);
  llvm::FunctionType *FTy =
    llvm::FunctionType::get(CGM.VoidTy, GuardPtrTy, /*isVarArg=*/false);
  return CGM.CreateRuntimeFunction(
      FTy, "__cxa_guard_release",
      llvm::AttributeList::get(CGM.getLLVMContext(),
                               llvm::AttributeList::FunctionIndex,
                               llvm::Attribute::NoUnwind));
}

static llvm::FunctionCallee getGuardAbortFn(CodeGenModule &CGM,
                                            llvm::PointerType *GuardPtrTy) {
  // void __cxa_guard_abort(__guard *guard_object);
  llvm::FunctionType *FTy =
    llvm::FunctionType::get(CGM.VoidTy, GuardPtrTy, /*isVarArg=*/false);
  return CGM.CreateRuntimeFunction(
      FTy, "__cxa_guard_abort",
      llvm::AttributeList::get(CGM.getLLVMContext(),
                               llvm::AttributeList::FunctionIndex,
                               llvm::Attribute::NoUnwind));
}

namespace {
  // EH cleanup that calls __cxa_guard_abort if initialization throws, so a
  // later attempt can retry the initialization.
  struct CallGuardAbort final : EHScopeStack::Cleanup {
    llvm::GlobalVariable *Guard;
    CallGuardAbort(llvm::GlobalVariable *Guard) : Guard(Guard) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitNounwindRuntimeCall(getGuardAbortFn(CGF.CGM, Guard->getType()),
                                  Guard);
    }
  };
}

/// The ARM code here follows the Itanium code closely enough that we
/// just special-case it at particular places.
void ItaniumCXXABI::EmitGuardedInit(CodeGenFunction &CGF,
                                    const VarDecl &D,
                                    llvm::GlobalVariable *var,
                                    bool shouldPerformInit) {
  CGBuilderTy &Builder = CGF.Builder;

  // Inline variables that weren't instantiated from variable templates have
  // partially-ordered initialization within their translation unit.
  bool NonTemplateInline =
      D.isInline() &&
      !isTemplateInstantiation(D.getTemplateSpecializationKind());

  // We only need to use thread-safe statics for local non-TLS variables and
  // inline variables; other global initialization is always single-threaded
  // or (through lazy dynamic loading in multiple threads) unsequenced.
  bool threadsafe = getContext().getLangOpts().ThreadsafeStatics &&
                    (D.isLocalVarDecl() || NonTemplateInline) &&
                    !D.getTLSKind();

  // If we have a global variable with internal linkage and thread-safe statics
  // are disabled, we can just let the guard variable be of type i8.
  bool useInt8GuardVariable = !threadsafe && var->hasInternalLinkage();

  llvm::IntegerType *guardTy;
  CharUnits guardAlignment;
  if (useInt8GuardVariable) {
    guardTy = CGF.Int8Ty;
    guardAlignment = CharUnits::One();
  } else {
    // Guard variables are 64 bits in the generic ABI and size width on ARM
    // (i.e. 32-bit on AArch32, 64-bit on AArch64).
    if (UseARMGuardVarABI) {
      guardTy = CGF.SizeTy;
      guardAlignment = CGF.getSizeAlign();
    } else {
      guardTy = CGF.Int64Ty;
      guardAlignment = CharUnits::fromQuantity(
          CGM.getDataLayout().getABITypeAlignment(guardTy));
    }
  }
  llvm::PointerType *guardPtrTy = guardTy->getPointerTo();

  // Create the guard variable if we don't already have it (as we
  // might if we're double-emitting this function body).
  llvm::GlobalVariable *guard = CGM.getStaticLocalDeclGuardAddress(&D);
  if (!guard) {
    // Mangle the name for the guard.
    SmallString<256> guardName;
    {
      llvm::raw_svector_ostream out(guardName);
      getMangleContext().mangleStaticGuardVariable(&D, out);
    }

    // Create the guard variable with a zero-initializer.
    // Just absorb linkage and visibility from the guarded variable.
    guard = new llvm::GlobalVariable(CGM.getModule(), guardTy,
                                     false, var->getLinkage(),
                                     llvm::ConstantInt::get(guardTy, 0),
                                     guardName.str());
    guard->setDSOLocal(var->isDSOLocal());
    guard->setVisibility(var->getVisibility());
    // If the variable is thread-local, so is its guard variable.
    guard->setThreadLocalMode(var->getThreadLocalMode());
    guard->setAlignment(guardAlignment.getAsAlign());

    // The ABI says: "It is suggested that it be emitted in the same COMDAT
    // group as the associated data object." In practice, this doesn't work for
    // non-ELF and non-Wasm object formats, so only do it for ELF and Wasm.
    llvm::Comdat *C = var->getComdat();
    if (!D.isLocalVarDecl() && C &&
        (CGM.getTarget().getTriple().isOSBinFormatELF() ||
         CGM.getTarget().getTriple().isOSBinFormatWasm())) {
      guard->setComdat(C);
      // An inline variable's guard function is run from the per-TU
      // initialization function, not via a dedicated global ctor function, so
      // we can't put it in a comdat.
      if (!NonTemplateInline)
        CGF.CurFn->setComdat(C);
    } else if (CGM.supportsCOMDAT() && guard->isWeakForLinker()) {
      guard->setComdat(CGM.getModule().getOrInsertComdat(guard->getName()));
    }

    CGM.setStaticLocalDeclGuardAddress(&D, guard);
  }

  Address guardAddr = Address(guard, guardAlignment);

  // Test whether the variable has completed initialization.
  //
  // Itanium C++ ABI 3.3.2:
  //   The following is pseudo-code showing how these functions can be used:
  //     if (obj_guard.first_byte == 0) {
  //       if ( __cxa_guard_acquire (&obj_guard) ) {
  //         try {
  //           ... initialize the object ...;
  //         } catch (...) {
  //            __cxa_guard_abort (&obj_guard);
  //            throw;
  //         }
  //         ... queue object destructor with __cxa_atexit() ...;
  //         __cxa_guard_release (&obj_guard);
  //       }
  //     }

  // Load the first byte of the guard variable.
  llvm::LoadInst *LI =
      Builder.CreateLoad(Builder.CreateElementBitCast(guardAddr, CGM.Int8Ty));

  // Itanium ABI:
  //   An implementation supporting thread-safety on multiprocessor
  //   systems must also guarantee that references to the initialized
  //   object do not occur before the load of the initialization flag.
  //
  // In LLVM, we do this by marking the load Acquire.
  if (threadsafe)
    LI->setAtomic(llvm::AtomicOrdering::Acquire);

  // For ARM, we should only check the first bit, rather than the entire byte:
  //
  // ARM C++ ABI 3.2.3.1:
  //   To support the potential use of initialization guard variables
  //   as semaphores that are the target of ARM SWP and LDREX/STREX
  //   synchronizing instructions we define a static initialization
  //   guard variable to be a 4-byte aligned, 4-byte word with the
  //   following inline access protocol.
  //     #define INITIALIZED 1
  //     if ((obj_guard & INITIALIZED) != INITIALIZED) {
  //       if (__cxa_guard_acquire(&obj_guard))
  //         ...
  //     }
  //
  // and similarly for ARM64:
  //
  // ARM64 C++ ABI 3.2.2:
  //   This ABI instead only specifies the value bit 0 of the static guard
  //   variable; all other bits are platform defined. Bit 0 shall be 0 when the
  //   variable is not initialized and 1 when it is.
  llvm::Value *V =
      (UseARMGuardVarABI && !useInt8GuardVariable)
          ? Builder.CreateAnd(LI, llvm::ConstantInt::get(CGM.Int8Ty, 1))
          : LI;
  llvm::Value *NeedsInit = Builder.CreateIsNull(V, "guard.uninitialized");

  llvm::BasicBlock *InitCheckBlock = CGF.createBasicBlock("init.check");
  llvm::BasicBlock *EndBlock = CGF.createBasicBlock("init.end");

  // Check if the first byte of the guard variable is zero.
  CGF.EmitCXXGuardedInitBranch(NeedsInit, InitCheckBlock, EndBlock,
                               CodeGenFunction::GuardKind::VariableGuard, &D);

  CGF.EmitBlock(InitCheckBlock);

  // Variables used when coping with thread-safe statics and exceptions.
  if (threadsafe) {
    // Call __cxa_guard_acquire.
    llvm::Value *V
      = CGF.EmitNounwindRuntimeCall(getGuardAcquireFn(CGM, guardPtrTy), guard);

    llvm::BasicBlock *InitBlock = CGF.createBasicBlock("init");

    Builder.CreateCondBr(Builder.CreateIsNotNull(V, "tobool"),
                         InitBlock, EndBlock);

    // Call __cxa_guard_abort along the exceptional edge.
    CGF.EHStack.pushCleanup<CallGuardAbort>(EHCleanup, guard);

    CGF.EmitBlock(InitBlock);
  }

  // Emit the initializer and add a global destructor if appropriate.
  CGF.EmitCXXGlobalVarDeclInit(D, var, shouldPerformInit);

  if (threadsafe) {
    // Pop the guard-abort cleanup if we pushed one.
    CGF.PopCleanupBlock();

    // Call __cxa_guard_release.  This cannot throw.
    CGF.EmitNounwindRuntimeCall(getGuardReleaseFn(CGM, guardPtrTy),
                                guardAddr.getPointer());
  } else {
    // Non-thread-safe path: just set the whole guard to 1 (plain store).
    Builder.CreateStore(llvm::ConstantInt::get(guardTy, 1), guardAddr);
  }

  CGF.EmitBlock(EndBlock);
}

/// Register a global destructor using __cxa_atexit.
// Emit a call registering 'dtor(addr)' to run at program (or thread) exit via
// __cxa_atexit / __cxa_thread_atexit / _tlv_atexit, binding it to this shared
// object through __dso_handle.
static void emitGlobalDtorWithCXAAtExit(CodeGenFunction &CGF,
                                        llvm::FunctionCallee dtor,
                                        llvm::Constant *addr, bool TLS) {
  assert((TLS || CGF.getTypes().getCodeGenOpts().CXAAtExit) &&
         "__cxa_atexit is disabled");
  const char *Name = "__cxa_atexit";
  if (TLS) {
    const llvm::Triple &T = CGF.getTarget().getTriple();
    Name = T.isOSDarwin() ? "_tlv_atexit" : "__cxa_thread_atexit";
  }

  // We're assuming that the destructor function is something we can
  // reasonably call with the default CC.  Go ahead and cast it to the
  // right prototype.
  llvm::Type *dtorTy =
    llvm::FunctionType::get(CGF.VoidTy, CGF.Int8PtrTy, false)->getPointerTo();

  // Preserve address space of addr.
  auto AddrAS = addr ? addr->getType()->getPointerAddressSpace() : 0;
  auto AddrInt8PtrTy =
      AddrAS ? CGF.Int8Ty->getPointerTo(AddrAS) : CGF.Int8PtrTy;

  // Create a variable that binds the atexit to this shared object.
  llvm::Constant *handle =
      CGF.CGM.CreateRuntimeVariable(CGF.Int8Ty, "__dso_handle");
  auto *GV = cast<llvm::GlobalValue>(handle->stripPointerCasts());
  GV->setVisibility(llvm::GlobalValue::HiddenVisibility);

  // extern "C" int __cxa_atexit(void (*f)(void *), void *p, void *d);
  llvm::Type *paramTys[] = {dtorTy, AddrInt8PtrTy, handle->getType()};
  llvm::FunctionType *atexitTy =
    llvm::FunctionType::get(CGF.IntTy, paramTys, false);

  // Fetch the actual function.
  llvm::FunctionCallee atexit = CGF.CGM.CreateRuntimeFunction(atexitTy, Name);
  if (llvm::Function *fn = dyn_cast<llvm::Function>(atexit.getCallee()))
    fn->setDoesNotThrow();

  if (!addr)
    // addr is null when we are trying to register a dtor annotated with
    // __attribute__((destructor)) in a constructor function. Using null here
    // is okay because this argument is just passed back to the destructor
    // function.
    addr = llvm::Constant::getNullValue(CGF.Int8PtrTy);

  llvm::Value *args[] = {llvm::ConstantExpr::getBitCast(
                             cast<llvm::Constant>(dtor.getCallee()), dtorTy),
                         llvm::ConstantExpr::getBitCast(addr, AddrInt8PtrTy),
                         handle};
  CGF.EmitNounwindRuntimeCall(atexit, args);
}

// For each dtor priority bucket, synthesize a "__GLOBAL_init_<prio>" function
// that registers that bucket's destructors via __cxa_atexit/atexit, and run
// it as a global constructor at the same priority.
void CodeGenModule::registerGlobalDtorsWithAtExit() {
  for (const auto &I : DtorsUsingAtExit) {
    int Priority = I.first;
    const llvm::TinyPtrVector<llvm::Function *> &Dtors = I.second;

    // Create a function that registers destructors that have the same
    // priority.
    //
    // Since constructor functions are run in non-descending order of their
    // priorities, destructors are registered in non-descending order of their
    // priorities, and since destructor functions are run in the reverse order
    // of their registration, destructor functions are run in non-ascending
    // order of their priorities.
    CodeGenFunction CGF(*this);
    std::string GlobalInitFnName =
        std::string("__GLOBAL_init_") + llvm::to_string(Priority);
    llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, false);
    llvm::Function *GlobalInitFn = CreateGlobalInitOrCleanUpFunction(
        FTy, GlobalInitFnName, getTypes().arrangeNullaryFunction(),
        SourceLocation());
    ASTContext &Ctx = getContext();
    QualType ReturnTy = Ctx.VoidTy;
    QualType FunctionTy = Ctx.getFunctionType(ReturnTy, llvm::None, {});
    FunctionDecl *FD = FunctionDecl::Create(
        Ctx, Ctx.getTranslationUnitDecl(), SourceLocation(), SourceLocation(),
        &Ctx.Idents.get(GlobalInitFnName), FunctionTy, nullptr, SC_Static,
        false, false);
    CGF.StartFunction(GlobalDecl(FD), ReturnTy, GlobalInitFn,
                      getTypes().arrangeNullaryFunction(), FunctionArgList(),
                      SourceLocation(), SourceLocation());

    for (auto *Dtor : Dtors) {
      // Register the destructor function calling __cxa_atexit if it is
      // available. Otherwise fall back on calling atexit.
      if (getCodeGenOpts().CXAAtExit)
        emitGlobalDtorWithCXAAtExit(CGF, Dtor, nullptr, false);
      else
        CGF.registerGlobalDtorWithAtExit(Dtor);
    }

    CGF.FinishFunction();
    AddGlobalCtor(GlobalInitFn, Priority, nullptr);
  }
}

/// Register a global destructor as best as we know how.
void ItaniumCXXABI::registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
                                       llvm::FunctionCallee dtor,
                                       llvm::Constant *addr) {
  if (D.isNoDestroy(CGM.getContext()))
    return;

  // emitGlobalDtorWithCXAAtExit will emit a call to either __cxa_thread_atexit
  // or __cxa_atexit depending on whether this VarDecl is a thread-local
  // storage or not. CXAAtExit controls only __cxa_atexit, so use it if it is
  // enabled. We can always use __cxa_thread_atexit.
  if (CGM.getCodeGenOpts().CXAAtExit || D.getTLSKind())
    return emitGlobalDtorWithCXAAtExit(CGF, dtor, addr, D.getTLSKind());

  // In Apple kexts, we want to add a global destructor entry.
  // FIXME: shouldn't this be guarded by some variable?
  if (CGM.getLangOpts().AppleKext) {
    // Generate a global destructor entry.
    return CGM.AddCXXDtorEntry(dtor, addr);
  }

  CGF.registerGlobalDtorWithAtExit(D, dtor, addr);
}

// Whether accesses to VD must always go through its thread wrapper function
// (true for dynamic-TLS variables on Darwin).
static bool isThreadWrapperReplaceable(const VarDecl *VD,
                                       CodeGen::CodeGenModule &CGM) {
  assert(!VD->isStaticLocal() && "static local VarDecls don't need wrappers!");
  // Darwin prefers to have references to thread local variables to go through
  // the thread wrapper instead of directly referencing the backing variable.
  return VD->getTLSKind() == VarDecl::TLS_Dynamic &&
         CGM.getTarget().getTriple().isOSDarwin();
}

/// Get the appropriate linkage for the wrapper function. This is essentially
/// the weak form of the variable's linkage; every translation unit which needs
/// the wrapper emits a copy, and we want the linker to merge them.
static llvm::GlobalValue::LinkageTypes
getThreadLocalWrapperLinkage(const VarDecl *VD, CodeGen::CodeGenModule &CGM) {
  llvm::GlobalValue::LinkageTypes VarLinkage =
      CGM.getLLVMLinkageVarDefinition(VD, /*IsConstant=*/false);

  // For internal linkage variables, we don't need an external or weak wrapper.
  if (llvm::GlobalValue::isLocalLinkage(VarLinkage))
    return VarLinkage;

  // If the thread wrapper is replaceable, give it appropriate linkage.
  if (isThreadWrapperReplaceable(VD, CGM))
    if (!llvm::GlobalVariable::isLinkOnceLinkage(VarLinkage) &&
        !llvm::GlobalVariable::isWeakODRLinkage(VarLinkage))
      return VarLinkage;

  return llvm::GlobalValue::WeakODRLinkage;
}

// Return (creating and recording on first use) the thread_local wrapper
// function for VD. The wrapper's body is filled in later by
// EmitThreadLocalInitFuncs; here only the declaration and its attributes are
// set up.
llvm::Function *
ItaniumCXXABI::getOrCreateThreadLocalWrapper(const VarDecl *VD,
                                             llvm::Value *Val) {
  // Mangle the name for the thread_local wrapper function.
  SmallString<256> WrapperName;
  {
    llvm::raw_svector_ostream Out(WrapperName);
    getMangleContext().mangleItaniumThreadLocalWrapper(VD, Out);
  }

  // FIXME: If VD is a definition, we should regenerate the function attributes
  // before returning.
  if (llvm::Value *V = CGM.getModule().getNamedValue(WrapperName))
    return cast<llvm::Function>(V);

  // The wrapper returns a pointer to the variable (references decay to the
  // pointee type here).
  QualType RetQT = VD->getType();
  if (RetQT->isReferenceType())
    RetQT = RetQT.getNonReferenceType();

  const CGFunctionInfo &FI = CGM.getTypes().arrangeBuiltinFunctionDeclaration(
      getContext().getPointerType(RetQT), FunctionArgList());

  llvm::FunctionType *FnTy = CGM.getTypes().GetFunctionType(FI);
  llvm::Function *Wrapper =
      llvm::Function::Create(FnTy, getThreadLocalWrapperLinkage(VD, CGM),
                             WrapperName.str(), &CGM.getModule());

  if (CGM.supportsCOMDAT() && Wrapper->isWeakForLinker())
    Wrapper->setComdat(CGM.getModule().getOrInsertComdat(Wrapper->getName()));

  CGM.SetLLVMFunctionAttributes(GlobalDecl(), FI, Wrapper);

  // Always resolve references to the wrapper at link time.
  if (!Wrapper->hasLocalLinkage())
    if (!isThreadWrapperReplaceable(VD, CGM) ||
        llvm::GlobalVariable::isLinkOnceLinkage(Wrapper->getLinkage()) ||
        llvm::GlobalVariable::isWeakODRLinkage(Wrapper->getLinkage()) ||
        VD->getVisibility() == HiddenVisibility)
      Wrapper->setVisibility(llvm::GlobalValue::HiddenVisibility);

  if (isThreadWrapperReplaceable(VD, CGM)) {
    Wrapper->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
    Wrapper->addFnAttr(llvm::Attribute::NoUnwind);
  }

  ThreadWrappers.push_back({VD, Wrapper});
  return Wrapper;
}

// Emit the per-TU thread_local machinery: the guarded __tls_init function for
// ordered initializers, per-variable init functions/aliases, and the body of
// every referenced thread wrapper.
void ItaniumCXXABI::EmitThreadLocalInitFuncs(
    CodeGenModule &CGM, ArrayRef<const VarDecl *> CXXThreadLocals,
    ArrayRef<llvm::Function *> CXXThreadLocalInits,
    ArrayRef<const VarDecl *> CXXThreadLocalInitVars) {
  llvm::Function *InitFunc = nullptr;

  // Separate initializers into those with ordered (or partially-ordered)
  // initialization and those with unordered initialization.
  llvm::SmallVector<llvm::Function *, 8> OrderedInits;
  llvm::SmallDenseMap<const VarDecl *, llvm::Function *> UnorderedInits;
  for (unsigned I = 0; I != CXXThreadLocalInits.size(); ++I) {
    if (isTemplateInstantiation(
            CXXThreadLocalInitVars[I]->getTemplateSpecializationKind()))
      UnorderedInits[CXXThreadLocalInitVars[I]->getCanonicalDecl()] =
          CXXThreadLocalInits[I];
    else
      OrderedInits.push_back(CXXThreadLocalInits[I]);
  }

  if (!OrderedInits.empty()) {
    // Generate a guarded initialization function.
    llvm::FunctionType *FTy =
        llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
    const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
    InitFunc = CGM.CreateGlobalInitOrCleanUpFunction(FTy, "__tls_init", FI,
                                                     SourceLocation(),
                                                     /*TLS=*/true);
    // The thread-local __tls_guard byte ensures __tls_init runs at most once
    // per thread.
    llvm::GlobalVariable *Guard = new llvm::GlobalVariable(
        CGM.getModule(), CGM.Int8Ty, /*isConstant=*/false,
        llvm::GlobalVariable::InternalLinkage,
        llvm::ConstantInt::get(CGM.Int8Ty, 0), "__tls_guard");
    Guard->setThreadLocal(true);
    Guard->setThreadLocalMode(CGM.GetDefaultLLVMTLSModel());

    CharUnits GuardAlign = CharUnits::One();
    Guard->setAlignment(GuardAlign.getAsAlign());

    CodeGenFunction(CGM).GenerateCXXGlobalInitFunc(
        InitFunc, OrderedInits, ConstantAddress(Guard, GuardAlign));
    // On Darwin platforms, use CXX_FAST_TLS calling convention.
    if (CGM.getTarget().getTriple().isOSDarwin()) {
      InitFunc->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
      InitFunc->addFnAttr(llvm::Attribute::NoUnwind);
    }
  }

  // Create declarations for thread wrappers for all thread-local variables
  // with non-discardable definitions in this translation unit.
  for (const VarDecl *VD : CXXThreadLocals) {
    if (VD->hasDefinition() &&
        !isDiscardableGVALinkage(getContext().GetGVALinkageForVariable(VD))) {
      llvm::GlobalValue *GV = CGM.GetGlobalValue(CGM.getMangledName(VD));
      getOrCreateThreadLocalWrapper(VD, GV);
    }
  }

  // Emit all referenced thread wrappers.
  for (auto VDAndWrapper : ThreadWrappers) {
    const VarDecl *VD = VDAndWrapper.first;
    llvm::GlobalVariable *Var =
        cast<llvm::GlobalVariable>(CGM.GetGlobalValue(CGM.getMangledName(VD)));
    llvm::Function *Wrapper = VDAndWrapper.second;

    // Some targets require that all access to thread local variables go
    // through the thread wrapper. This means that we cannot attempt to create
    // a thread wrapper or a thread helper.
    if (!VD->hasDefinition()) {
      if (isThreadWrapperReplaceable(VD, CGM)) {
        Wrapper->setLinkage(llvm::Function::ExternalLinkage);
        continue;
      }

      // If this isn't a TU in which this variable is defined, the thread
      // wrapper is discardable.
      if (Wrapper->getLinkage() == llvm::Function::WeakODRLinkage)
        Wrapper->setLinkage(llvm::Function::LinkOnceODRLinkage);
    }

    CGM.SetLLVMFunctionAttributesForDefinition(nullptr, Wrapper);

    // Mangle the name for the thread_local initialization function.
    SmallString<256> InitFnName;
    {
      llvm::raw_svector_ostream Out(InitFnName);
      getMangleContext().mangleItaniumThreadLocalInit(VD, Out);
    }

    llvm::FunctionType *InitFnTy = llvm::FunctionType::get(CGM.VoidTy, false);

    // If we have a definition for the variable, emit the initialization
    // function as an alias to the global Init function (if any). Otherwise,
    // produce a declaration of the initialization function.
    llvm::GlobalValue *Init = nullptr;
    bool InitIsInitFunc = false;
    bool HasConstantInitialization = false;
    if (!usesThreadWrapperFunction(VD)) {
      // Constant-initialized: the wrapper has no dynamic init to perform.
      HasConstantInitialization = true;
    } else if (VD->hasDefinition()) {
      InitIsInitFunc = true;
      llvm::Function *InitFuncToUse = InitFunc;
      if (isTemplateInstantiation(VD->getTemplateSpecializationKind()))
        InitFuncToUse = UnorderedInits.lookup(VD->getCanonicalDecl());
      if (InitFuncToUse)
        Init = llvm::GlobalAlias::create(Var->getLinkage(), InitFnName.str(),
                                         InitFuncToUse);
    } else {
      // Emit a weak global function referring to the initialization function.
      // This function will not exist if the TU defining the thread_local
      // variable in question does not need any dynamic initialization for
      // its thread_local variables.
      Init = llvm::Function::Create(InitFnTy,
                                    llvm::GlobalVariable::ExternalWeakLinkage,
                                    InitFnName.str(), &CGM.getModule());
      const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
      CGM.SetLLVMFunctionAttributes(GlobalDecl(), FI,
                                    cast<llvm::Function>(Init));
    }

    if (Init) {
      Init->setVisibility(Var->getVisibility());
      // Don't mark an extern_weak function DSO local on windows.
      if (!CGM.getTriple().isOSWindows() || !Init->hasExternalWeakLinkage())
        Init->setDSOLocal(Var->isDSOLocal());
    }

    llvm::LLVMContext &Context = CGM.getModule().getContext();
    llvm::BasicBlock *Entry = llvm::BasicBlock::Create(Context, "", Wrapper);
    CGBuilderTy Builder(CGM, Entry);
    if (HasConstantInitialization) {
      // No dynamic initialization to invoke.
    } else if (InitIsInitFunc) {
      if (Init) {
        llvm::CallInst *CallVal = Builder.CreateCall(InitFnTy, Init);
        if (isThreadWrapperReplaceable(VD, CGM)) {
          CallVal->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
          llvm::Function *Fn =
              cast<llvm::Function>(cast<llvm::GlobalAlias>(Init)->getAliasee());
          Fn->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
        }
      }
    } else {
      // Don't know whether we have an init function. Call it if it exists.
      llvm::Value *Have = Builder.CreateIsNotNull(Init);
      llvm::BasicBlock *InitBB = llvm::BasicBlock::Create(Context, "", Wrapper);
      llvm::BasicBlock *ExitBB = llvm::BasicBlock::Create(Context, "", Wrapper);
      Builder.CreateCondBr(Have, InitBB, ExitBB);

      Builder.SetInsertPoint(InitBB);
      Builder.CreateCall(InitFnTy, Init);
      Builder.CreateBr(ExitBB);

      Builder.SetInsertPoint(ExitBB);
    }

    // For a reference, the result of the wrapper function is a pointer to
    // the referenced object.
llvm::Value *Val = Var;
if (VD->getType()->isReferenceType()) {
  CharUnits Align = CGM.getContext().getDeclAlign(VD);
  Val = Builder.CreateAlignedLoad(Val, Align);
}
if (Val->getType() != Wrapper->getReturnType())
  Val = Builder.CreatePointerBitCastOrAddrSpaceCast(
      Val, Wrapper->getReturnType(), "");
Builder.CreateRet(Val);
}
}

/// Emit an lvalue for a thread_local variable by calling (or creating and
/// calling) its thread wrapper function; the wrapper returns the variable's
/// address (or, for a reference, the referenced object's address).
LValue ItaniumCXXABI::EmitThreadLocalVarDeclLValue(CodeGenFunction &CGF,
                                                   const VarDecl *VD,
                                                   QualType LValType) {
  llvm::Value *Val = CGF.CGM.GetAddrOfGlobalVar(VD);
  llvm::Function *Wrapper = getOrCreateThreadLocalWrapper(VD, Val);

  llvm::CallInst *CallVal = CGF.Builder.CreateCall(Wrapper);
  CallVal->setCallingConv(Wrapper->getCallingConv());

  LValue LV;
  if (VD->getType()->isReferenceType())
    LV = CGF.MakeNaturalAlignAddrLValue(CallVal, LValType);
  else
    LV = CGF.MakeAddrLValue(CallVal, LValType,
                            CGF.getContext().getDeclAlign(VD));
  // FIXME: need setObjCGCLValueClass?
  return LV;
}

/// Return whether the given global decl needs a VTT parameter, which it does
/// if it's a base constructor or destructor with virtual bases.
bool ItaniumCXXABI::NeedsVTTParameter(GlobalDecl GD) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());

  // We don't have any virtual bases, just return early.
  if (!MD->getParent()->getNumVBases())
    return false;

  // Check if we have a base constructor.
  if (isa<CXXConstructorDecl>(MD) && GD.getCtorType() == Ctor_Base)
    return true;

  // Check if we have a base destructor.
  if (isa<CXXDestructorDecl>(MD) && GD.getDtorType() == Dtor_Base)
    return true;

  return false;
}

namespace {
/// Builds the Itanium-ABI RTTI data structures (type_info objects and their
/// mangled-name strings) for one type at a time, accumulating the struct's
/// members in `Fields`.
class ItaniumRTTIBuilder {
  CodeGenModule &CGM;  // Per-module state.
  llvm::LLVMContext &VMContext;
  const ItaniumCXXABI &CXXABI;  // Per-module state.

  /// Fields - The fields of the RTTI descriptor currently being built.
  SmallVector<llvm::Constant *, 16> Fields;

  /// GetAddrOfTypeName - Returns the mangled type name of the given type.
llvm::GlobalVariable *
GetAddrOfTypeName(QualType Ty, llvm::GlobalVariable::LinkageTypes Linkage);

/// GetAddrOfExternalRTTIDescriptor - Returns the constant for the RTTI
/// descriptor of the given type.
llvm::Constant *GetAddrOfExternalRTTIDescriptor(QualType Ty);

/// BuildVTablePointer - Build the vtable pointer for the given type.
void BuildVTablePointer(const Type *Ty);

/// BuildSIClassTypeInfo - Build an abi::__si_class_type_info, used for single
/// inheritance, according to the Itanium C++ ABI, 2.9.5p6b.
void BuildSIClassTypeInfo(const CXXRecordDecl *RD);

/// BuildVMIClassTypeInfo - Build an abi::__vmi_class_type_info, used for
/// classes with bases that do not satisfy the abi::__si_class_type_info
/// constraints, according to the Itanium C++ ABI, 2.9.5p5c.
void BuildVMIClassTypeInfo(const CXXRecordDecl *RD);

/// BuildPointerTypeInfo - Build an abi::__pointer_type_info struct, used
/// for pointer types.
void BuildPointerTypeInfo(QualType PointeeTy);

/// BuildObjCObjectTypeInfo - Build the appropriate kind of
/// type_info for an object type.
void BuildObjCObjectTypeInfo(const ObjCObjectType *Ty);

/// BuildPointerToMemberTypeInfo - Build an abi::__pointer_to_member_type_info
/// struct, used for member pointer types.
void BuildPointerToMemberTypeInfo(const MemberPointerType *Ty);

public:
ItaniumRTTIBuilder(const ItaniumCXXABI &ABI)
    : CGM(ABI.CGM), VMContext(CGM.getModule().getContext()), CXXABI(ABI) {}

// Pointer type info flags.
enum {
  /// PTI_Const - Type has const qualifier.
  PTI_Const = 0x1,

  /// PTI_Volatile - Type has volatile qualifier.
  PTI_Volatile = 0x2,

  /// PTI_Restrict - Type has restrict qualifier.
  PTI_Restrict = 0x4,

  /// PTI_Incomplete - Type is incomplete.
  PTI_Incomplete = 0x8,

  /// PTI_ContainingClassIncomplete - Containing class is incomplete.
  /// (in pointer to member).
  PTI_ContainingClassIncomplete = 0x10,

  /// PTI_TransactionSafe - Pointee is transaction_safe function (C++ TM TS).
//PTI_TransactionSafe = 0x20, /// PTI_Noexcept - Pointee is noexcept function (C++1z). PTI_Noexcept = 0x40, }; // VMI type info flags. enum { /// VMI_NonDiamondRepeat - Class has non-diamond repeated inheritance. VMI_NonDiamondRepeat = 0x1, /// VMI_DiamondShaped - Class is diamond shaped. VMI_DiamondShaped = 0x2 }; // Base class type info flags. enum { /// BCTI_Virtual - Base class is virtual. BCTI_Virtual = 0x1, /// BCTI_Public - Base class is public. BCTI_Public = 0x2 }; /// BuildTypeInfo - Build the RTTI type info struct for the given type, or /// link to an existing RTTI descriptor if one already exists. llvm::Constant *BuildTypeInfo(QualType Ty); /// BuildTypeInfo - Build the RTTI type info struct for the given type. llvm::Constant *BuildTypeInfo( QualType Ty, llvm::GlobalVariable::LinkageTypes Linkage, llvm::GlobalValue::VisibilityTypes Visibility, llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass); }; } llvm::GlobalVariable *ItaniumRTTIBuilder::GetAddrOfTypeName( QualType Ty, llvm::GlobalVariable::LinkageTypes Linkage) { SmallString<256> Name; llvm::raw_svector_ostream Out(Name); CGM.getCXXABI().getMangleContext().mangleCXXRTTIName(Ty, Out); // We know that the mangled name of the type starts at index 4 of the // mangled name of the typename, so we can just index into it in order to // get the mangled name of the type. llvm::Constant *Init = llvm::ConstantDataArray::getString(VMContext, Name.substr(4)); auto Align = CGM.getContext().getTypeAlignInChars(CGM.getContext().CharTy); llvm::GlobalVariable *GV = CGM.CreateOrReplaceCXXRuntimeVariable( Name, Init->getType(), Linkage, Align.getQuantity()); GV->setInitializer(Init); return GV; } llvm::Constant * ItaniumRTTIBuilder::GetAddrOfExternalRTTIDescriptor(QualType Ty) { // Mangle the RTTI name. SmallString<256> Name; llvm::raw_svector_ostream Out(Name); CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out); // Look for an existing global. 
llvm::GlobalVariable *GV = CGM.getModule().getNamedGlobal(Name);

if (!GV) {
  // Create a new global variable.
  // Note for the future: If we would ever like to do deferred emission of
  // RTTI, check if emitting vtables opportunistically needs any adjustment.
  GV = new llvm::GlobalVariable(CGM.getModule(), CGM.Int8PtrTy,
                                /*isConstant=*/true,
                                llvm::GlobalValue::ExternalLinkage, nullptr,
                                Name);
  const CXXRecordDecl *RD = Ty->getAsCXXRecordDecl();
  CGM.setGVProperties(GV, RD);
}

return llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy);
}

/// TypeInfoIsInStandardLibrary - Given a builtin type, returns whether the type
/// info for that type is defined in the standard library.
static bool TypeInfoIsInStandardLibrary(const BuiltinType *Ty) {
  // Itanium C++ ABI 2.9.2:
  //   Basic type information (e.g. for "int", "bool", etc.) will be kept in
  //   the run-time support library. Specifically, the run-time support
  //   library should contain type_info objects for the types X, X* and
  //   X const*, for every X in: void, std::nullptr_t, bool, wchar_t, char,
  //   unsigned char, signed char, short, unsigned short, int, unsigned int,
  //   long, unsigned long, long long, unsigned long long, float, double,
  //   long double, char16_t, char32_t, and the IEEE 754r decimal and
  //   half-precision floating point types.
  //
  // GCC also emits RTTI for __int128.
  // FIXME: We do not emit RTTI information for decimal types here.

  // Types added here must also be added to EmitFundamentalRTTIDescriptors.
switch (Ty->getKind()) {
// Fundamental types whose type_info the C++ runtime library provides.
case BuiltinType::Void:
case BuiltinType::NullPtr:
case BuiltinType::Bool:
case BuiltinType::WChar_S:
case BuiltinType::WChar_U:
case BuiltinType::Char_U:
case BuiltinType::Char_S:
case BuiltinType::UChar:
case BuiltinType::SChar:
case BuiltinType::Short:
case BuiltinType::UShort:
case BuiltinType::Int:
case BuiltinType::UInt:
case BuiltinType::Long:
case BuiltinType::ULong:
case BuiltinType::LongLong:
case BuiltinType::ULongLong:
case BuiltinType::Half:
case BuiltinType::Float:
case BuiltinType::Double:
case BuiltinType::LongDouble:
case BuiltinType::Float16:
case BuiltinType::Float128:
case BuiltinType::Char8:
case BuiltinType::Char16:
case BuiltinType::Char32:
case BuiltinType::Int128:
case BuiltinType::UInt128:
  return true;

// Extension/embedded types (OpenCL, SVE, fixed-point, bfloat16) are not
// covered by the runtime library; their type_info must be emitted locally.
#define IMAGE_TYPE(ImgType, Id, SingletonId, Access, Suffix) \
case BuiltinType::Id:
#include "clang/Basic/OpenCLImageTypes.def"
#define EXT_OPAQUE_TYPE(ExtType, Id, Ext) \
case BuiltinType::Id:
#include "clang/Basic/OpenCLExtensionTypes.def"
case BuiltinType::OCLSampler:
case BuiltinType::OCLEvent:
case BuiltinType::OCLClkEvent:
case BuiltinType::OCLQueue:
case BuiltinType::OCLReserveID:
#define SVE_TYPE(Name, Id, SingletonId) \
case BuiltinType::Id:
#include "clang/Basic/AArch64SVEACLETypes.def"
case BuiltinType::ShortAccum:
case BuiltinType::Accum:
case BuiltinType::LongAccum:
case BuiltinType::UShortAccum:
case BuiltinType::UAccum:
case BuiltinType::ULongAccum:
case BuiltinType::ShortFract:
case BuiltinType::Fract:
case BuiltinType::LongFract:
case BuiltinType::UShortFract:
case BuiltinType::UFract:
case BuiltinType::ULongFract:
case BuiltinType::SatShortAccum:
case BuiltinType::SatAccum:
case BuiltinType::SatLongAccum:
case BuiltinType::SatUShortAccum:
case BuiltinType::SatUAccum:
case BuiltinType::SatULongAccum:
case BuiltinType::SatShortFract:
case BuiltinType::SatFract:
case BuiltinType::SatLongFract:
case BuiltinType::SatUShortFract:
case BuiltinType::SatUFract:
case BuiltinType::SatULongFract:
case BuiltinType::BFloat16:
  return false;

case BuiltinType::Dependent:
#define BUILTIN_TYPE(Id, SingletonId)
#define PLACEHOLDER_TYPE(Id, SingletonId) \
case BuiltinType::Id:
#include "clang/AST/BuiltinTypes.def"
  llvm_unreachable("asking for RRTI for a placeholder type!");

case BuiltinType::ObjCId:
case BuiltinType::ObjCClass:
case BuiltinType::ObjCSel:
  llvm_unreachable("FIXME: Objective-C types are unsupported!");
}

llvm_unreachable("Invalid BuiltinType Kind!");
}

// Pointer-to-builtin (and pointer-to-const-builtin) type_info is also
// provided by the runtime library; any other qualifier disqualifies it.
static bool TypeInfoIsInStandardLibrary(const PointerType *PointerTy) {
  QualType PointeeTy = PointerTy->getPointeeType();
  const BuiltinType *BuiltinTy = dyn_cast<BuiltinType>(PointeeTy);
  if (!BuiltinTy)
    return false;

  // Check the qualifiers.
  Qualifiers Quals = PointeeTy.getQualifiers();
  Quals.removeConst();

  if (!Quals.empty())
    return false;

  return TypeInfoIsInStandardLibrary(BuiltinTy);
}

/// IsStandardLibraryRTTIDescriptor - Returns whether the type
/// information for the given type exists in the standard library.
static bool IsStandardLibraryRTTIDescriptor(QualType Ty) {
  // Type info for builtin types is defined in the standard library.
  if (const BuiltinType *BuiltinTy = dyn_cast<BuiltinType>(Ty))
    return TypeInfoIsInStandardLibrary(BuiltinTy);

  // Type info for some pointer types to builtin types is defined in the
  // standard library.
  if (const PointerType *PointerTy = dyn_cast<PointerType>(Ty))
    return TypeInfoIsInStandardLibrary(PointerTy);

  return false;
}

/// ShouldUseExternalRTTIDescriptor - Returns whether the type information for
/// the given type exists somewhere else, and that we should not emit the type
/// information in this translation unit.  Assumes that it is not a
/// standard-library type.
static bool ShouldUseExternalRTTIDescriptor(CodeGenModule &CGM,
                                            QualType Ty) {
  ASTContext &Context = CGM.getContext();

  // If RTTI is disabled, assume it might be disabled in the
  // translation unit that defines any potential key function, too.
if (!Context.getLangOpts().RTTI) return false;

if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
  const CXXRecordDecl *RD = cast<CXXRecordDecl>(RecordTy->getDecl());
  if (!RD->hasDefinition())
    return false;

  if (!RD->isDynamicClass())
    return false;

  // FIXME: this may need to be reconsidered if the key function
  // changes.
  // N.B. We must always emit the RTTI data ourselves if there exists a key
  // function.
  bool IsDLLImport = RD->hasAttr<DLLImportAttr>();

  // Don't import the RTTI but emit it locally.
  if (CGM.getTriple().isWindowsGNUEnvironment())
    return false;

  if (CGM.getVTables().isVTableExternal(RD))
    return IsDLLImport && !CGM.getTriple().isWindowsItaniumEnvironment()
               ? false
               : true;

  if (IsDLLImport)
    return true;
}

return false;
}

/// IsIncompleteClassType - Returns whether the given record type is incomplete.
static bool IsIncompleteClassType(const RecordType *RecordTy) {
  return !RecordTy->getDecl()->isCompleteDefinition();
}

/// ContainsIncompleteClassType - Returns whether the given type contains an
/// incomplete class type. This is true if
///
///   * The given type is an incomplete class type.
///   * The given type is a pointer type whose pointee type contains an
///     incomplete class type.
///   * The given type is a member pointer type whose class is an incomplete
///     class type.
///   * The given type is a member pointer type whose pointee type contains an
///     incomplete class type.
static bool ContainsIncompleteClassType(QualType Ty) {
  if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
    if (IsIncompleteClassType(RecordTy))
      return true;
  }

  if (const PointerType *PointerTy = dyn_cast<PointerType>(Ty))
    return ContainsIncompleteClassType(PointerTy->getPointeeType());

  if (const MemberPointerType *MemberPointerTy =
          dyn_cast<MemberPointerType>(Ty)) {
    // Check if the class type is incomplete.
const RecordType *ClassType = cast<RecordType>(MemberPointerTy->getClass());
if (IsIncompleteClassType(ClassType))
  return true;

return ContainsIncompleteClassType(MemberPointerTy->getPointeeType());
}

return false;
}

// CanUseSingleInheritance - Return whether the given record decl has a "single,
// public, non-virtual base at offset zero (i.e. the derived class is dynamic
// iff the base is)", according to Itanium C++ ABI, 2.9.5p6b.
static bool CanUseSingleInheritance(const CXXRecordDecl *RD) {
  // Check the number of bases.
  if (RD->getNumBases() != 1)
    return false;

  // Get the base.
  CXXRecordDecl::base_class_const_iterator Base = RD->bases_begin();

  // Check that the base is not virtual.
  if (Base->isVirtual())
    return false;

  // Check that the base is public.
  if (Base->getAccessSpecifier() != AS_public)
    return false;

  // Check that the class is dynamic iff the base is.
  auto *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());
  if (!BaseDecl->isEmpty() &&
      BaseDecl->isDynamicClass() != RD->isDynamicClass())
    return false;

  return true;
}

// Pushes the type_info vtable pointer (the first member of every type_info
// object) for the RTTI class that corresponds to `Ty`.
void ItaniumRTTIBuilder::BuildVTablePointer(const Type *Ty) {
  // abi::__class_type_info.
  static const char * const ClassTypeInfo =
      "_ZTVN10__cxxabiv117__class_type_infoE";
  // abi::__si_class_type_info.
  static const char * const SIClassTypeInfo =
      "_ZTVN10__cxxabiv120__si_class_type_infoE";
  // abi::__vmi_class_type_info.
static const char * const VMIClassTypeInfo =
    "_ZTVN10__cxxabiv121__vmi_class_type_infoE";

const char *VTableName = nullptr;

switch (Ty->getTypeClass()) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.inc"
  llvm_unreachable("Non-canonical and dependent types shouldn't get here");

case Type::LValueReference:
case Type::RValueReference:
  llvm_unreachable("References shouldn't get here");

case Type::Auto:
case Type::DeducedTemplateSpecialization:
  llvm_unreachable("Undeduced type shouldn't get here");

case Type::Pipe:
  llvm_unreachable("Pipe types shouldn't get here");

case Type::Builtin:
case Type::ExtInt:
// GCC treats vector and complex types as fundamental types.
case Type::Vector:
case Type::ExtVector:
case Type::ConstantMatrix:
case Type::Complex:
case Type::Atomic:
// FIXME: GCC treats block pointers as fundamental types?!
case Type::BlockPointer:
  // abi::__fundamental_type_info.
  VTableName = "_ZTVN10__cxxabiv123__fundamental_type_infoE";
  break;

case Type::ConstantArray:
case Type::IncompleteArray:
case Type::VariableArray:
  // abi::__array_type_info.
  VTableName = "_ZTVN10__cxxabiv117__array_type_infoE";
  break;

case Type::FunctionNoProto:
case Type::FunctionProto:
  // abi::__function_type_info.
  VTableName = "_ZTVN10__cxxabiv120__function_type_infoE";
  break;

case Type::Enum:
  // abi::__enum_type_info.
  VTableName = "_ZTVN10__cxxabiv116__enum_type_infoE";
  break;

case Type::Record: {
  const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(cast<RecordType>(Ty)->getDecl());

  if (!RD->hasDefinition() || !RD->getNumBases()) {
    VTableName = ClassTypeInfo;
  } else if (CanUseSingleInheritance(RD)) {
    VTableName = SIClassTypeInfo;
  } else {
    VTableName = VMIClassTypeInfo;
  }

  break;
}

case Type::ObjCObject:
  // Ignore protocol qualifiers.
  Ty = cast<ObjCObjectType>(Ty)->getBaseType().getTypePtr();

  // Handle id and Class.
  if (isa<BuiltinType>(Ty)) {
    VTableName = ClassTypeInfo;
    break;
  }

  assert(isa<ObjCInterfaceType>(Ty));
  LLVM_FALLTHROUGH;

case Type::ObjCInterface:
  if (cast<ObjCInterfaceType>(Ty)->getDecl()->getSuperClass()) {
    VTableName = SIClassTypeInfo;
  } else {
    VTableName = ClassTypeInfo;
  }
  break;

case Type::ObjCObjectPointer:
case Type::Pointer:
  // abi::__pointer_type_info.
  VTableName = "_ZTVN10__cxxabiv119__pointer_type_infoE";
  break;

case Type::MemberPointer:
  // abi::__pointer_to_member_type_info.
  VTableName = "_ZTVN10__cxxabiv129__pointer_to_member_type_infoE";
  break;
}

llvm::Constant *VTable = nullptr;

// Check if the alias exists. If it doesn't, then get or create the global.
if (CGM.getItaniumVTableContext().isRelativeLayout())
  VTable = CGM.getModule().getNamedAlias(VTableName);
if (!VTable)
  VTable = CGM.getModule().getOrInsertGlobal(VTableName, CGM.Int8PtrTy);

CGM.setDSOLocal(cast<llvm::GlobalValue>(VTable->stripPointerCasts()));

llvm::Type *PtrDiffTy =
    CGM.getTypes().ConvertType(CGM.getContext().getPointerDiffType());

// The vtable address point is 2.
if (CGM.getItaniumVTableContext().isRelativeLayout()) {
  // The vtable address point is 8 bytes after its start:
  // 4 for the offset to top + 4 for the relative offset to rtti.
  llvm::Constant *Eight = llvm::ConstantInt::get(CGM.Int32Ty, 8);
  VTable = llvm::ConstantExpr::getBitCast(VTable, CGM.Int8PtrTy);
  VTable =
      llvm::ConstantExpr::getInBoundsGetElementPtr(CGM.Int8Ty, VTable, Eight);
} else {
  // Classic layout: skip the two pointer-sized slots (offset-to-top, RTTI)
  // preceding the address point.
  llvm::Constant *Two = llvm::ConstantInt::get(PtrDiffTy, 2);
  VTable = llvm::ConstantExpr::getInBoundsGetElementPtr(CGM.Int8PtrTy, VTable,
                                                        Two);
}
VTable = llvm::ConstantExpr::getBitCast(VTable, CGM.Int8PtrTy);

Fields.push_back(VTable);
}

/// Return the linkage that the type info and type info name constants
/// should have for the given type.
static llvm::GlobalVariable::LinkageTypes getTypeInfoLinkage(CodeGenModule &CGM,
                                                             QualType Ty) {
  // Itanium C++ ABI 2.9.5p7:
  //   In addition, it and all of the intermediate abi::__pointer_type_info
  //   structs in the chain down to the abi::__class_type_info for the
  //   incomplete class type must be prevented from resolving to the
  //   corresponding type_info structs for the complete class type, possibly
  //   by making them local static objects. Finally, a dummy class RTTI is
  //   generated for the incomplete type that will not resolve to the final
  //   complete class RTTI (because the latter need not exist), possibly by
  //   making it a local static object.
  if (ContainsIncompleteClassType(Ty))
    return llvm::GlobalValue::InternalLinkage;

  switch (Ty->getLinkage()) {
  case NoLinkage:
  case InternalLinkage:
  case UniqueExternalLinkage:
    return llvm::GlobalValue::InternalLinkage;

  case VisibleNoLinkage:
  case ModuleInternalLinkage:
  case ModuleLinkage:
  case ExternalLinkage:
    // RTTI is not enabled, which means that this type info struct is going
    // to be used for exception handling. Give it linkonce_odr linkage.
    if (!CGM.getLangOpts().RTTI)
      return llvm::GlobalValue::LinkOnceODRLinkage;

    if (const RecordType *Record = dyn_cast<RecordType>(Ty)) {
      const CXXRecordDecl *RD = cast<CXXRecordDecl>(Record->getDecl());
      if (RD->hasAttr<WeakAttr>())
        return llvm::GlobalValue::WeakODRLinkage;
      if (CGM.getTriple().isWindowsItaniumEnvironment())
        if (RD->hasAttr<DLLImportAttr>() &&
            ShouldUseExternalRTTIDescriptor(CGM, Ty))
          return llvm::GlobalValue::ExternalLinkage;
      // MinGW always uses LinkOnceODRLinkage for type info.
      if (RD->isDynamicClass() && !CGM.getContext()
                                       .getTargetInfo()
                                       .getTriple()
                                       .isWindowsGNUEnvironment())
        return CGM.getVTableLinkage(RD);
    }

    return llvm::GlobalValue::LinkOnceODRLinkage;
  }

  llvm_unreachable("Invalid linkage!");
}

llvm::Constant *ItaniumRTTIBuilder::BuildTypeInfo(QualType Ty) {
  // We want to operate on the canonical type.
Ty = Ty.getCanonicalType();

// Check if we've already emitted an RTTI descriptor for this type.
SmallString<256> Name;
llvm::raw_svector_ostream Out(Name);
CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);

llvm::GlobalVariable *OldGV = CGM.getModule().getNamedGlobal(Name);
if (OldGV && !OldGV->isDeclaration()) {
  assert(!OldGV->hasAvailableExternallyLinkage() &&
         "available_externally typeinfos not yet implemented");

  return llvm::ConstantExpr::getBitCast(OldGV, CGM.Int8PtrTy);
}

// Check if there is already an external RTTI descriptor for this type.
if (IsStandardLibraryRTTIDescriptor(Ty) ||
    ShouldUseExternalRTTIDescriptor(CGM, Ty))
  return GetAddrOfExternalRTTIDescriptor(Ty);

// Emit the standard library with external linkage.
llvm::GlobalVariable::LinkageTypes Linkage = getTypeInfoLinkage(CGM, Ty);

// Give the type_info object and name the formal visibility of the
// type itself.
llvm::GlobalValue::VisibilityTypes llvmVisibility;
if (llvm::GlobalValue::isLocalLinkage(Linkage))
  // If the linkage is local, only default visibility makes sense.
  llvmVisibility = llvm::GlobalValue::DefaultVisibility;
else if (CXXABI.classifyRTTIUniqueness(Ty, Linkage) ==
         ItaniumCXXABI::RUK_NonUniqueHidden)
  llvmVisibility = llvm::GlobalValue::HiddenVisibility;
else
  llvmVisibility = CodeGenModule::GetLLVMVisibility(Ty->getVisibility());

llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass =
    llvm::GlobalValue::DefaultStorageClass;
if (CGM.getTriple().isWindowsItaniumEnvironment()) {
  auto RD = Ty->getAsCXXRecordDecl();
  if (RD && RD->hasAttr<DLLExportAttr>())
    DLLStorageClass = llvm::GlobalValue::DLLExportStorageClass;
}

// Delegate to the full overload with the computed linkage/visibility.
return BuildTypeInfo(Ty, Linkage, llvmVisibility, DLLStorageClass);
}

llvm::Constant *ItaniumRTTIBuilder::BuildTypeInfo(
    QualType Ty, llvm::GlobalVariable::LinkageTypes Linkage,
    llvm::GlobalValue::VisibilityTypes Visibility,
    llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass) {
  // Add the vtable pointer.
  BuildVTablePointer(cast<Type>(Ty));

  // And the name.
llvm::GlobalVariable *TypeName = GetAddrOfTypeName(Ty, Linkage);
llvm::Constant *TypeNameField;

// If we're supposed to demote the visibility, be sure to set a flag
// to use a string comparison for type_info comparisons.
ItaniumCXXABI::RTTIUniquenessKind RTTIUniqueness =
    CXXABI.classifyRTTIUniqueness(Ty, Linkage);
if (RTTIUniqueness != ItaniumCXXABI::RUK_Unique) {
  // The flag is the sign bit, which on ARM64 is defined to be clear
  // for global pointers.  This is very ARM64-specific.
  TypeNameField = llvm::ConstantExpr::getPtrToInt(TypeName, CGM.Int64Ty);
  llvm::Constant *flag =
      llvm::ConstantInt::get(CGM.Int64Ty, ((uint64_t)1) << 63);
  TypeNameField = llvm::ConstantExpr::getAdd(TypeNameField, flag);
  TypeNameField =
      llvm::ConstantExpr::getIntToPtr(TypeNameField, CGM.Int8PtrTy);
} else {
  TypeNameField = llvm::ConstantExpr::getBitCast(TypeName, CGM.Int8PtrTy);
}
Fields.push_back(TypeNameField);

switch (Ty->getTypeClass()) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.inc"
  llvm_unreachable("Non-canonical and dependent types shouldn't get here");

// GCC treats vector types as fundamental types.
case Type::Builtin:
case Type::Vector:
case Type::ExtVector:
case Type::ConstantMatrix:
case Type::Complex:
case Type::BlockPointer:
  // Itanium C++ ABI 2.9.5p4:
  //   abi::__fundamental_type_info adds no data members to std::type_info.
  break;

case Type::LValueReference:
case Type::RValueReference:
  llvm_unreachable("References shouldn't get here");

case Type::Auto:
case Type::DeducedTemplateSpecialization:
  llvm_unreachable("Undeduced type shouldn't get here");

case Type::Pipe:
  break;

case Type::ExtInt:
  break;

case Type::ConstantArray:
case Type::IncompleteArray:
case Type::VariableArray:
  // Itanium C++ ABI 2.9.5p5:
  //   abi::__array_type_info adds no data members to std::type_info.
  break;

case Type::FunctionNoProto:
case Type::FunctionProto:
  // Itanium C++ ABI 2.9.5p5:
  //   abi::__function_type_info adds no data members to std::type_info.
  break;

case Type::Enum:
  // Itanium C++ ABI 2.9.5p5:
  //   abi::__enum_type_info adds no data members to std::type_info.
  break;

case Type::Record: {
  const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(cast<RecordType>(Ty)->getDecl());
  if (!RD->hasDefinition() || !RD->getNumBases()) {
    // We don't need to emit any fields.
    break;
  }

  if (CanUseSingleInheritance(RD))
    BuildSIClassTypeInfo(RD);
  else
    BuildVMIClassTypeInfo(RD);

  break;
}

case Type::ObjCObject:
case Type::ObjCInterface:
  BuildObjCObjectTypeInfo(cast<ObjCObjectType>(Ty));
  break;

case Type::ObjCObjectPointer:
  BuildPointerTypeInfo(cast<ObjCObjectPointerType>(Ty)->getPointeeType());
  break;

case Type::Pointer:
  BuildPointerTypeInfo(cast<PointerType>(Ty)->getPointeeType());
  break;

case Type::MemberPointer:
  BuildPointerToMemberTypeInfo(cast<MemberPointerType>(Ty));
  break;

case Type::Atomic:
  // No fields, at least for the moment.
  break;
}

llvm::Constant *Init = llvm::ConstantStruct::getAnon(Fields);

SmallString<256> Name;
llvm::raw_svector_ostream Out(Name);
CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);
llvm::Module &M = CGM.getModule();
llvm::GlobalVariable *OldGV = M.getNamedGlobal(Name);
llvm::GlobalVariable *GV =
    new llvm::GlobalVariable(M, Init->getType(),
                             /*isConstant=*/true, Linkage, Init, Name);

// If there's already an old global variable, replace it with the new one.
if (OldGV) {
  GV->takeName(OldGV);
  llvm::Constant *NewPtr =
      llvm::ConstantExpr::getBitCast(GV, OldGV->getType());
  OldGV->replaceAllUsesWith(NewPtr);
  OldGV->eraseFromParent();
}

if (CGM.supportsCOMDAT() && GV->isWeakForLinker())
  GV->setComdat(M.getOrInsertComdat(GV->getName()));

CharUnits Align =
    CGM.getContext().toCharUnitsFromBits(CGM.getTarget().getPointerAlign(0));
GV->setAlignment(Align.getAsAlign());

// The Itanium ABI specifies that type_info objects must be globally
// unique, with one exception: if the type is an incomplete class
// type or a (possibly indirect) pointer to one.  That exception
// affects the general case of comparing type_info objects produced
// by the typeid operator, which is why the comparison operators on
// std::type_info generally use the type_info name pointers instead
// of the object addresses.  However, the language's built-in uses
// of RTTI generally require class types to be complete, even when
// manipulating pointers to those class types.  This allows the
// implementation of dynamic_cast to rely on address equality tests,
// which is much faster.

// All of this is to say that it's important that both the type_info
// object and the type_info name be uniqued when weakly emitted.

TypeName->setVisibility(Visibility);
CGM.setDSOLocal(TypeName);

GV->setVisibility(Visibility);
CGM.setDSOLocal(GV);

TypeName->setDLLStorageClass(DLLStorageClass);
GV->setDLLStorageClass(DLLStorageClass);

TypeName->setPartition(CGM.getCodeGenOpts().SymbolPartition);
GV->setPartition(CGM.getCodeGenOpts().SymbolPartition);

return llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy);
}

/// BuildObjCObjectTypeInfo - Build the appropriate kind of type_info
/// for the given Objective-C object type.
void ItaniumRTTIBuilder::BuildObjCObjectTypeInfo(const ObjCObjectType *OT) {
  // Drop qualifiers.
const Type *T = OT->getBaseType().getTypePtr();
assert(isa<BuiltinType>(T) || isa<ObjCInterfaceType>(T));

// The builtin types are abi::__class_type_infos and don't require
// extra fields.
if (isa<BuiltinType>(T))
  return;

ObjCInterfaceDecl *Class = cast<ObjCInterfaceType>(T)->getDecl();
ObjCInterfaceDecl *Super = Class->getSuperClass();

// Root classes are also __class_type_info.
if (!Super)
  return;

QualType SuperTy = CGM.getContext().getObjCInterfaceType(Super);

// Everything else is single inheritance.
llvm::Constant *BaseTypeInfo =
    ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(SuperTy);
Fields.push_back(BaseTypeInfo);
}

/// BuildSIClassTypeInfo - Build an abi::__si_class_type_info, used for single
/// inheritance, according to the Itanium C++ ABI, 2.9.5p6b.
void ItaniumRTTIBuilder::BuildSIClassTypeInfo(const CXXRecordDecl *RD) {
  // Itanium C++ ABI 2.9.5p6b:
  //   It adds to abi::__class_type_info a single member pointing to the
  //   type_info structure for the base type,
  llvm::Constant *BaseTypeInfo =
      ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(RD->bases_begin()->getType());
  Fields.push_back(BaseTypeInfo);
}

namespace {
/// SeenBases - Contains virtual and non-virtual bases seen when traversing
/// a class hierarchy.
struct SeenBases {
  llvm::SmallPtrSet<const CXXRecordDecl *, 16> NonVirtualBases;
  llvm::SmallPtrSet<const CXXRecordDecl *, 16> VirtualBases;
};
}

/// ComputeVMIClassTypeInfoFlags - Compute the value of the flags member in
/// abi::__vmi_class_type_info.
///
static unsigned ComputeVMIClassTypeInfoFlags(const CXXBaseSpecifier *Base,
                                             SeenBases &Bases) {

  unsigned Flags = 0;

  auto *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());

  if (Base->isVirtual()) {
    // Mark the virtual base as seen.
    if (!Bases.VirtualBases.insert(BaseDecl).second) {
      // If this virtual base has been seen before, then the class is diamond
      // shaped.
Flags |= ItaniumRTTIBuilder::VMI_DiamondShaped; } else { if (Bases.NonVirtualBases.count(BaseDecl)) Flags |= ItaniumRTTIBuilder::VMI_NonDiamondRepeat; } } else { // Mark the non-virtual base as seen. if (!Bases.NonVirtualBases.insert(BaseDecl).second) { // If this non-virtual base has been seen before, then the class has non- // diamond shaped repeated inheritance. Flags |= ItaniumRTTIBuilder::VMI_NonDiamondRepeat; } else { if (Bases.VirtualBases.count(BaseDecl)) Flags |= ItaniumRTTIBuilder::VMI_NonDiamondRepeat; } } // Walk all bases. for (const auto &I : BaseDecl->bases()) Flags |= ComputeVMIClassTypeInfoFlags(&I, Bases); return Flags; } static unsigned ComputeVMIClassTypeInfoFlags(const CXXRecordDecl *RD) { unsigned Flags = 0; SeenBases Bases; // Walk all bases. for (const auto &I : RD->bases()) Flags |= ComputeVMIClassTypeInfoFlags(&I, Bases); return Flags; } /// BuildVMIClassTypeInfo - Build an abi::__vmi_class_type_info, used for /// classes with bases that do not satisfy the abi::__si_class_type_info /// constraints, according ti the Itanium C++ ABI, 2.9.5p5c. void ItaniumRTTIBuilder::BuildVMIClassTypeInfo(const CXXRecordDecl *RD) { llvm::Type *UnsignedIntLTy = CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy); // Itanium C++ ABI 2.9.5p6c: // __flags is a word with flags describing details about the class // structure, which may be referenced by using the __flags_masks // enumeration. These flags refer to both direct and indirect bases. unsigned Flags = ComputeVMIClassTypeInfoFlags(RD); Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags)); // Itanium C++ ABI 2.9.5p6c: // __base_count is a word with the number of direct proper base class // descriptions that follow. Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, RD->getNumBases())); if (!RD->getNumBases()) return; // Now add the base class descriptions. // Itanium C++ ABI 2.9.5p6c: // __base_info[] is an array of base class descriptions -- one for every // direct proper base. 
Each description is of the type: // // struct abi::__base_class_type_info { // public: // const __class_type_info *__base_type; // long __offset_flags; // // enum __offset_flags_masks { // __virtual_mask = 0x1, // __public_mask = 0x2, // __offset_shift = 8 // }; // }; // If we're in mingw and 'long' isn't wide enough for a pointer, use 'long // long' instead of 'long' for __offset_flags. libstdc++abi uses long long on // LLP64 platforms. // FIXME: Consider updating libc++abi to match, and extend this logic to all // LLP64 platforms. QualType OffsetFlagsTy = CGM.getContext().LongTy; const TargetInfo &TI = CGM.getContext().getTargetInfo(); if (TI.getTriple().isOSCygMing() && TI.getPointerWidth(0) > TI.getLongWidth()) OffsetFlagsTy = CGM.getContext().LongLongTy; llvm::Type *OffsetFlagsLTy = CGM.getTypes().ConvertType(OffsetFlagsTy); for (const auto &Base : RD->bases()) { // The __base_type member points to the RTTI for the base type. Fields.push_back(ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(Base.getType())); auto *BaseDecl = cast<CXXRecordDecl>(Base.getType()->castAs<RecordType>()->getDecl()); int64_t OffsetFlags = 0; // All but the lower 8 bits of __offset_flags are a signed offset. // For a non-virtual base, this is the offset in the object of the base // subobject. For a virtual base, this is the offset in the virtual table of // the virtual base offset for the virtual base referenced (negative). CharUnits Offset; if (Base.isVirtual()) Offset = CGM.getItaniumVTableContext().getVirtualBaseOffsetOffset(RD, BaseDecl); else { const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD); Offset = Layout.getBaseClassOffset(BaseDecl); }; OffsetFlags = uint64_t(Offset.getQuantity()) << 8; // The low-order byte of __offset_flags contains flags, as given by the // masks from the enumeration __offset_flags_masks. 
if (Base.isVirtual())
  OffsetFlags |= BCTI_Virtual;
if (Base.getAccessSpecifier() == AS_public)
  OffsetFlags |= BCTI_Public;

Fields.push_back(llvm::ConstantInt::get(OffsetFlagsLTy, OffsetFlags));
}
}

/// Compute the flags for a __pbase_type_info, and remove the corresponding
/// pieces from \p Type.
static unsigned extractPBaseFlags(ASTContext &Ctx, QualType &Type) {
  unsigned Flags = 0;

  if (Type.isConstQualified())
    Flags |= ItaniumRTTIBuilder::PTI_Const;
  if (Type.isVolatileQualified())
    Flags |= ItaniumRTTIBuilder::PTI_Volatile;
  if (Type.isRestrictQualified())
    Flags |= ItaniumRTTIBuilder::PTI_Restrict;
  Type = Type.getUnqualifiedType();

  // Itanium C++ ABI 2.9.5p7:
  //   When the abi::__pbase_type_info is for a direct or indirect pointer to an
  //   incomplete class type, the incomplete target type flag is set.
  if (ContainsIncompleteClassType(Type))
    Flags |= ItaniumRTTIBuilder::PTI_Incomplete;

  if (auto *Proto = Type->getAs<FunctionProtoType>()) {
    if (Proto->isNothrow()) {
      Flags |= ItaniumRTTIBuilder::PTI_Noexcept;
      // Strip the exception spec so the pointee type_info is for the plain
      // (non-noexcept) function type.
      Type = Ctx.getFunctionTypeWithExceptionSpec(Type, EST_None);
    }
  }

  return Flags;
}

/// BuildPointerTypeInfo - Build an abi::__pointer_type_info struct,
/// used for pointer types.
void ItaniumRTTIBuilder::BuildPointerTypeInfo(QualType PointeeTy) {
  // Itanium C++ ABI 2.9.5p7:
  //   __flags is a flag word describing the cv-qualification and other
  //   attributes of the type pointed to.
  unsigned Flags = extractPBaseFlags(CGM.getContext(), PointeeTy);

  llvm::Type *UnsignedIntLTy =
      CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);
  Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));

  // Itanium C++ ABI 2.9.5p7:
  //   __pointee is a pointer to the std::type_info derivation for the
  //   unqualified type being pointed to.
  llvm::Constant *PointeeTypeInfo =
      ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(PointeeTy);
  Fields.push_back(PointeeTypeInfo);
}

/// BuildPointerToMemberTypeInfo - Build an abi::__pointer_to_member_type_info
/// struct, used for member pointer types.
void ItaniumRTTIBuilder::BuildPointerToMemberTypeInfo(const MemberPointerType *Ty) { QualType PointeeTy = Ty->getPointeeType(); // Itanium C++ ABI 2.9.5p7: // __flags is a flag word describing the cv-qualification and other // attributes of the type pointed to. unsigned Flags = extractPBaseFlags(CGM.getContext(), PointeeTy); const RecordType *ClassType = cast<RecordType>(Ty->getClass()); if (IsIncompleteClassType(ClassType)) Flags |= PTI_ContainingClassIncomplete; llvm::Type *UnsignedIntLTy = CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy); Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags)); // Itanium C++ ABI 2.9.5p7: // __pointee is a pointer to the std::type_info derivation for the // unqualified type being pointed to. llvm::Constant *PointeeTypeInfo = ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(PointeeTy); Fields.push_back(PointeeTypeInfo); // Itanium C++ ABI 2.9.5p9: // __context is a pointer to an abi::__class_type_info corresponding to the // class type containing the member pointed to // (e.g., the "A" in "int A::*"). Fields.push_back( ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(QualType(ClassType, 0))); } llvm::Constant *ItaniumCXXABI::getAddrOfRTTIDescriptor(QualType Ty) { return ItaniumRTTIBuilder(*this).BuildTypeInfo(Ty); } void ItaniumCXXABI::EmitFundamentalRTTIDescriptors(const CXXRecordDecl *RD) { // Types added here must also be added to TypeInfoIsInStandardLibrary. 
  // Fundamental types for which the C++ runtime library is expected to
  // provide typeinfo; emitted here (with the record's storage class and
  // visibility) when we are the ones defining them.
  QualType FundamentalTypes[] = {
      getContext().VoidTy,             getContext().NullPtrTy,
      getContext().BoolTy,             getContext().WCharTy,
      getContext().CharTy,             getContext().UnsignedCharTy,
      getContext().SignedCharTy,       getContext().ShortTy,
      getContext().UnsignedShortTy,    getContext().IntTy,
      getContext().UnsignedIntTy,      getContext().LongTy,
      getContext().UnsignedLongTy,     getContext().LongLongTy,
      getContext().UnsignedLongLongTy, getContext().Int128Ty,
      getContext().UnsignedInt128Ty,   getContext().HalfTy,
      getContext().FloatTy,            getContext().DoubleTy,
      getContext().LongDoubleTy,       getContext().Float128Ty,
      getContext().Char8Ty,            getContext().Char16Ty,
      getContext().Char32Ty
  };
  llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass =
      RD->hasAttr<DLLExportAttr>()
          ? llvm::GlobalValue::DLLExportStorageClass
          : llvm::GlobalValue::DefaultStorageClass;
  llvm::GlobalValue::VisibilityTypes Visibility =
      CodeGenModule::GetLLVMVisibility(RD->getVisibility());
  for (const QualType &FundamentalType : FundamentalTypes) {
    // For each fundamental type T, emit typeinfo for T, T* and const T*.
    QualType PointerType = getContext().getPointerType(FundamentalType);
    QualType PointerTypeConst = getContext().getPointerType(
        FundamentalType.withConst());
    for (QualType Type : {FundamentalType, PointerType, PointerTypeConst})
      ItaniumRTTIBuilder(*this).BuildTypeInfo(
          Type, llvm::GlobalValue::ExternalLinkage,
          Visibility, DLLStorageClass);
  }
}

/// What sort of uniqueness rules should we use for the RTTI for the
/// given type?
ItaniumCXXABI::RTTIUniquenessKind ItaniumCXXABI::classifyRTTIUniqueness(
    QualType CanTy, llvm::GlobalValue::LinkageTypes Linkage) const {
  if (shouldRTTIBeUnique())
    return RUK_Unique;

  // It's only necessary for linkonce_odr or weak_odr linkage.
  if (Linkage != llvm::GlobalValue::LinkOnceODRLinkage &&
      Linkage != llvm::GlobalValue::WeakODRLinkage)
    return RUK_Unique;

  // It's only necessary with default visibility.
  if (CanTy->getVisibility() != DefaultVisibility)
    return RUK_Unique;

  // If we're not required to publish this symbol, hide it.
  if (Linkage == llvm::GlobalValue::LinkOnceODRLinkage)
    return RUK_NonUniqueHidden;

  // If we're required to publish this symbol, as we might be under an
  // explicit instantiation, leave it with default visibility but
  // enable string-comparisons.
  assert(Linkage == llvm::GlobalValue::WeakODRLinkage);
  return RUK_NonUniqueVisible;
}

// Find out how to codegen the complete destructor and constructor
namespace {
enum class StructorCodegen { Emit, RAUW, Alias, COMDAT };
}

/// Decide how the complete-object structor for \p MD should be produced:
/// emitted outright, replaced via RAUW, emitted as a true IR alias, or
/// emitted into a COMDAT group with the base-object variant.
static StructorCodegen getCodegenToUse(CodeGenModule &CGM,
                                       const CXXMethodDecl *MD) {
  if (!CGM.getCodeGenOpts().CXXCtorDtorAliases)
    return StructorCodegen::Emit;

  // The complete and base structors are not equivalent if there are any virtual
  // bases, so emit separate functions.
  if (MD->getParent()->getNumVBases())
    return StructorCodegen::Emit;

  GlobalDecl AliasDecl;
  if (const auto *DD = dyn_cast<CXXDestructorDecl>(MD)) {
    AliasDecl = GlobalDecl(DD, Dtor_Complete);
  } else {
    const auto *CD = cast<CXXConstructorDecl>(MD);
    AliasDecl = GlobalDecl(CD, Ctor_Complete);
  }
  llvm::GlobalValue::LinkageTypes Linkage = CGM.getFunctionLinkage(AliasDecl);

  if (llvm::GlobalValue::isDiscardableIfUnused(Linkage))
    return StructorCodegen::RAUW;

  // FIXME: Should we allow available_externally aliases?
  if (!llvm::GlobalAlias::isValidLinkage(Linkage))
    return StructorCodegen::RAUW;

  if (llvm::GlobalValue::isWeakForLinker(Linkage)) {
    // Only ELF and wasm support COMDATs with arbitrary names (C5/D5).
    if (CGM.getTarget().getTriple().isOSBinFormatELF() ||
        CGM.getTarget().getTriple().isOSBinFormatWasm())
      return StructorCodegen::COMDAT;
    return StructorCodegen::Emit;
  }

  return StructorCodegen::Alias;
}

/// Emit \p AliasDecl as an IR alias for \p TargetDecl, redirecting any
/// existing declaration's uses to the new alias.
static void emitConstructorDestructorAlias(CodeGenModule &CGM,
                                           GlobalDecl AliasDecl,
                                           GlobalDecl TargetDecl) {
  llvm::GlobalValue::LinkageTypes Linkage = CGM.getFunctionLinkage(AliasDecl);

  StringRef MangledName = CGM.getMangledName(AliasDecl);
  llvm::GlobalValue *Entry = CGM.GetGlobalValue(MangledName);
  // A definition already exists; nothing to do.
  if (Entry && !Entry->isDeclaration())
    return;

  auto *Aliasee = cast<llvm::GlobalValue>(CGM.GetAddrOfGlobal(TargetDecl));

  // Create the alias with no name.
  auto *Alias = llvm::GlobalAlias::create(Linkage, "", Aliasee);

  // Constructors and destructors are always unnamed_addr.
  Alias->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);

  // Switch any previous uses to the alias.
  if (Entry) {
    assert(Entry->getType() == Aliasee->getType() &&
           "declaration exists with different type");
    Alias->takeName(Entry);
    Entry->replaceAllUsesWith(Alias);
    Entry->eraseFromParent();
  } else {
    Alias->setName(MangledName);
  }

  // Finally, set up the alias with its proper name and attributes.
  CGM.SetCommonAttributes(AliasDecl, Alias);
}

/// Emit the constructor or destructor variant named by \p GD, using an
/// alias/RAUW/COMDAT strategy chosen by getCodegenToUse when possible.
void ItaniumCXXABI::emitCXXStructor(GlobalDecl GD) {
  auto *MD = cast<CXXMethodDecl>(GD.getDecl());
  auto *CD = dyn_cast<CXXConstructorDecl>(MD);
  const CXXDestructorDecl *DD = CD ? nullptr : cast<CXXDestructorDecl>(MD);

  StructorCodegen CGType = getCodegenToUse(CGM, MD);

  if (CD ?
          GD.getCtorType() == Ctor_Complete :
          GD.getDtorType() == Dtor_Complete) {
    // The complete variant can often be expressed in terms of the base
    // variant (no virtual bases — checked in getCodegenToUse).
    GlobalDecl BaseDecl;
    if (CD)
      BaseDecl = GD.getWithCtorType(Ctor_Base);
    else
      BaseDecl = GD.getWithDtorType(Dtor_Base);

    if (CGType == StructorCodegen::Alias || CGType == StructorCodegen::COMDAT) {
      emitConstructorDestructorAlias(CGM, GD, BaseDecl);
      return;
    }

    if (CGType == StructorCodegen::RAUW) {
      StringRef MangledName = CGM.getMangledName(GD);
      auto *Aliasee = CGM.GetAddrOfGlobal(BaseDecl);
      CGM.addReplacement(MangledName, Aliasee);
      return;
    }
  }

  // The base destructor is equivalent to the base destructor of its
  // base class if there is exactly one non-virtual base class with a
  // non-trivial destructor, there are no fields with a non-trivial
  // destructor, and the body of the destructor is trivial.
  if (DD && GD.getDtorType() == Dtor_Base &&
      CGType != StructorCodegen::COMDAT &&
      !CGM.TryEmitBaseDestructorAsAlias(DD))
    return;

  // FIXME: The deleting destructor is equivalent to the selected operator
  // delete if:
  //  * either the delete is a destroying operator delete or the destructor
  //    would be trivial if it weren't virtual,
  //  * the conversion from the 'this' parameter to the first parameter of the
  //    destructor is equivalent to a bitcast,
  //  * the destructor does not have an implicit "this" return, and
  //  * the operator delete has the same calling convention and IR function
  //    type as the destructor.
  // In such cases we should try to emit the deleting dtor as an alias to the
  // selected 'operator delete'.
  llvm::Function *Fn = CGM.codegenCXXStructor(GD);

  if (CGType == StructorCodegen::COMDAT) {
    // Put the complete and base variants into a shared COMDAT group keyed
    // by the C5/D5 "comdat" mangling.
    SmallString<256> Buffer;
    llvm::raw_svector_ostream Out(Buffer);
    if (DD)
      getMangleContext().mangleCXXDtorComdat(DD, Out);
    else
      getMangleContext().mangleCXXCtorComdat(CD, Out);
    llvm::Comdat *C = CGM.getModule().getOrInsertComdat(Out.str());
    Fn->setComdat(C);
  } else {
    CGM.maybeSetTrivialComdat(*MD, *Fn);
  }
}

static llvm::FunctionCallee getBeginCatchFn(CodeGenModule &CGM) {
  // void *__cxa_begin_catch(void*);
  llvm::FunctionType *FTy = llvm::FunctionType::get(
      CGM.Int8PtrTy, CGM.Int8PtrTy, /*isVarArg=*/false);

  return CGM.CreateRuntimeFunction(FTy, "__cxa_begin_catch");
}

static llvm::FunctionCallee getEndCatchFn(CodeGenModule &CGM) {
  // void __cxa_end_catch();
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);

  return CGM.CreateRuntimeFunction(FTy, "__cxa_end_catch");
}

static llvm::FunctionCallee getGetExceptionPtrFn(CodeGenModule &CGM) {
  // void *__cxa_get_exception_ptr(void*);
  llvm::FunctionType *FTy = llvm::FunctionType::get(
      CGM.Int8PtrTy, CGM.Int8PtrTy, /*isVarArg=*/false);

  return CGM.CreateRuntimeFunction(FTy, "__cxa_get_exception_ptr");
}

namespace {
  /// A cleanup to call __cxa_end_catch.  In many cases, the caught
  /// exception type lets us state definitively that the thrown exception
  /// type does not have a destructor.  In particular:
  ///   - Catch-alls tell us nothing, so we have to conservatively
  ///     assume that the thrown exception might have a destructor.
  ///   - Catches by reference behave according to their base types.
  ///   - Catches of non-record types will only trigger for exceptions
  ///     of non-record types, which never have destructors.
  ///   - Catches of record types can trigger for arbitrary subclasses
  ///     of the caught type, so we have to assume the actual thrown
  ///     exception type might have a throwing destructor, even if the
  ///     caught type's destructor is trivial or nothrow.
  struct CallEndCatch final : EHScopeStack::Cleanup {
    CallEndCatch(bool MightThrow) : MightThrow(MightThrow) {}
    bool MightThrow;

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      // When the caught object's destructor provably can't throw, a plain
      // nounwind call suffices; otherwise the call must be emitted as an
      // invoke so an exception thrown from it unwinds correctly.
      if (!MightThrow) {
        CGF.EmitNounwindRuntimeCall(getEndCatchFn(CGF.CGM));
        return;
      }

      CGF.EmitRuntimeCallOrInvoke(getEndCatchFn(CGF.CGM));
    }
  };
}

/// Emits a call to __cxa_begin_catch and enters a cleanup to call
/// __cxa_end_catch.
///
/// \param EndMightThrow - true if __cxa_end_catch might throw
static llvm::Value *CallBeginCatch(CodeGenFunction &CGF,
                                   llvm::Value *Exn,
                                   bool EndMightThrow) {
  llvm::CallInst *call =
    CGF.EmitNounwindRuntimeCall(getBeginCatchFn(CGF.CGM), Exn);

  CGF.EHStack.pushCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow);

  return call;
}

/// A "special initializer" callback for initializing a catch
/// parameter during catch initialization.
static void InitCatchParam(CodeGenFunction &CGF,
                           const VarDecl &CatchParam,
                           Address ParamAddr,
                           SourceLocation Loc) {
  // Load the exception from where the landing pad saved it.
  llvm::Value *Exn = CGF.getExceptionFromSlot();

  CanQualType CatchType =
    CGF.CGM.getContext().getCanonicalType(CatchParam.getType());
  llvm::Type *LLVMCatchTy = CGF.ConvertTypeForMem(CatchType);

  // If we're catching by reference, we can just cast the object
  // pointer to the appropriate pointer.
  if (isa<ReferenceType>(CatchType)) {
    QualType CaughtType = cast<ReferenceType>(CatchType)->getPointeeType();
    bool EndCatchMightThrow = CaughtType->isRecordType();

    // __cxa_begin_catch returns the adjusted object pointer.
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, EndCatchMightThrow);

    // We have no way to tell the personality function that we're
    // catching by reference, so if we're catching a pointer,
    // __cxa_begin_catch will actually return that pointer by value.
    if (const PointerType *PT = dyn_cast<PointerType>(CaughtType)) {
      QualType PointeeType = PT->getPointeeType();

      // When catching by reference, generally we should just ignore
      // this by-value pointer and use the exception object instead.
      if (!PointeeType->isRecordType()) {

        // Exn points to the struct _Unwind_Exception header, which
        // we have to skip past in order to reach the exception data.
        unsigned HeaderSize =
          CGF.CGM.getTargetCodeGenInfo().getSizeOfUnwindException();
        AdjustedExn = CGF.Builder.CreateConstGEP1_32(Exn, HeaderSize);

      // However, if we're catching a pointer-to-record type that won't
      // work, because the personality function might have adjusted
      // the pointer.  There's actually no way for us to fully satisfy
      // the language/ABI contract here:  we can't use Exn because it
      // might have the wrong adjustment, but we can't use the by-value
      // pointer because it's off by a level of abstraction.
      //
      // The current solution is to dump the adjusted pointer into an
      // alloca, which breaks language semantics (because changing the
      // pointer doesn't change the exception) but at least works.
      // The better solution would be to filter out non-exact matches
      // and rethrow them, but this is tricky because the rethrow
      // really needs to be catchable by other sites at this landing
      // pad.  The best solution is to fix the personality function.
      } else {
        // Pull the pointer for the reference type off.
        llvm::Type *PtrTy =
          cast<llvm::PointerType>(LLVMCatchTy)->getElementType();

        // Create the temporary and write the adjusted pointer into it.
        Address ExnPtrTmp =
          CGF.CreateTempAlloca(PtrTy, CGF.getPointerAlign(), "exn.byref.tmp");
        llvm::Value *Casted = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);
        CGF.Builder.CreateStore(Casted, ExnPtrTmp);

        // Bind the reference to the temporary.
        AdjustedExn = ExnPtrTmp.getPointer();
      }
    }

    llvm::Value *ExnCast =
      CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.byref");
    CGF.Builder.CreateStore(ExnCast, ParamAddr);
    return;
  }

  // Scalars and complexes.
  TypeEvaluationKind TEK = CGF.getEvaluationKind(CatchType);
  if (TEK != TEK_Aggregate) {
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, false);

    // If the catch type is a pointer type, __cxa_begin_catch returns
    // the pointer by value.
    if (CatchType->hasPointerRepresentation()) {
      llvm::Value *CastExn =
        CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.casted");

      switch (CatchType.getQualifiers().getObjCLifetime()) {
      case Qualifiers::OCL_Strong:
        CastExn = CGF.EmitARCRetainNonBlock(CastExn);
        LLVM_FALLTHROUGH;

      case Qualifiers::OCL_None:
      case Qualifiers::OCL_ExplicitNone:
      case Qualifiers::OCL_Autoreleasing:
        CGF.Builder.CreateStore(CastExn, ParamAddr);
        return;

      case Qualifiers::OCL_Weak:
        CGF.EmitARCInitWeak(ParamAddr, CastExn);
        return;
      }
      llvm_unreachable("bad ownership qualifier!");
    }

    // Otherwise, it returns a pointer into the exception object.
    llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok
    llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);

    LValue srcLV = CGF.MakeNaturalAlignAddrLValue(Cast, CatchType);
    LValue destLV = CGF.MakeAddrLValue(ParamAddr, CatchType);
    switch (TEK) {
    case TEK_Complex:
      CGF.EmitStoreOfComplex(CGF.EmitLoadOfComplex(srcLV, Loc), destLV,
                             /*init*/ true);
      return;
    case TEK_Scalar: {
      llvm::Value *ExnLoad = CGF.EmitLoadOfScalar(srcLV, Loc);
      CGF.EmitStoreOfScalar(ExnLoad, destLV, /*init*/ true);
      return;
    }
    case TEK_Aggregate:
      llvm_unreachable("evaluation kind filtered out!");
    }
    llvm_unreachable("bad evaluation kind");
  }

  assert(isa<RecordType>(CatchType) && "unexpected catch type!");
  auto catchRD = CatchType->getAsCXXRecordDecl();
  CharUnits caughtExnAlignment = CGF.CGM.getClassPointerAlignment(catchRD);

  llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok

  // Check for a copy expression.  If we don't have a copy expression,
  // that means a trivial copy is okay.
  const Expr *copyExpr = CatchParam.getInit();
  if (!copyExpr) {
    llvm::Value *rawAdjustedExn = CallBeginCatch(CGF, Exn, true);
    Address adjustedExn(CGF.Builder.CreateBitCast(rawAdjustedExn, PtrTy),
                        caughtExnAlignment);
    LValue Dest = CGF.MakeAddrLValue(ParamAddr, CatchType);
    LValue Src = CGF.MakeAddrLValue(adjustedExn, CatchType);
    CGF.EmitAggregateCopy(Dest, Src, CatchType, AggValueSlot::DoesNotOverlap);
    return;
  }

  // We have to call __cxa_get_exception_ptr to get the adjusted
  // pointer before copying.
  llvm::CallInst *rawAdjustedExn =
    CGF.EmitNounwindRuntimeCall(getGetExceptionPtrFn(CGF.CGM), Exn);

  // Cast that to the appropriate type.
  Address adjustedExn(CGF.Builder.CreateBitCast(rawAdjustedExn, PtrTy),
                      caughtExnAlignment);

  // The copy expression is defined in terms of an OpaqueValueExpr.
  // Find it and map it to the adjusted expression.
  CodeGenFunction::OpaqueValueMapping
    opaque(CGF, OpaqueValueExpr::findInCopyConstruct(copyExpr),
           CGF.MakeAddrLValue(adjustedExn, CatchParam.getType()));

  // Call the copy ctor in a terminate scope.
  CGF.EHStack.pushTerminate();

  // Perform the copy construction.
  CGF.EmitAggExpr(copyExpr,
                  AggValueSlot::forAddr(ParamAddr, Qualifiers(),
                                        AggValueSlot::IsNotDestructed,
                                        AggValueSlot::DoesNotNeedGCBarriers,
                                        AggValueSlot::IsNotAliased,
                                        AggValueSlot::DoesNotOverlap));

  // Leave the terminate scope.
  CGF.EHStack.popTerminate();

  // Undo the opaque value mapping.
  opaque.pop();

  // Finally we can call __cxa_begin_catch.
  CallBeginCatch(CGF, Exn, true);
}

/// Begins a catch statement by initializing the catch variable and
/// calling __cxa_begin_catch.
void ItaniumCXXABI::emitBeginCatch(CodeGenFunction &CGF,
                                   const CXXCatchStmt *S) {
  // We have to be very careful with the ordering of cleanups here:
  //   C++ [except.throw]p4:
  //     The destruction [of the exception temporary] occurs
  //     immediately after the destruction of the object declared in
  //     the exception-declaration in the handler.
  //
  // So the precise ordering is:
  //   1.  Construct catch variable.
  //   2.  __cxa_begin_catch
  //   3.  Enter __cxa_end_catch cleanup
  //   4.  Enter dtor cleanup
  //
  // We do this by using a slightly abnormal initialization process.
  // Delegation sequence:
  //   - ExitCXXTryStmt opens a RunCleanupsScope
  //   - EmitAutoVarAlloca creates the variable and debug info
  //       - InitCatchParam initializes the variable from the exception
  //       - CallBeginCatch calls __cxa_begin_catch
  //       - CallBeginCatch enters the __cxa_end_catch cleanup
  //   - EmitAutoVarCleanups enters the variable destructor cleanup
  //   - EmitCXXTryStmt emits the code for the catch body
  //   - EmitCXXTryStmt close the RunCleanupsScope

  VarDecl *CatchParam = S->getExceptionDecl();
  if (!CatchParam) {
    // catch (...) — no variable to initialize; just begin the catch.
    llvm::Value *Exn = CGF.getExceptionFromSlot();
    CallBeginCatch(CGF, Exn, true);
    return;
  }

  // Emit the local.
  CodeGenFunction::AutoVarEmission var = CGF.EmitAutoVarAlloca(*CatchParam);
  InitCatchParam(CGF, *CatchParam, var.getObjectAddress(CGF), S->getBeginLoc());
  CGF.EmitAutoVarCleanups(var);
}

/// Get or define the following function:
///   void @__clang_call_terminate(i8* %exn) nounwind noreturn
/// This code is used only in C++.
static llvm::FunctionCallee getClangCallTerminateFn(CodeGenModule &CGM) {
  llvm::FunctionType *fnTy =
    llvm::FunctionType::get(CGM.VoidTy, CGM.Int8PtrTy, /*isVarArg=*/false);
  llvm::FunctionCallee fnRef = CGM.CreateRuntimeFunction(
      fnTy, "__clang_call_terminate", llvm::AttributeList(), /*Local=*/true);
  llvm::Function *fn =
      cast<llvm::Function>(fnRef.getCallee()->stripPointerCasts());
  if (fn->empty()) {
    // First reference: fill in the body.
    fn->setDoesNotThrow();
    fn->setDoesNotReturn();

    // What we really want is to massively penalize inlining without
    // forbidding it completely.  The difference between that and
    // 'noinline' is negligible.
    fn->addFnAttr(llvm::Attribute::NoInline);

    // Allow this function to be shared across translation units, but
    // we don't want it to turn into an exported symbol.
    fn->setLinkage(llvm::Function::LinkOnceODRLinkage);
    fn->setVisibility(llvm::Function::HiddenVisibility);
    if (CGM.supportsCOMDAT())
      fn->setComdat(CGM.getModule().getOrInsertComdat(fn->getName()));

    // Set up the function.
    llvm::BasicBlock *entry =
      llvm::BasicBlock::Create(CGM.getLLVMContext(), "", fn);
    CGBuilderTy builder(CGM, entry);

    // Pull the exception pointer out of the parameter list.
    llvm::Value *exn = &*fn->arg_begin();

    // Call __cxa_begin_catch(exn).
    llvm::CallInst *catchCall = builder.CreateCall(getBeginCatchFn(CGM), exn);
    catchCall->setDoesNotThrow();
    catchCall->setCallingConv(CGM.getRuntimeCC());

    // Call std::terminate().
    llvm::CallInst *termCall = builder.CreateCall(CGM.getTerminateFn());
    termCall->setDoesNotThrow();
    termCall->setDoesNotReturn();
    termCall->setCallingConv(CGM.getRuntimeCC());

    // std::terminate cannot return.
    builder.CreateUnreachable();
  }
  return fnRef;
}

llvm::CallInst *
ItaniumCXXABI::emitTerminateForUnexpectedException(CodeGenFunction &CGF,
                                                   llvm::Value *Exn) {
  // In C++, we want to call __cxa_begin_catch() before terminating.
  if (Exn) {
    assert(CGF.CGM.getLangOpts().CPlusPlus);
    return CGF.EmitNounwindRuntimeCall(getClangCallTerminateFn(CGF.CGM), Exn);
  }
  return CGF.EmitNounwindRuntimeCall(CGF.CGM.getTerminateFn());
}

std::pair<llvm::Value *, const CXXRecordDecl *>
ItaniumCXXABI::LoadVTablePtr(CodeGenFunction &CGF, Address This,
                             const CXXRecordDecl *RD) {
  return {CGF.GetVTablePtr(This, CGM.Int8PtrTy, RD), RD};
}

void WebAssemblyCXXABI::emitBeginCatch(CodeGenFunction &CGF,
                                       const CXXCatchStmt *C) {
  // With wasm EH, leaving the catch must emit a catchret for the current
  // funclet pad before delegating to the common Itanium lowering.
  if (CGF.getTarget().hasFeature("exception-handling"))
    CGF.EHStack.pushCleanup<CatchRetScope>(
        NormalCleanup, cast<llvm::CatchPadInst>(CGF.CurrentFuncletPad));
  ItaniumCXXABI::emitBeginCatch(CGF, C);
}

/// Register a global destructor as best as we know how.
void XLCXXABI::registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
                                  llvm::FunctionCallee dtor,
                                  llvm::Constant *addr) {
  if (D.getTLSKind() != VarDecl::TLS_None)
    llvm::report_fatal_error("thread local storage not yet implemented on AIX");

  // Create __dtor function for the var decl.
  llvm::Function *dtorStub = CGF.createAtExitStub(D, dtor, addr);

  // Register above __dtor with atexit().
  CGF.registerGlobalDtorWithAtExit(dtorStub);

  // Emit __finalize function to unregister __dtor and (as appropriate) call
  // __dtor.
  emitCXXStermFinalizer(D, dtorStub, addr);
}

/// Emit the AIX-style "sterm finalizer" for \p D: a function that tries to
/// unregister \p dtorStub from atexit and, if the cleanup was still pending,
/// runs it directly.
void XLCXXABI::emitCXXStermFinalizer(const VarDecl &D, llvm::Function *dtorStub,
                                     llvm::Constant *addr) {
  llvm::FunctionType *FTy = llvm::FunctionType::get(CGM.VoidTy, false);
  SmallString<256> FnName;
  {
    llvm::raw_svector_ostream Out(FnName);
    getMangleContext().mangleDynamicStermFinalizer(&D, Out);
  }

  // Create the finalization action associated with a variable.
  const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
  llvm::Function *StermFinalizer = CGM.CreateGlobalInitOrCleanUpFunction(
      FTy, FnName.str(), FI, D.getLocation());

  CodeGenFunction CGF(CGM);

  CGF.StartFunction(GlobalDecl(), CGM.getContext().VoidTy, StermFinalizer, FI,
                    FunctionArgList(), D.getLocation(),
                    D.getInit()->getExprLoc());

  // The unatexit subroutine unregisters __dtor functions that were previously
  // registered by the atexit subroutine. If the referenced function is found,
  // the unatexit returns a value of 0, meaning that the cleanup is still
  // pending (and we should call the __dtor function).
  llvm::Value *V = CGF.unregisterGlobalDtorWithUnAtExit(dtorStub);

  llvm::Value *NeedsDestruct = CGF.Builder.CreateIsNull(V, "needs_destruct");

  llvm::BasicBlock *DestructCallBlock = CGF.createBasicBlock("destruct.call");
  llvm::BasicBlock *EndBlock = CGF.createBasicBlock("destruct.end");

  // Check if unatexit returns a value of 0. If it does, jump to
  // DestructCallBlock, otherwise jump to EndBlock directly.
  CGF.Builder.CreateCondBr(NeedsDestruct, DestructCallBlock, EndBlock);

  CGF.EmitBlock(DestructCallBlock);

  // Emit the call to dtorStub.
  llvm::CallInst *CI = CGF.Builder.CreateCall(dtorStub);

  // Make sure the call and the callee agree on calling convention.
  CI->setCallingConv(dtorStub->getCallingConv());

  CGF.EmitBlock(EndBlock);

  CGF.FinishFunction();

  assert(!D.getAttr<InitPriorityAttr>() &&
         "Prioritized sinit and sterm functions are not yet supported.");

  if (isTemplateInstantiation(D.getTemplateSpecializationKind()) ||
      getContext().GetGVALinkageForVariable(&D) == GVA_DiscardableODR)
    // According to C++ [basic.start.init]p2, class template static data
    // members (i.e., implicitly or explicitly instantiated specializations)
    // have unordered initialization. As a consequence, we can put them into
    // their own llvm.global_dtors entry.
    CGM.AddCXXStermFinalizerToGlobalDtor(StermFinalizer, 65535);
  else
    CGM.AddCXXStermFinalizerEntry(StermFinalizer);
}
google/llvm-propeller
clang/lib/CodeGen/ItaniumCXXABI.cpp
C++
apache-2.0
181,535
package com.huawei.esdk.fusioncompute.local.model.net;

import java.util.List;

/**
 * Detailed information about a subnet.
 * <p>
 * @since eSDK Cloud V100R003C50
 */
public class Subnet
{
    /**
     * Subnet identifier (URN).
     */
    private String urn;
    
    /**
     * URI used to access this subnet.
     */
    private String uri;
    
    /**
     * Subnet name; length [1,256].
     */
    private String name;
    
    /**
     * VLAN ID used by the subnet; range: 1 - 4094.
     */
    private Integer vlanId;
    
    /**
     * [Optional] Gateway IP address.
     */
    private String gateway;
    
    /**
     * Network IP address.
     */
    private String netAddr;
    
    /**
     * Network mask length; range: 1-31.
     */
    private Integer netMask;
    
    /**
     * [Optional] System-reserved IP address.
     * (Note: this field is dedicated to the all-in-one appliance. When the GM creates a subnet and
     * supplies this field, {@code gateway} need not be set; conversely, when {@code gateway} is set,
     * this field need not be.)
     */
    private String sysReserveIp;
    
    /**
     * [Optional] Reserved address ranges; the list holds at most 3 entries.
     * E.g. "192.168.0.1-192.168.0.5"
     */
    private List<String> reserveIps;
    
    /**
     * [Optional] Description; length [0,1024].
     */
    private String description;
    
    /**
     * Detailed DHCP option information.
     */
    private List<DhcpOption> dhcpOption;
    
    /**
     * Number of allocated addresses; populated in responses of the "query specified subnet" API.
     */
    private Integer allocateAddrNum;
    
    /**
     * Number of available addresses; populated in responses of the "query specified subnet" API.
     */
    private Integer availableAddrNum;
    
    public String getUrn()
    {
        return urn;
    }
    
    public void setUrn(String urn)
    {
        this.urn = urn;
    }
    
    public String getUri()
    {
        return uri;
    }
    
    public void setUri(String uri)
    {
        this.uri = uri;
    }
    
    public String getName()
    {
        return name;
    }
    
    public void setName(String name)
    {
        this.name = name;
    }
    
    public Integer getVlanId()
    {
        return vlanId;
    }
    
    public void setVlanId(Integer vlanId)
    {
        this.vlanId = vlanId;
    }
    
    public String getGateway()
    {
        return gateway;
    }
    
    public void setGateway(String gateway)
    {
        this.gateway = gateway;
    }
    
    public String getNetAddr()
    {
        return netAddr;
    }
    
    public void setNetAddr(String netAddr)
    {
        this.netAddr = netAddr;
    }
    
    public Integer getNetMask()
    {
        return netMask;
    }
    
    public void setNetMask(Integer netMask)
    {
        this.netMask = netMask;
    }
    
    public String getSysReserveIp()
    {
        return sysReserveIp;
    }
    
    public void setSysReserveIp(String sysReserveIp)
    {
        this.sysReserveIp = sysReserveIp;
    }
    
    public List<String> getReserveIps()
    {
        return reserveIps;
    }
    
    public void setReserveIps(List<String> reserveIps)
    {
        this.reserveIps = reserveIps;
    }
    
    public String getDescription()
    {
        return description;
    }
    
    public void setDescription(String description)
    {
        this.description = description;
    }
    
    public List<DhcpOption> getDhcpOption()
    {
        return dhcpOption;
    }
    
    public void setDhcpOption(List<DhcpOption> dhcpOption)
    {
        this.dhcpOption = dhcpOption;
    }
    
    public Integer getAllocateAddrNum()
    {
        return allocateAddrNum;
    }
    
    public void setAllocateAddrNum(Integer allocateAddrNum)
    {
        this.allocateAddrNum = allocateAddrNum;
    }
    
    public Integer getAvailableAddrNum()
    {
        return availableAddrNum;
    }
    
    public void setAvailableAddrNum(Integer availableAddrNum)
    {
        this.availableAddrNum = availableAddrNum;
    }
}
eSDK/esdk_cloud_fc_native_java
source/src/main/java/com/huawei/esdk/fusioncompute/local/model/net/Subnet.java
Java
apache-2.0
3,959
package org.ansj.recognition.arrimpl; import org.ansj.domain.Term; import org.ansj.recognition.TermArrRecognition; import org.ansj.util.MyStaticValue; import org.ansj.util.TermUtil; public class NumRecognition implements TermArrRecognition { /** * 数字+数字合并,zheng * * @param terms */ public void recognition(Term[] terms) { int length = terms.length - 1; Term from = null; Term to = null; Term temp = null; for (int i = 0; i < length; i++) { if (terms[i] == null) { continue; } else if (".".equals(terms[i].getName()) || ".".equals(terms[i].getName())) { // 如果是.前后都为数字进行特殊处理 to = terms[i].to(); from = terms[i].from(); if (from.termNatures().numAttr.flag && to.termNatures().numAttr.flag) { from.setName(from.getName() + "." + to.getName()); TermUtil.termLink(from, to.to()); terms[to.getOffe()] = null; terms[i] = null; i = from.getOffe() - 1; } continue; } else if (!terms[i].termNatures().numAttr.flag) { continue; } temp = terms[i]; // 将所有的数字合并 while ((temp = temp.to()).termNatures().numAttr.flag) { terms[i].setName(terms[i].getName() + temp.getName()); } // 如果是数字结尾 if (MyStaticValue.isQuantifierRecognition && temp.termNatures().numAttr.numEndFreq > 0) { terms[i].setName(terms[i].getName() + temp.getName()); temp = temp.to(); } // 如果不等,说明terms[i]发生了改变 if (terms[i].to() != temp) { TermUtil.termLink(terms[i], temp); // 将中间无用元素设置为null for (int j = i + 1; j < temp.getOffe(); j++) { terms[j] = null; } i = temp.getOffe() - 1; } } } }
waiteryee1/ansj_seg
src/main/java/org/ansj/recognition/arrimpl/NumRecognition.java
Java
apache-2.0
1,786
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Runtime.CompilerServices;
using System.Text.RegularExpressions;
using NodaTime;
using QuantConnect.Data.Consolidators;
using QuantConnect.Securities;

namespace QuantConnect.Data
{
    /// <summary>
    /// Subscription data required including the type of data.
    /// </summary>
    public class SubscriptionDataConfig
    {
        private Symbol _symbol;
        private string _mappedSymbol;
        // Permanent ticker id; kept so the Symbol can be rebuilt when MappedSymbol changes.
        private readonly string _sid;

        /// <summary>
        /// Type of data
        /// </summary>
        public readonly Type Type;

        /// <summary>
        /// Security type of this data subscription
        /// </summary>
        public readonly SecurityType SecurityType;

        /// <summary>
        /// Symbol of the asset we're requesting: this is really a perm tick!!
        /// </summary>
        public Symbol Symbol
        {
            get { return _symbol; }
        }

        /// <summary>
        /// Resolution of the asset we're requesting: second, minute or tick
        /// </summary>
        public readonly Resolution Resolution;

        /// <summary>
        /// Timespan increment between triggers of this data (derived from Resolution)
        /// </summary>
        public readonly TimeSpan Increment;

        /// <summary>
        /// True if we wish to fill forward old data when there are time gaps in the data feed.
        /// </summary>
        public readonly bool FillDataForward;

        /// <summary>
        /// Boolean flag: send data from extended (pre- and post-market) hours (equities setting only)
        /// </summary>
        public readonly bool ExtendedMarketHours;

        /// <summary>
        /// True if this subscription was added for the sole purpose of providing currency conversion rates via <see cref="CashBook.EnsureCurrencyDataFeeds"/>
        /// </summary>
        public readonly bool IsInternalFeed;

        /// <summary>
        /// True if this subscription is for custom user data, false for QC data
        /// </summary>
        public readonly bool IsCustomData;

        /// <summary>
        /// The sum of dividends accrued in this subscription, used for scaling total return prices
        /// </summary>
        public decimal SumOfDividends;

        /// <summary>
        /// Gets the normalization mode used for this subscription
        /// </summary>
        public DataNormalizationMode DataNormalizationMode = DataNormalizationMode.Adjusted;

        /// <summary>
        /// Price Scaling Factor:
        /// </summary>
        public decimal PriceScaleFactor;

        /// <summary>
        /// Symbol Mapping: When symbols change over time (e.g. CHASE-> JPM) need to update the symbol requested.
        /// Setting this also rebuilds the <see cref="Symbol"/> from the stored permanent ticker.
        /// </summary>
        public string MappedSymbol
        {
            get { return _mappedSymbol; }
            set
            {
                _mappedSymbol = value;
                _symbol = new Symbol(_sid, value);
            }
        }

        /// <summary>
        /// Gets the market / scope of the symbol
        /// </summary>
        public readonly string Market;

        /// <summary>
        /// Gets the time zone for this subscription
        /// </summary>
        public readonly DateTimeZone TimeZone;

        /// <summary>
        /// Consolidators that are registered with this subscription
        /// </summary>
        public readonly HashSet<IDataConsolidator> Consolidators;

        /// <summary>
        /// Constructor for Data Subscriptions
        /// </summary>
        /// <param name="objectType">Type of the data objects.</param>
        /// <param name="securityType">SecurityType Enum Set Equity/FOREX/Futures etc.</param>
        /// <param name="symbol">Symbol of the asset we're requesting</param>
        /// <param name="resolution">Resolution of the asset we're requesting</param>
        /// <param name="market">The market this subscription comes from</param>
        /// <param name="timeZone">The time zone the raw data is time stamped in</param>
        /// <param name="fillForward">Fill in gaps with historical data</param>
        /// <param name="extendedHours">Equities only - send in data from 4am - 8pm</param>
        /// <param name="isInternalFeed">Set to true if this subscription is added for the sole purpose of providing currency conversion rates,
        /// setting this flag to true will prevent the data from being sent into the algorithm's OnData methods</param>
        /// <param name="isCustom">True if this is user supplied custom data, false for normal QC data</param>
        public SubscriptionDataConfig(Type objectType,
            SecurityType securityType,
            Symbol symbol,
            Resolution resolution,
            string market,
            DateTimeZone timeZone,
            bool fillForward,
            bool extendedHours,
            bool isInternalFeed,
            bool isCustom = false)
        {
            Type = objectType;
            SecurityType = securityType;
            Resolution = resolution;
            _sid = symbol.Permtick;
            FillDataForward = fillForward;
            ExtendedMarketHours = extendedHours;
            PriceScaleFactor = 1;
            // Setting MappedSymbol also initializes _symbol via the property setter.
            MappedSymbol = symbol.Value;
            IsInternalFeed = isInternalFeed;
            IsCustomData = isCustom;
            Market = market;
            TimeZone = timeZone;
            Consolidators = new HashSet<IDataConsolidator>();

            // verify the market string contains letters a-Z
            if (string.IsNullOrWhiteSpace(market))
            {
                throw new ArgumentException("The market cannot be an empty string.");
            }
            if (!Regex.IsMatch(market, @"^[a-zA-Z]+$"))
            {
                throw new ArgumentException("The market must only contain letters A-Z.");
            }

            // Derive the bar increment from the resolution.
            switch (resolution)
            {
                case Resolution.Tick:
                    //Ticks are individual sales and fillforward doesn't apply.
                    Increment = TimeSpan.FromSeconds(0);
                    FillDataForward = false;
                    break;
                case Resolution.Second:
                    Increment = TimeSpan.FromSeconds(1);
                    break;
                case Resolution.Minute:
                    Increment = TimeSpan.FromMinutes(1);
                    break;
                case Resolution.Hour:
                    Increment = TimeSpan.FromHours(1);
                    break;
                case Resolution.Daily:
                    Increment = TimeSpan.FromDays(1);
                    break;
                default:
                    throw new InvalidEnumArgumentException("Unexpected Resolution: " + resolution);
            }
        }

        /// <summary>
        /// Normalizes the specified price based on the DataNormalizationMode
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public decimal GetNormalizedPrice(decimal price)
        {
            switch (DataNormalizationMode)
            {
                case DataNormalizationMode.Raw:
                    return price;

                // the price scale factor will be set accordingly based on the mode in update scale factors
                case DataNormalizationMode.Adjusted:
                case DataNormalizationMode.SplitAdjusted:
                    return price*PriceScaleFactor;

                case DataNormalizationMode.TotalReturn:
                    return (price*PriceScaleFactor) + SumOfDividends;

                default:
                    throw new ArgumentOutOfRangeException();
            }
        }
    }
}
desimonk/Lean
Common/Data/SubscriptionDataConfig.cs
C#
apache-2.0
8,452
;(function($){
/**
 * jqGrid extension for manipulating Grid Data
 * Tony Tomov tony@trirand.com
 * http://trirand.com/blog/
 * Dual licensed under the MIT and GPL licenses:
 * http://www.opensource.org/licenses/mit-license.php
 * http://www.gnu.org/licenses/gpl-2.0.html
**/
//jsHint options
/*global alert, $, jQuery */
"use strict";
$.jgrid.inlineEdit = $.jgrid.inlineEdit || {};
$.jgrid.extend({
//Editing
// Put the row `rowid` into inline-edit mode: the current cell values are
// stashed in $t.p.savedRow (for restoreRow) and editable cells are replaced
// with input elements built from each column's edittype/editoptions.
editRow : function(rowid,keys,oneditfunc,successfunc, url, extraparam, aftersavefunc,errorfunc, afterrestorefunc) {
	// Compatible mode old versions
	var o={}, args = $.makeArray(arguments).slice(1);
	if( $.type(args[0]) === "object" ) { o = args[0]; }
	else {
		if (typeof keys !== "undefined") { o.keys = keys; }
		if ($.isFunction(oneditfunc)) { o.oneditfunc = oneditfunc; }
		if ($.isFunction(successfunc)) { o.successfunc = successfunc; }
		if (typeof url !== "undefined") { o.url = url; }
		if (typeof extraparam !== "undefined") { o.extraparam = extraparam; }
		if ($.isFunction(aftersavefunc)) { o.aftersavefunc = aftersavefunc; }
		if ($.isFunction(errorfunc)) { o.errorfunc = errorfunc; }
		if ($.isFunction(afterrestorefunc)) { o.afterrestorefunc = afterrestorefunc; }
		// last two not as param, but as object (sorry)
		//if (typeof restoreAfterError !== "undefined") { o.restoreAfterError = restoreAfterError; }
		//if (typeof mtype !== "undefined") { o.mtype = mtype || "POST"; }
	}
	o = $.extend(true, {
		keys : false,
		oneditfunc: null,
		successfunc: null,
		url: null,
		extraparam: {},
		aftersavefunc: null,
		errorfunc: null,
		afterrestorefunc: null,
		restoreAfterError: true,
		mtype: "POST"
	}, $.jgrid.inlineEdit, o );
	// End compatible
	return this.each(function(){
		var $t = this, nm, tmp, editable, cnt=0, focus=null, svr={}, ind,cm;
		if (!$t.grid ) { return; }
		ind = $($t).jqGrid("getInd",rowid,true);
		if( ind === false ) {return;}
		editable = $(ind).attr("editable") || "0";
		// Only rows not already being edited and not marked not-editable qualify.
		if (editable == "0" && !$(ind).hasClass("not-editable-row")) {
			cm = $t.p.colModel;
			$('td[role="gridcell"]',ind).each( function(i) {
				nm = cm[i].name;
				var treeg = $t.p.treeGrid===true && nm == $t.p.ExpandColumn;
				if(treeg) { tmp = $("span:first",this).html();}
				else {
					try {
						tmp = $.unformat.call($t,this,{rowId:rowid, colModel:cm[i]},i);
					} catch (_) {
						tmp = ( cm[i].edittype && cm[i].edittype == 'textarea' ) ? $(this).text() : $(this).html();
					}
				}
				if ( nm != 'cb' && nm != 'subgrid' && nm != 'rn') {
					if($t.p.autoencode) { tmp = $.jgrid.htmlDecode(tmp); }
					svr[nm]=tmp;
					if(cm[i].editable===true) {
						if(focus===null) { focus = i; }
						if (treeg) { $("span:first",this).html(""); }
						else { $(this).html(""); }
						var opt = $.extend({},cm[i].editoptions || {},{id:rowid+"_"+nm,name:nm});
						if(!cm[i].edittype) { cm[i].edittype = "text"; }
						if(tmp == "&nbsp;" || tmp == "&#160;" || (tmp.length==1 && tmp.charCodeAt(0)==160) ) {tmp='';}
						var elc = $.jgrid.createEl.call($t,cm[i].edittype,opt,tmp,true,$.extend({},$.jgrid.ajaxOptions,$t.p.ajaxSelectOptions || {}));
						$(elc).addClass("editable");
						if(treeg) { $("span:first",this).append(elc); }
						else { $(this).append(elc); }
						//Again IE
						if(cm[i].edittype == "select" && typeof(cm[i].editoptions)!=="undefined" && cm[i].editoptions.multiple===true && typeof(cm[i].editoptions.dataUrl)==="undefined" && $.browser.msie) {
							$(elc).width($(elc).width());
						}
						cnt++;
					}
				}
			});
			if(cnt > 0) {
				svr.id = rowid;
				$t.p.savedRow.push(svr);
				$(ind).attr("editable","1");
				$("td:eq("+focus+") input",ind).focus();
				// keys:true wires Esc (27) to restoreRow and Enter (13) to saveRow.
				if(o.keys===true) {
					$(ind).bind("keydown",function(e) {
						if (e.keyCode === 27) {
							$($t).jqGrid("restoreRow",rowid, o.afterrestorefunc);
							if($t.p._inlinenav) {
								try { $($t).jqGrid('showAddEditButtons'); } catch (eer1) {}
							}
							return false;
						}
						if (e.keyCode === 13) {
							var ta = e.target;
							if(ta.tagName == 'TEXTAREA') { return true; }
							if( $($t).jqGrid("saveRow", rowid, o ) ) {
								if($t.p._inlinenav) {
									try { $($t).jqGrid('showAddEditButtons'); } catch (eer2) {}
								}
							}
							return false;
						}
					});
				}
				$($t).triggerHandler("jqGridInlineEditRow", [rowid, o]);
				if( $.isFunction(o.oneditfunc)) { o.oneditfunc.call($t, rowid); }
			}
		}
	});
},
// Collect the edited values from the row and persist them either locally
// (url === 'clientArray') or via a synchronous $.ajax POST to o.url/editurl.
// Returns true on success, false otherwise.
saveRow : function(rowid, successfunc, url, extraparam, aftersavefunc,errorfunc, afterrestorefunc) {
	// Compatible mode old versions
	var args = $.makeArray(arguments).slice(1), o = {};
	if( $.type(args[0]) === "object" ) { o = args[0]; }
	else {
		if ($.isFunction(successfunc)) { o.successfunc = successfunc; }
		if (typeof url !== "undefined") { o.url = url; }
		if (typeof extraparam !== "undefined") { o.extraparam = extraparam; }
		if ($.isFunction(aftersavefunc)) { o.aftersavefunc = aftersavefunc; }
		if ($.isFunction(errorfunc)) { o.errorfunc = errorfunc; }
		if ($.isFunction(afterrestorefunc)) { o.afterrestorefunc = afterrestorefunc; }
	}
	o = $.extend(true, {
		successfunc: null,
		url: null,
		extraparam: {},
		aftersavefunc: null,
		errorfunc: null,
		afterrestorefunc: null,
		restoreAfterError: true,
		mtype: "POST"
	}, $.jgrid.inlineEdit, o );
	// End compatible
	var success = false;
	var $t = this[0], nm, tmp={}, tmp2={}, tmp3= {}, editable, fr, cv, ind;
	if (!$t.grid ) { return success; }
	ind = $($t).jqGrid("getInd",rowid,true);
	if(ind === false) {return success;}
	editable = $(ind).attr("editable");
	o.url = o.url ? o.url : $t.p.editurl;
	if (editable==="1") {
		var cm;
		// tmp collects values to post, tmp2 display texts for selects,
		// tmp3 NullIfEmpty overrides merged in just before the request.
		$('td[role="gridcell"]',ind).each(function(i) {
			cm = $t.p.colModel[i];
			nm = cm.name;
			if ( nm != 'cb' && nm != 'subgrid' && cm.editable===true && nm != 'rn' && !$(this).hasClass('not-editable-cell')) {
				switch (cm.edittype) {
					case "checkbox":
						var cbv = ["Yes","No"];
						if(cm.editoptions ) { cbv = cm.editoptions.value.split(":"); }
						tmp[nm]= $("input",this).is(":checked") ? cbv[0] : cbv[1];
						break;
					case 'text':
					case 'password':
					case 'textarea':
					case "button" :
						tmp[nm]=$("input, textarea",this).val();
						break;
					case 'select':
						if(!cm.editoptions.multiple) {
							tmp[nm] = $("select option:selected",this).val();
							tmp2[nm] = $("select option:selected", this).text();
						} else {
							var sel = $("select",this), selectedText = [];
							tmp[nm] = $(sel).val();
							if(tmp[nm]) { tmp[nm]= tmp[nm].join(","); } else { tmp[nm] =""; }
							$("select option:selected",this).each(
								function(i,selected){ selectedText[i] = $(selected).text(); }
							);
							tmp2[nm] = selectedText.join(",");
						}
						if(cm.formatter && cm.formatter == 'select') { tmp2={}; }
						break;
					case 'custom' :
						try {
							if(cm.editoptions && $.isFunction(cm.editoptions.custom_value)) {
								tmp[nm] = cm.editoptions.custom_value.call($t, $(".customelement",this),'get');
								if (tmp[nm] === undefined) { throw "e2"; }
							} else { throw "e1"; }
						} catch (e) {
							if (e=="e1") { $.jgrid.info_dialog($.jgrid.errors.errcap,"function 'custom_value' "+$.jgrid.edit.msg.nodefined,$.jgrid.edit.bClose); }
							if (e=="e2") { $.jgrid.info_dialog($.jgrid.errors.errcap,"function 'custom_value' "+$.jgrid.edit.msg.novalue,$.jgrid.edit.bClose); }
							else { $.jgrid.info_dialog($.jgrid.errors.errcap,e.message,$.jgrid.edit.bClose); }
						}
						break;
				}
				cv = $.jgrid.checkValues(tmp[nm],i,$t);
				if(cv[0] === false) {
					cv[1] = tmp[nm] + " " + cv[1];
					return false;
				}
				if($t.p.autoencode) { tmp[nm] = $.jgrid.htmlEncode(tmp[nm]); }
				if(o.url !== 'clientArray' && cm.editoptions && cm.editoptions.NullIfEmpty === true) {
					if(tmp[nm] === "") { tmp3[nm] = 'null'; }
				}
			}
		});
		// Validation failed on some cell - report and bail out.
		if (cv[0] === false){
			try {
				var positions = $.jgrid.findPos($("#"+$.jgrid.jqID(rowid), $t.grid.bDiv)[0]);
				$.jgrid.info_dialog($.jgrid.errors.errcap,cv[1],$.jgrid.edit.bClose,{left:positions[0],top:positions[1]});
			} catch (e) {
				alert(cv[1]);
			}
			return success;
		}
		var idname, opers, oper;
		opers = $t.p.prmNames;
		oper = opers.oper;
		idname = opers.id;
		if(tmp) {
			tmp[oper] = opers.editoper;
			tmp[idname] = rowid;
			if(typeof($t.p.inlineData) == 'undefined') { $t.p.inlineData ={}; }
			tmp = $.extend({},tmp,$t.p.inlineData,o.extraparam);
		}
		if (o.url == 'clientArray') {
			// Local save: write values straight back into the grid data.
			tmp = $.extend({},tmp, tmp2);
			if($t.p.autoencode) {
				$.each(tmp,function(n,v){ tmp[n] = $.jgrid.htmlDecode(v); });
			}
			var resp = $($t).jqGrid("setRowData",rowid,tmp);
			$(ind).attr("editable","0");
			for( var k=0;k<$t.p.savedRow.length;k++) {
				if( $t.p.savedRow[k].id == rowid) {fr = k; break;}
			}
			if(fr >= 0) { $t.p.savedRow.splice(fr,1); }
			$($t).triggerHandler("jqGridInlineAfterSaveRow", [rowid, resp, tmp, o]);
			if( $.isFunction(o.aftersavefunc) ) { o.aftersavefunc.call($t, rowid,resp, o); }
			success = true;
			$(ind).unbind("keydown");
		} else {
			// Server save: show the loading element, post synchronously,
			// then update the row only when the server round-trip succeeds.
			$("#lui_"+$.jgrid.jqID($t.p.id)).show();
			tmp3 = $.extend({},tmp,tmp3);
			tmp3[idname] = $.jgrid.stripPref($t.p.idPrefix, tmp3[idname]);
			$.ajax($.extend({
				url:o.url,
				data: $.isFunction($t.p.serializeRowData) ? $t.p.serializeRowData.call($t, tmp3) : tmp3,
				type: o.mtype,
				async : false, //?!?
				complete: function(res,stat){
					$("#lui_"+$.jgrid.jqID($t.p.id)).hide();
					if (stat === "success"){
						var ret = true, sucret;
						sucret = $($t).triggerHandler("jqGridInlineSuccessSaveRow", [res, rowid, o]);
						if (!$.isArray(sucret)) {sucret = [true, tmp];}
						if (sucret[0] && $.isFunction(o.successfunc)) {sucret = o.successfunc.call($t, res);}
						if($.isArray(sucret)) {
							// expect array - status, data, rowid
							ret = sucret[0];
							tmp = sucret[1] ? sucret[1] : tmp;
						} else {
							ret = sucret;
						}
						if (ret===true) {
							if($t.p.autoencode) {
								$.each(tmp,function(n,v){ tmp[n] = $.jgrid.htmlDecode(v); });
							}
							tmp = $.extend({},tmp, tmp2);
							$($t).jqGrid("setRowData",rowid,tmp);
							$(ind).attr("editable","0");
							for( var k=0;k<$t.p.savedRow.length;k++) {
								if( $t.p.savedRow[k].id == rowid) {fr = k; break;}
							}
							if(fr >= 0) { $t.p.savedRow.splice(fr,1); }
							$($t).triggerHandler("jqGridInlineAfterSaveRow", [rowid, res, tmp, o]);
							if( $.isFunction(o.aftersavefunc) ) { o.aftersavefunc.call($t, rowid,res); }
							success = true;
							$(ind).unbind("keydown");
						} else {
							$($t).triggerHandler("jqGridInlineErrorSaveRow", [rowid, res, stat, null, o]);
							if($.isFunction(o.errorfunc) ) { o.errorfunc.call($t, rowid, res, stat, null); }
							if(o.restoreAfterError === true) {
								$($t).jqGrid("restoreRow",rowid, o.afterrestorefunc);
							}
						}
					}
				},
				error:function(res,stat,err){
					$("#lui_"+$.jgrid.jqID($t.p.id)).hide();
					$($t).triggerHandler("jqGridInlineErrorSaveRow", [rowid, res, stat, err, o]);
					if($.isFunction(o.errorfunc) ) {
						o.errorfunc.call($t, rowid, res, stat, err);
					} else {
						var rT = res.responseText || res.statusText;
						try {
							$.jgrid.info_dialog($.jgrid.errors.errcap,'<div class="ui-state-error">'+ rT +'</div>', $.jgrid.edit.bClose,{buttonalign:'right'});
						} catch(e) {
							alert(rT);
						}
					}
					if(o.restoreAfterError === true) {
						$($t).jqGrid("restoreRow",rowid, o.afterrestorefunc);
					}
				}
			}, $.jgrid.ajaxOptions, $t.p.ajaxRowOptions || {}));
		}
	}
	return success;
},
// Cancel inline editing of `rowid`, putting back the values stashed by
// editRow in $t.p.savedRow; a "jqgrid-new-row" (added but never saved)
// is deleted instead of restored.
restoreRow : function(rowid, afterrestorefunc) {
	// Compatible mode old versions
	var args = $.makeArray(arguments).slice(1), o={};
	if( $.type(args[0]) === "object" ) { o = args[0]; }
	else {
		if ($.isFunction(afterrestorefunc)) { o.afterrestorefunc = afterrestorefunc; }
	}
	o = $.extend(true, $.jgrid.inlineEdit, o );
	// End compatible
	return this.each(function(){
		var $t= this, fr, ind, ares={};
		if (!$t.grid ) { return; }
		ind = $($t).jqGrid("getInd",rowid,true);
		if(ind === false) {return;}
		for( var k=0;k<$t.p.savedRow.length;k++) {
			if( $t.p.savedRow[k].id == rowid) {fr = k; break;}
		}
		if(fr >= 0) {
			if($.isFunction($.fn.datepicker)) {
				try {
					$("input.hasDatepicker","#"+$.jgrid.jqID(ind.id)).datepicker('hide');
				} catch (e) {}
			}
			$.each($t.p.colModel, function(){
				if(this.editable === true && this.name in $t.p.savedRow[fr] ) {
					ares[this.name] = $t.p.savedRow[fr][this.name];
				}
			});
			$($t).jqGrid("setRowData",rowid,ares);
			$(ind).attr("editable","0").unbind("keydown");
			$t.p.savedRow.splice(fr,1);
			if($("#"+$.jgrid.jqID(rowid), "#"+$.jgrid.jqID($t.p.id)).hasClass("jqgrid-new-row")){
				setTimeout(function(){$($t).jqGrid("delRowData",rowid);},0);
			}
		}
		$($t).triggerHandler("jqGridInlineAfterRestoreRow", [rowid]);
		if ($.isFunction(o.afterrestorefunc)) { o.afterrestorefunc.call($t, rowid); }
	});
},
// Insert a new row (optionally prefilled from column defaultValue settings)
// and immediately open it for inline editing, or click the formatter's
// inline-edit button when useFormatter is set.
addRow : function ( p ) {
	p = $.extend(true, {
		rowID : "new_row",
		initdata : {},
		position :"first",
		useDefValues : true,
		useFormatter : false,
		addRowParams : {extraparam:{}}
	},p || {});
	return this.each(function(){
		if (!this.grid ) { return; }
		var $t = this;
		if(p.useDefValues === true) {
			$($t.p.colModel).each(function(){
				if( this.editoptions && this.editoptions.defaultValue ) {
					var opt = this.editoptions.defaultValue, tmp = $.isFunction(opt) ? opt.call($t) : opt;
					p.initdata[this.name] = tmp;
				}
			});
		}
		$($t).jqGrid('addRowData', p.rowID, p.initdata, p.position);
		p.rowID = $t.p.idPrefix + p.rowID;
		$("#"+$.jgrid.jqID(p.rowID), "#"+$.jgrid.jqID($t.p.id)).addClass("jqgrid-new-row");
		if(p.useFormatter) {
			$("#"+$.jgrid.jqID(p.rowID)+" .ui-inline-edit", "#"+$.jgrid.jqID($t.p.id)).click();
		} else {
			var opers = $t.p.prmNames, oper = opers.oper;
			p.addRowParams.extraparam[oper] = opers.addoper;
			$($t).jqGrid('editRow', p.rowID, p.addRowParams);
			$($t).jqGrid('setSelection', p.rowID);
		}
	});
},
// Build the add/edit/save/cancel navigator buttons for inline editing and
// wire their enabled/disabled state transitions.
inlineNav : function (elem, o) {
	o = $.extend({
		edit: true,
		editicon: "ui-icon-pencil",
		add: true,
		addicon:"ui-icon-plus",
		save: true,
		saveicon:"ui-icon-disk",
		cancel: true,
		cancelicon:"ui-icon-cancel",
		addParams : {useFormatter : false,rowID : "new_row"},
		editParams : {},
		restoreAfterSelect : true
	}, $.jgrid.nav, o ||{});
	return this.each(function(){
		if (!this.grid ) { return; }
		var $t = this, onSelect, gID = $.jgrid.jqID($t.p.id);
		$t.p._inlinenav = true;
		// detect the formatactions column
		if(o.addParams.useFormatter === true) {
			var cm = $t.p.colModel,i;
			for (i = 0; i<cm.length; i++) {
				if(cm[i].formatter && cm[i].formatter === "actions" ) {
					if(cm[i].formatoptions) {
						var defaults = {
							keys:false,
							onEdit : null,
							onSuccess: null,
							afterSave:null,
							onError: null,
							afterRestore: null,
							extraparam: {},
							url: null
						},
						ap = $.extend( defaults, cm[i].formatoptions );
						o.addParams.addRowParams = {
							"keys" : ap.keys,
							"oneditfunc" : ap.onEdit,
							"successfunc" : ap.onSuccess,
							"url" : ap.url,
							"extraparam" : ap.extraparam,
							// FIX: was ap.afterSavef - a property that does not exist
							// (the defaults object declares afterSave), so the
							// after-save callback was always silently dropped.
							"aftersavefunc" : ap.afterSave,
							"errorfunc": ap.onError,
							"afterrestorefunc" : ap.afterRestore
						};
					}
					break;
				}
			}
		}
		if(o.add) {
			$($t).jqGrid('navButtonAdd', elem,{
				caption : o.addtext,
				title : o.addtitle,
				buttonicon : o.addicon,
				id : $t.p.id+"_iladd",
				onClickButton : function () {
					$($t).jqGrid('addRow', o.addParams);
					if(!o.addParams.useFormatter) {
						$("#"+gID+"_ilsave").removeClass('ui-state-disabled');
						$("#"+gID+"_ilcancel").removeClass('ui-state-disabled');
						$("#"+gID+"_iladd").addClass('ui-state-disabled');
						$("#"+gID+"_iledit").addClass('ui-state-disabled');
					}
				}
			});
		}
		if(o.edit) {
			$($t).jqGrid('navButtonAdd', elem,{
				caption : o.edittext,
				title : o.edittitle,
				buttonicon : o.editicon,
				id : $t.p.id+"_iledit",
				onClickButton : function () {
					var sr = $($t).jqGrid('getGridParam','selrow');
					if(sr) {
						$($t).jqGrid('editRow', sr, o.editParams);
						$("#"+gID+"_ilsave").removeClass('ui-state-disabled');
						$("#"+gID+"_ilcancel").removeClass('ui-state-disabled');
						$("#"+gID+"_iladd").addClass('ui-state-disabled');
						$("#"+gID+"_iledit").addClass('ui-state-disabled');
					} else {
						$.jgrid.viewModal("#alertmod",{gbox:"#gbox_"+gID,jqm:true});$("#jqg_alrt").focus();
					}
				}
			});
		}
		if(o.save) {
			$($t).jqGrid('navButtonAdd', elem,{
				caption : o.savetext || '',
				title : o.savetitle || 'Save row',
				buttonicon : o.saveicon,
				id : $t.p.id+"_ilsave",
				onClickButton : function () {
					var sr = $t.p.savedRow[0].id;
					if(sr) {
						var opers = $t.p.prmNames, oper = opers.oper;
						if(!o.editParams.extraparam) { o.editParams.extraparam = {}; }
						// A jqgrid-new-row posts with the add operation, an
						// existing row with the edit operation.
						if($("#"+$.jgrid.jqID(sr), "#"+gID ).hasClass("jqgrid-new-row")) {
							o.editParams.extraparam[oper] = opers.addoper;
						} else {
							o.editParams.extraparam[oper] = opers.editoper;
						}
						if( $($t).jqGrid('saveRow', sr, o.editParams) ) {
							$($t).jqGrid('showAddEditButtons');
						}
					} else {
						$.jgrid.viewModal("#alertmod",{gbox:"#gbox_"+gID,jqm:true});$("#jqg_alrt").focus();
					}
				}
			});
			$("#"+gID+"_ilsave").addClass('ui-state-disabled');
		}
		if(o.cancel) {
			$($t).jqGrid('navButtonAdd', elem,{
				caption : o.canceltext || '',
				title : o.canceltitle || 'Cancel row editing',
				buttonicon : o.cancelicon,
				id : $t.p.id+"_ilcancel",
				onClickButton : function () {
					var sr = $t.p.savedRow[0].id;
					if(sr) {
						$($t).jqGrid('restoreRow', sr, o.editParams);
						$($t).jqGrid('showAddEditButtons');
					} else {
						$.jgrid.viewModal("#alertmod",{gbox:"#gbox_"+gID,jqm:true});$("#jqg_alrt").focus();
					}
				}
			});
			$("#"+gID+"_ilcancel").addClass('ui-state-disabled');
		}
		// Selecting another row cancels (or deletes, for the unsaved new row)
		// any edit in progress, chaining the user's own beforeSelectRow.
		if(o.restoreAfterSelect === true) {
			if($.isFunction($t.p.beforeSelectRow)) {
				onSelect = $t.p.beforeSelectRow;
			} else {
				onSelect = false;
			}
			$t.p.beforeSelectRow = function(id, stat) {
				var ret = true;
				if($t.p.savedRow.length > 0 && $t.p._inlinenav===true && ( id !== $t.p.selrow && $t.p.selrow !==null) ) {
					if($t.p.selrow == o.addParams.rowID ) {
						$($t).jqGrid('delRowData', $t.p.selrow);
					} else {
						$($t).jqGrid('restoreRow', $t.p.selrow, o.editParams);
					}
					$($t).jqGrid('showAddEditButtons');
				}
				if(onSelect) { ret = onSelect.call($t, id, stat); }
				return ret;
			};
		}
	});
},
// Reset the inline navigator to its idle state: save/cancel disabled,
// add/edit enabled.
showAddEditButtons : function() {
	return this.each(function(){
		if (!this.grid ) { return; }
		var gID = $.jgrid.jqID(this.p.id);
		$("#"+gID+"_ilsave").addClass('ui-state-disabled');
		$("#"+gID+"_ilcancel").addClass('ui-state-disabled');
		$("#"+gID+"_iladd").removeClass('ui-state-disabled');
		$("#"+gID+"_iledit").removeClass('ui-state-disabled');
	});
}
//end inline edit
});
})(jQuery);
fdcmessenger/framework
sampleWebApp/src/main/webapp/scripts/jqgrid/src/grid.inlinedit.js
JavaScript
apache-2.0
19,833
/* Copyright 2016 Google Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.api.codegen.viewmodel; import com.google.auto.value.AutoValue; @AutoValue public abstract class DynamicLangDefaultableParamView { public abstract String name(); public abstract String defaultValue(); public static Builder newBuilder() { return new AutoValue_DynamicLangDefaultableParamView.Builder(); } @AutoValue.Builder public static abstract class Builder { public abstract Builder name(String name); public abstract Builder defaultValue(String value); public abstract DynamicLangDefaultableParamView build(); } }
geigerj/toolkit
src/main/java/com/google/api/codegen/viewmodel/DynamicLangDefaultableParamView.java
Java
apache-2.0
1,165
/** * Copyright (C) 2012 Eric Bottard / Guillaume Lederrey (eric.bottard+ghpublic@gmail.com / guillaume.lederrey@gmail.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.social.bitbucket.api.impl; import org.springframework.social.bitbucket.api.BitBucketInvitation; import org.springframework.social.bitbucket.api.BitBucketPrivilege; import org.springframework.social.bitbucket.api.InvitationsOperations; import org.springframework.social.support.ParameterMap; import org.springframework.web.client.RestTemplate; /** * @author Cyprian Śniegota * @since 2.0.0 */ public class InvitationsTemplate extends AbstractBitBucketOperations implements InvitationsOperations { public InvitationsTemplate(RestTemplate restTemplate, boolean authorized) { super(restTemplate, authorized, V1); } @Override public final BitBucketInvitation sendInvitation(String accountName, String repoSlug, String emailAddress, BitBucketPrivilege perm) { return getRestTemplate().postForObject(buildUrl("/invitations/{accountname}/{repo_slug}/{emailaddress}"), new SendInvitationParametersHolder(perm), BitBucketInvitation.class, accountName, repoSlug, emailAddress); } private static final class SendInvitationParametersHolder extends ParameterMap { public SendInvitationParametersHolder(BitBucketPrivilege privilege) { add("permission", privilege.toString()); } } }
gehel/spring-social-bitbucket
src/main/java/org/springframework/social/bitbucket/api/impl/InvitationsTemplate.java
Java
apache-2.0
1,983
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!
// NOTE(review): this file is tool-generated; change the generator rather than
// hand-editing, or the edit will be lost on regeneration.

namespace Google.Cloud.Tpu.V1.Snippets
{
    // The [START]/[END] region tags below delimit the snippet extracted by the
    // documentation tooling.
    // [START tpu_v1_generated_Tpu_ListTensorFlowVersions_sync_flattened_resourceNames]
    using Google.Api.Gax;
    using Google.Cloud.Tpu.V1;
    using System;

    public sealed partial class GeneratedTpuClientSnippets
    {
        /// <summary>Snippet for ListTensorFlowVersions</summary>
        /// <remarks>
        /// This snippet has been automatically generated for illustrative purposes only.
        /// It may require modifications to work in your environment.
        /// </remarks>
        public void ListTensorFlowVersionsResourceNames()
        {
            // Create client
            TpuClient tpuClient = TpuClient.Create();
            // Initialize request argument(s)
            TensorFlowVersionName parent = TensorFlowVersionName.FromProjectLocationTensorFlowVersion("[PROJECT]", "[LOCATION]", "[TENSOR_FLOW_VERSION]");
            // Make the request
            PagedEnumerable<ListTensorFlowVersionsResponse, TensorFlowVersion> response = tpuClient.ListTensorFlowVersions(parent);

            // Iterate over all response items, lazily performing RPCs as required
            foreach (TensorFlowVersion item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }

            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (ListTensorFlowVersionsResponse page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (TensorFlowVersion item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<TensorFlowVersion> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (TensorFlowVersion item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
        }
    }
    // [END tpu_v1_generated_Tpu_ListTensorFlowVersions_sync_flattened_resourceNames]
}
googleapis/google-cloud-dotnet
apis/Google.Cloud.Tpu.V1/Google.Cloud.Tpu.V1.GeneratedSnippets/TpuClient.ListTensorFlowVersionsResourceNamesSnippet.g.cs
C#
apache-2.0
3,233
// Code generated by smithy-go-codegen DO NOT EDIT.

package ec2

import (
	"context"
	"fmt"
	awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
	"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
	"github.com/aws/aws-sdk-go-v2/service/ec2/types"
	"github.com/aws/smithy-go/middleware"
	smithyhttp "github.com/aws/smithy-go/transport/http"
)

// Describes one or more of your paths.
func (c *Client) DescribeNetworkInsightsPaths(ctx context.Context, params *DescribeNetworkInsightsPathsInput, optFns ...func(*Options)) (*DescribeNetworkInsightsPathsOutput, error) {
	// Normalize a nil input to an empty struct so the middleware stack always
	// receives a non-nil params value.
	if params == nil {
		params = &DescribeNetworkInsightsPathsInput{}
	}

	result, metadata, err := c.invokeOperation(ctx, "DescribeNetworkInsightsPaths", params, optFns, c.addOperationDescribeNetworkInsightsPathsMiddlewares)
	if err != nil {
		return nil, err
	}

	out := result.(*DescribeNetworkInsightsPathsOutput)
	out.ResultMetadata = metadata
	return out, nil
}

// DescribeNetworkInsightsPathsInput carries the request parameters for the
// DescribeNetworkInsightsPaths operation.
type DescribeNetworkInsightsPathsInput struct {

	// Checks whether you have the required permissions for the action, without
	// actually making the request, and provides an error response. If you have the
	// required permissions, the error response is DryRunOperation. Otherwise, it is
	// UnauthorizedOperation.
	DryRun *bool

	// The filters. The following are possible values:
	//
	//   * Destination - The ID of the resource.
	//
	//   * DestinationPort - The destination port.
	//
	//   * Name - The path name.
	//
	//   * Protocol - The protocol.
	//
	//   * Source - The ID of the resource.
	Filters []types.Filter

	// The maximum number of results to return with a single call. To retrieve the
	// remaining results, make another call with the returned nextToken value.
	MaxResults *int32

	// The IDs of the paths.
	NetworkInsightsPathIds []string

	// The token for the next page of results.
	NextToken *string

	noSmithyDocumentSerde
}

// DescribeNetworkInsightsPathsOutput carries the result of the
// DescribeNetworkInsightsPaths operation.
type DescribeNetworkInsightsPathsOutput struct {

	// Information about the paths.
	NetworkInsightsPaths []types.NetworkInsightsPath

	// The token to use to retrieve the next page of results. This value is null when
	// there are no more results to return.
	NextToken *string

	// Metadata pertaining to the operation's result.
	ResultMetadata middleware.Metadata

	noSmithyDocumentSerde
}

// addOperationDescribeNetworkInsightsPathsMiddlewares registers the full
// serialize/deserialize, endpoint-resolution, signing, retry, logging and
// metadata middleware stack for this operation. Any single registration
// failure aborts the setup and is returned to the caller.
func (c *Client) addOperationDescribeNetworkInsightsPathsMiddlewares(stack *middleware.Stack, options Options) (err error) {
	err = stack.Serialize.Add(&awsEc2query_serializeOpDescribeNetworkInsightsPaths{}, middleware.After)
	if err != nil {
		return err
	}
	err = stack.Deserialize.Add(&awsEc2query_deserializeOpDescribeNetworkInsightsPaths{}, middleware.After)
	if err != nil {
		return err
	}
	if err = addSetLoggerMiddleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
		return err
	}
	if err = addResolveEndpointMiddleware(stack, options); err != nil {
		return err
	}
	if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
		return err
	}
	if err = addRetryMiddlewares(stack, options); err != nil {
		return err
	}
	if err = addHTTPSignerV4Middleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
		return err
	}
	if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
		return err
	}
	if err = addClientUserAgent(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	if err = stack.Initialize.Add(newServiceMetadataMiddleware_opDescribeNetworkInsightsPaths(options.Region), middleware.Before); err != nil {
		return err
	}
	if err = addRequestIDRetrieverMiddleware(stack); err != nil {
		return err
	}
	if err = addResponseErrorMiddleware(stack); err != nil {
		return err
	}
	if err = addRequestResponseLogging(stack, options); err != nil {
		return err
	}
	return nil
}

// DescribeNetworkInsightsPathsAPIClient is a client that implements the
// DescribeNetworkInsightsPaths operation.
type DescribeNetworkInsightsPathsAPIClient interface {
	DescribeNetworkInsightsPaths(context.Context, *DescribeNetworkInsightsPathsInput, ...func(*Options)) (*DescribeNetworkInsightsPathsOutput, error)
}

var _ DescribeNetworkInsightsPathsAPIClient = (*Client)(nil)

// DescribeNetworkInsightsPathsPaginatorOptions is the paginator options for
// DescribeNetworkInsightsPaths
type DescribeNetworkInsightsPathsPaginatorOptions struct {
	// The maximum number of results to return with a single call. To retrieve the
	// remaining results, make another call with the returned nextToken value.
	Limit int32

	// Set to true if pagination should stop if the service returns a pagination token
	// that matches the most recent token provided to the service.
	StopOnDuplicateToken bool
}

// DescribeNetworkInsightsPathsPaginator is a paginator for
// DescribeNetworkInsightsPaths
type DescribeNetworkInsightsPathsPaginator struct {
	options   DescribeNetworkInsightsPathsPaginatorOptions
	client    DescribeNetworkInsightsPathsAPIClient
	params    *DescribeNetworkInsightsPathsInput
	nextToken *string
	firstPage bool
}

// NewDescribeNetworkInsightsPathsPaginator returns a new
// DescribeNetworkInsightsPathsPaginator
func NewDescribeNetworkInsightsPathsPaginator(client DescribeNetworkInsightsPathsAPIClient, params *DescribeNetworkInsightsPathsInput, optFns ...func(*DescribeNetworkInsightsPathsPaginatorOptions)) *DescribeNetworkInsightsPathsPaginator {
	if params == nil {
		params = &DescribeNetworkInsightsPathsInput{}
	}

	options := DescribeNetworkInsightsPathsPaginatorOptions{}
	// Seed the page-size limit from the request's MaxResults, if set; option
	// functions applied below may still override it.
	if params.MaxResults != nil {
		options.Limit = *params.MaxResults
	}

	for _, fn := range optFns {
		fn(&options)
	}

	return &DescribeNetworkInsightsPathsPaginator{
		options:   options,
		client:    client,
		params:    params,
		firstPage: true,
		nextToken: params.NextToken,
	}
}

// HasMorePages returns a boolean indicating whether more pages are available
func (p *DescribeNetworkInsightsPathsPaginator) HasMorePages() bool {
	// The first page is always considered available; afterwards pagination
	// continues only while the service returned a non-empty token.
	return p.firstPage || (p.nextToken != nil && len(*p.nextToken) != 0)
}

// NextPage retrieves the next DescribeNetworkInsightsPaths page.
func (p *DescribeNetworkInsightsPathsPaginator) NextPage(ctx context.Context, optFns ...func(*Options)) (*DescribeNetworkInsightsPathsOutput, error) {
	if !p.HasMorePages() {
		return nil, fmt.Errorf("no more pages available")
	}

	// Operate on a shallow copy so the caller's params are never mutated.
	params := *p.params
	params.NextToken = p.nextToken

	// Only forward MaxResults when a positive limit was configured.
	var limit *int32
	if p.options.Limit > 0 {
		limit = &p.options.Limit
	}
	params.MaxResults = limit

	result, err := p.client.DescribeNetworkInsightsPaths(ctx, &params, optFns...)
	if err != nil {
		return nil, err
	}
	p.firstPage = false

	prevToken := p.nextToken
	p.nextToken = result.NextToken

	// Optionally stop when the service echoes back the same token, which
	// would otherwise paginate forever.
	if p.options.StopOnDuplicateToken &&
		prevToken != nil &&
		p.nextToken != nil &&
		*prevToken == *p.nextToken {
		p.nextToken = nil
	}

	return result, nil
}

func newServiceMetadataMiddleware_opDescribeNetworkInsightsPaths(region string) *awsmiddleware.RegisterServiceMetadata {
	return &awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "ec2",
		OperationName: "DescribeNetworkInsightsPaths",
	}
}
michi-covalent/cilium
vendor/github.com/aws/aws-sdk-go-v2/service/ec2/api_op_DescribeNetworkInsightsPaths.go
GO
apache-2.0
7,413
/*

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.batik.css.engine.value.svg12;

import org.apache.batik.css.engine.value.AbstractValue;

import org.w3c.dom.DOMException;
import org.w3c.dom.css.CSSValue;

/**
 * This class represents an device-specific color value.
 *
 * @version $Id$
 */
public class DeviceColor extends AbstractValue {

    public static final String DEVICE_GRAY_COLOR_FUNCTION = "device-gray";
    public static final String DEVICE_RGB_COLOR_FUNCTION = "device-rgb";
    public static final String DEVICE_CMYK_COLOR_FUNCTION = "device-cmyk";
    public static final String DEVICE_NCHANNEL_COLOR_FUNCTION = "device-nchannel";

    /** Whether this value uses an N-Channel color space. */
    protected boolean nChannel;

    /**
     * The color count.
     */
    protected int count;

    /**
     * The colors. Grows on demand in {@link #append(float)}; only the first
     * {@link #count} entries are meaningful.
     */
    protected float[] colors = new float[5];

    /**
     * Creates a new DeviceColor.
     * @param nChannel true for a device-nchannel() color, false for Gray, RGB and CMYK
     */
    public DeviceColor(boolean nChannel) {
        this.nChannel = nChannel;
    }

    /**
     * Implements {@link
     * org.apache.batik.css.engine.value.Value#getCssValueType()}.
     */
    public short getCssValueType() {
        return CSSValue.CSS_CUSTOM;
    }

    /**
     * Indicates whether this color uses an N-Channel color space.
     * @return true if N-Channel is used
     */
    public boolean isNChannel() {
        return this.nChannel;
    }

    /**
     * Returns the number of colors.
     */
    public int getNumberOfColors() throws DOMException {
        return count;
    }

    /**
     * Returns the color at the given index.
     * <p>No range check beyond the backing array is performed; callers are
     * expected to pass {@code 0 <= i < getNumberOfColors()}.</p>
     */
    public float getColor(int i) throws DOMException {
        return colors[i];
    }

    /**
     * A string representation of the current value, e.g.
     * {@code device-cmyk(0.1, 0.2, 0.3, 0.4)}.
     *
     * @throws IllegalStateException if the value is not N-Channel and the
     *         component count does not match Gray (1), RGB (3) or CMYK (4)
     */
    public String getCssText() {
        // StringBuilder: this value is built and consumed locally, so the
        // synchronized StringBuffer used previously was pure overhead.
        StringBuilder sb = new StringBuilder( count * 8 );
        if (nChannel) {
            sb.append(DEVICE_NCHANNEL_COLOR_FUNCTION);
        } else {
            switch (count) {
            case 1:
                sb.append(DEVICE_GRAY_COLOR_FUNCTION);
                break;
            case 3:
                sb.append(DEVICE_RGB_COLOR_FUNCTION);
                break;
            case 4:
                sb.append(DEVICE_CMYK_COLOR_FUNCTION);
                break;
            default:
                throw new IllegalStateException("Invalid number of components encountered");
            }
        }
        sb.append('(');
        for (int i = 0; i < count; i++) {
            if (i > 0) {
                sb.append(", ");
            }
            sb.append(colors[i]);
        }
        sb.append( ')' );
        return sb.toString();
    }

    /**
     * Appends a color to the list, doubling the backing array when full.
     */
    public void append(float c) {
        if (count == colors.length) {
            float[] t = new float[count * 2];
            System.arraycopy( colors, 0, t, 0, count );
            colors = t;
        }
        colors[count++] = c;
    }

    /** {@inheritDoc} */
    public String toString() {
        return getCssText();
    }
}
apache/batik
batik-css/src/main/java/org/apache/batik/css/engine/value/svg12/DeviceColor.java
Java
apache-2.0
3,839
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.table.caching.guava; import com.google.common.cache.Cache; import org.apache.samza.SamzaException; import org.apache.samza.context.Context; import org.apache.samza.storage.kv.Entry; import org.apache.samza.table.BaseReadWriteTable; import org.apache.samza.table.ReadWriteTable; import org.apache.samza.table.utils.TableMetricsUtil; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; /** * Simple cache table backed by a Guava cache instance. Application is expect to build * a cache instance with desired parameters and specify it to the table descriptor. * * @param <K> type of the key in the cache * @param <V> type of the value in the cache */ public class GuavaCacheTable<K, V> extends BaseReadWriteTable<K, V> implements ReadWriteTable<K, V> { private final Cache<K, V> cache; public GuavaCacheTable(String tableId, Cache<K, V> cache) { super(tableId); this.cache = cache; } @Override public void init(Context context) { super.init(context); TableMetricsUtil tableMetricsUtil = new TableMetricsUtil(context, this, tableId); // hit- and miss-rate are provided by CachingTable. 
tableMetricsUtil.newGauge("evict-count", () -> cache.stats().evictionCount()); } @Override public V get(K key, Object ... args) { try { return getAsync(key).get(); } catch (Exception e) { throw new SamzaException("GET failed for " + key, e); } } @Override public CompletableFuture<V> getAsync(K key, Object ... args) { CompletableFuture<V> future = new CompletableFuture<>(); try { future.complete(cache.getIfPresent(key)); } catch (Exception e) { future.completeExceptionally(e); } return future; } @Override public Map<K, V> getAll(List<K> keys, Object ... args) { try { return getAllAsync(keys).get(); } catch (Exception e) { throw new SamzaException("GET_ALL failed for " + keys, e); } } @Override public CompletableFuture<Map<K, V>> getAllAsync(List<K> keys, Object ... args) { CompletableFuture<Map<K, V>> future = new CompletableFuture<>(); try { future.complete(cache.getAllPresent(keys)); } catch (Exception e) { future.completeExceptionally(e); } return future; } @Override public void put(K key, V value, Object ... args) { try { putAsync(key, value).get(); } catch (Exception e) { throw new SamzaException("PUT failed for " + key, e); } } @Override public CompletableFuture<Void> putAsync(K key, V value, Object ... args) { if (key == null) { return deleteAsync(key); } CompletableFuture<Void> future = new CompletableFuture<>(); try { cache.put(key, value); future.complete(null); } catch (Exception e) { future.completeExceptionally(e); } return future; } @Override public void putAll(List<Entry<K, V>> entries, Object ... args) { try { putAllAsync(entries).get(); } catch (Exception e) { throw new SamzaException("PUT_ALL failed", e); } } @Override public CompletableFuture<Void> putAllAsync(List<Entry<K, V>> entries, Object ... 
args) { CompletableFuture<Void> future = new CompletableFuture<>(); try { // Separate out put vs delete records List<K> delKeys = new ArrayList<>(); List<Entry<K, V>> putRecords = new ArrayList<>(); entries.forEach(r -> { if (r.getValue() != null) { putRecords.add(r); } else { delKeys.add(r.getKey()); } }); cache.invalidateAll(delKeys); putRecords.forEach(e -> put(e.getKey(), e.getValue())); future.complete(null); } catch (Exception e) { future.completeExceptionally(e); } return future; } @Override public void delete(K key, Object ... args) { try { deleteAsync(key).get(); } catch (Exception e) { throw new SamzaException("DELETE failed", e); } } @Override public CompletableFuture<Void> deleteAsync(K key, Object ... args) { CompletableFuture<Void> future = new CompletableFuture<>(); try { cache.invalidate(key); future.complete(null); } catch (Exception e) { future.completeExceptionally(e); } return future; } @Override public void deleteAll(List<K> keys, Object ... args) { try { deleteAllAsync(keys).get(); } catch (Exception e) { throw new SamzaException("DELETE_ALL failed", e); } } @Override public CompletableFuture<Void> deleteAllAsync(List<K> keys, Object ... args) { CompletableFuture<Void> future = new CompletableFuture<>(); try { cache.invalidateAll(keys); future.complete(null); } catch (Exception e) { future.completeExceptionally(e); } return future; } @Override public synchronized void flush() { cache.cleanUp(); } @Override public synchronized void close() { cache.invalidateAll(); } }
prateekm/samza
samza-core/src/main/java/org/apache/samza/table/caching/guava/GuavaCacheTable.java
Java
apache-2.0
5,858
#!/usr/bin/env python # encoding: utf-8 # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from flask import Flask, request import nltk import json from nltk_contrib import timex import time import sys import getopt USAGE = """ nltk-rest --port -p <port> -v units -u [--help -h] Expose NLTK over REST as a server using Python Flask. Submit content to the `/nltk` endpoint in the REST body request. -h, --help Prints this message. -p, --port Sets the port for the REST server, default is 8881. 
-u, --units Enable parser to extract measurements from text """ Verbose = 0 Port = 8881 #default port Units = 0 def echo2(*s): sys.stderr.write('server.py [NLTK]: ' + ' '.join(map(str, s)) + '\n') app = Flask(__name__) @app.route('/') def status(): msg = ''' <html><head><title>NLTK REST Server</title></head><body><h3>NLTK REST server</h3> <p>This app exposes the Python <a href="http://nltk.org/">Natural Language Toolkit (NLTK)</a> as a REST server.</p> <h2>Status: Running</h2> <p>More apps from the <a href="//irds.usc.edu/">USC Information Retrieval & Data Science Group</a>.</p> ''' return msg @app.route('/nltk', methods=["PUT", "POST"]) def namedEntityRecognizer(): echo2("Performing NER on incoming stream") content = request.stream.read() if Verbose: echo2("Incoming content is "+content) start = time.time() date_time = timex.tag(content) tokenized = nltk.word_tokenize(content.decode("utf-8")) tagged = nltk.pos_tag(tokenized) namedEnt = nltk.ne_chunk(tagged, binary=True) names = extract_entity_names(namedEnt, 'NE') names.extend(date_time) result = {"result" : "success", "names" : names} if Units: grammar = '''unit: {<CD><NNS>?<NN.*>?}, unit: {<CD><JJ>?<NN.*>} ''' parser = nltk.RegexpParser(grammar) units = extract_entity_names(parser.parse(tagged),'unit') result['units'] = units jsonDoc = json.dumps(result, sort_keys=True, indent=4, separators=(',', ': ')) end = time.time() print "NER took "+str(end - start)+" seconds" return jsonDoc # Based on example from: # https://gist.github.com/onyxfish/322906 def extract_entity_names(t, label): entity_names = [] if hasattr(t, 'label') and t.label: if t.label() == label: entity_names.append(' '.join([child[0] for child in t])) else: for child in t: entity_names.extend(extract_entity_names(child, label)) return entity_names def main(argv=None): """Run NLTK REST server from command line according to USAGE.""" global Verbose global Units if argv is None: argv = sys.argv try: opts, argv = getopt.getopt(argv[1:], 'hp:vu', ['help', 
'port=', 'verbose', 'units']) except getopt.GetoptError, (msg, bad_opt): die("%s error: Bad option: %s, %s" % (argv[0], bad_opt, msg)) port = Port for opt, val in opts: if opt in ('-h', '--help'): echo2(USAGE); sys.exit() elif opt in ('--port'): port = int(val) elif opt in ('-v', '--verbose'): Verbose = 1 elif opt in ('-u', '--units'): Units = 1 else: die(USAGE) app.run(debug=Verbose, port=port) if __name__ == '__main__': main(sys.argv)
chrismattmann/NLTKRest
nltkrest/nltkrest/server.py
Python
apache-2.0
4,079
<?php /** * Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) * * Licensed under The MIT License * For full copyright and license information, please see the LICENSE.txt * Redistributions of files must retain the above copyright notice. * * @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) * @link http://cakephp.org CakePHP(tm) Project * @license http://www.opensource.org/licenses/mit-license.php MIT License */ namespace DebugKit\Model\Table; use Cake\Core\App; use Cake\Database\Connection; use Cake\Datasource\FixtureInterface; use PDOException; /** * A set of methods for building a database table when it is missing. * * Because the debugkit doesn't come with a pre-built SQLite database, * we'll need to make it as we need it. * * This trait lets us dump fixture schema into a given database at runtime. */ trait LazyTableTrait { /** * Ensures the tables for the given fixtures exist in the schema. * * If the tables do not exist, they will be created on the current model's connection. * * @param array $fixtures The fixture names to check and/or insert. * @return void * @throws \RuntimeException When fixtures are missing/unknown/fail. */ public function ensureTables(array $fixtures) { /* @var Connection $connection */ $connection = $this->getConnection(); $schema = $connection->getSchemaCollection(); try { $existing = $schema->listTables(); } catch (PDOException $e) { // Handle errors when SQLite blows up if the schema has changed. if (strpos($e->getMessage(), 'schema has changed') !== false) { $existing = $schema->listTables(); } else { throw $e; } } foreach ($fixtures as $name) { $class = App::className($name, 'Test/Fixture', 'Fixture'); if ($class === false) { throw new \RuntimeException("Unknown fixture '$name'."); } /* @var FixtureInterface $fixture */ $fixture = new $class($connection->configName()); if (in_array($fixture->table, $existing)) { continue; } $fixture->create($connection); } } }
cloudfoundry/php-buildpack
fixtures/cake_local_deps/vendor/cakephp/debug_kit/src/Model/Table/LazyTableTrait.php
PHP
apache-2.0
2,364
package com.html5parser.constants;

import java.util.HashMap;
import java.util.Map;

public class SVGAttributesTable {

	/**
	 * Lookup table used while adjusting SVG attributes during tree
	 * construction.
	 *
	 * Key: the all-lowercase attribute name as produced by the tokenizer.
	 * Value: the fixed, mixed-case attribute name to use in the DOM.
	 */
	public static final Map<String, String> TABLE;

	static {
		// Flat (key, value) pairs; consumed two entries at a time below.
		String[] entries = {
				"attributename", "attributeName",
				"attributetype", "attributeType",
				"basefrequency", "baseFrequency",
				"baseprofile", "baseProfile",
				"calcmode", "calcMode",
				"clippathunits", "clipPathUnits",
				"contentscripttype", "contentScriptType",
				"contentstyletype", "contentStyleType",
				"diffuseconstant", "diffuseConstant",
				"edgemode", "edgeMode",
				"externalresourcesrequired", "externalResourcesRequired",
				"filterres", "filterRes",
				"filterunits", "filterUnits",
				"glyphref", "glyphRef",
				"gradienttransform", "gradientTransform",
				"gradientunits", "gradientUnits",
				"kernelmatrix", "kernelMatrix",
				"kernelunitlength", "kernelUnitLength",
				"keypoints", "keyPoints",
				"keysplines", "keySplines",
				"keytimes", "keyTimes",
				"lengthadjust", "lengthAdjust",
				"limitingconeangle", "limitingConeAngle",
				"markerheight", "markerHeight",
				"markerunits", "markerUnits",
				"markerwidth", "markerWidth",
				"maskcontentunits", "maskContentUnits",
				"maskunits", "maskUnits",
				"numoctaves", "numOctaves",
				"pathlength", "pathLength",
				"patterncontentunits", "patternContentUnits",
				"patterntransform", "patternTransform",
				"patternunits", "patternUnits",
				"pointsatx", "pointsAtX",
				"pointsaty", "pointsAtY",
				"pointsatz", "pointsAtZ",
				"preservealpha", "preserveAlpha",
				"preserveaspectratio", "preserveAspectRatio",
				"primitiveunits", "primitiveUnits",
				"refx", "refX",
				"refy", "refY",
				"repeatcount", "repeatCount",
				"repeatdur", "repeatDur",
				"requiredextensions", "requiredExtensions",
				"requiredfeatures", "requiredFeatures",
				"specularconstant", "specularConstant",
				"specularexponent", "specularExponent",
				"spreadmethod", "spreadMethod",
				"startoffset", "startOffset",
				"stddeviation", "stdDeviation",
				"stitchtiles", "stitchTiles",
				"surfacescale", "surfaceScale",
				"systemlanguage", "systemLanguage",
				"tablevalues", "tableValues",
				"targetx", "targetX",
				"targety", "targetY",
				"textlength", "textLength",
				"viewbox", "viewBox",
				"viewtarget", "viewTarget",
				"xchannelselector", "xChannelSelector",
				"ychannelselector", "yChannelSelector",
				"zoomandpan", "zoomAndPan" };

		Map<String, String> table = new HashMap<String, String>();
		for (int i = 0; i < entries.length; i += 2) {
			table.put(entries[i], entries[i + 1]);
		}
		TABLE = table;
	}
}
carlos-anaya/HTML5Parser
Code/src/main/java/com/html5parser/constants/SVGAttributesTable.java
Java
apache-2.0
3,160
/* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ (function (root, factory) { if (typeof exports === 'object') { define = function(deps, factory) { deps = deps.map.forEach(function(id) { return require(id); }); module.exports = factory(deps); }; define.amd = {}; } if (typeof define === 'function' && define.amd) { define('activesync/codepages',[ 'wbxml', './codepages/Common', './codepages/AirSync', './codepages/Contacts', './codepages/Email', './codepages/Calendar', './codepages/Move', './codepages/ItemEstimate', './codepages/FolderHierarchy', './codepages/MeetingResponse', './codepages/Tasks', './codepages/ResolveRecipients', './codepages/ValidateCert', './codepages/Contacts2', './codepages/Ping', './codepages/Provision', './codepages/Search', './codepages/GAL', './codepages/AirSyncBase', './codepages/Settings', './codepages/DocumentLibrary', './codepages/ItemOperations', './codepages/ComposeMail', './codepages/Email2', './codepages/Notes', './codepages/RightsManagement' ], factory); } else { root.ActiveSyncCodepages = factory(WBXML, ASCPCommon, ASCPAirSync, ASCPContacts, ASCPEmail, ASCPCalendar, ASCPMove, ASCPItemEstimate, ASCPHierarchy, ASCPMeetingResponse, ASCPTasks, ASCPResolveRecipients, ASCPValidateCert, ASCPContacts2, ASCPPing, ASCPProvision, ASCPSearch, ASCPGAL, ASCPAirSyncBase, ASCPSettings, ASCPDocumentLibrary, ASCPItemOperations, ASCPComposeMail, ASCPEmail2, ASCPNotes, ASCPRightsManagement); } }(this, function(WBXML, Common, 
AirSync, Contacts, Email, Calendar, Move, ItemEstimate, FolderHierarchy, MeetingResponse, Tasks, ResolveRecipients, ValidateCert, Contacts2, Ping, Provision, Search, GAL, AirSyncBase, Settings, DocumentLibrary, ItemOperations, ComposeMail, Email2, Notes, RightsManagement) { 'use strict'; var codepages = { Common: Common, AirSync: AirSync, Contacts: Contacts, Email: Email, Calendar: Calendar, Move: Move, ItemEstimate: ItemEstimate, FolderHierarchy: FolderHierarchy, MeetingResponse: MeetingResponse, Tasks: Tasks, ResolveRecipients: ResolveRecipients, ValidateCert: ValidateCert, Contacts2: Contacts2, Ping: Ping, Provision: Provision, Search: Search, GAL: GAL, AirSyncBase: AirSyncBase, Settings: Settings, DocumentLibrary: DocumentLibrary, ItemOperations: ItemOperations, ComposeMail: ComposeMail, Email2: Email2, Notes: Notes, RightsManagement: RightsManagement }; WBXML.CompileCodepages(codepages); return codepages; })); /* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('wbxml',factory); else root.WBXML = factory(); }(this, function() { 'use strict'; var exports = {}; var Tokens = { SWITCH_PAGE: 0x00, END: 0x01, ENTITY: 0x02, STR_I: 0x03, LITERAL: 0x04, EXT_I_0: 0x40, EXT_I_1: 0x41, EXT_I_2: 0x42, PI: 0x43, LITERAL_C: 0x44, EXT_T_0: 0x80, EXT_T_1: 0x81, EXT_T_2: 0x82, STR_T: 0x83, LITERAL_A: 0x84, EXT_0: 0xC0, EXT_1: 0xC1, EXT_2: 0xC2, OPAQUE: 0xC3, LITERAL_AC: 0xC4, }; /** * Create a constructor for a custom error type that works like a built-in * Error. * * @param name the string name of the error * @param parent (optional) a parent class for the error, defaults to Error * @param extraArgs an array of extra arguments that can be passed to the * constructor of this error type * @return the constructor for this error */ function makeError(name, parent, extraArgs) { function CustomError() { // Try to let users call this as CustomError(...) without the "new". This // is imperfect, and if you call this function directly and give it a // |this| that's a CustomError, things will break. Don't do it! var self = this instanceof CustomError ? 
this : Object.create(CustomError.prototype); var tmp = Error(); var offset = 1; self.stack = tmp.stack.substring(tmp.stack.indexOf('\n') + 1); self.message = arguments[0] || tmp.message; if (extraArgs) { offset += extraArgs.length; for (var i = 0; i < extraArgs.length; i++) self[extraArgs[i]] = arguments[i+1]; } var m = /@(.+):(.+)/.exec(self.stack); self.fileName = arguments[offset] || (m && m[1]) || ""; self.lineNumber = arguments[offset + 1] || (m && m[2]) || 0; return self; } CustomError.prototype = Object.create((parent || Error).prototype); CustomError.prototype.name = name; CustomError.prototype.constructor = CustomError; return CustomError; } var ParseError = makeError('WBXML.ParseError'); exports.ParseError = ParseError; function StringTable(data, decoder) { this.strings = []; this.offsets = {}; var start = 0; for (var i = 0; i < data.length; i++) { if (data[i] === 0) { this.offsets[start] = this.strings.length; this.strings.push(decoder.decode( data.subarray(start, i) )); start = i + 1; } } } StringTable.prototype = { get: function(offset) { if (offset in this.offsets) return this.strings[this.offsets[offset]]; else { if (offset < 0) throw new ParseError('offset must be >= 0'); var curr = 0; for (var i = 0; i < this.strings.length; i++) { // Add 1 to the current string's length here because we stripped a // null-terminator earlier. 
if (offset < curr + this.strings[i].length + 1) return this.strings[i].slice(offset - curr); curr += this.strings[i].length + 1; } } throw new ParseError('invalid offset'); }, }; function CompileCodepages(codepages) { codepages.__nsnames__ = {}; codepages.__tagnames__ = {}; codepages.__attrdata__ = {}; for (var iter in Iterator(codepages)) { var name = iter[0], page = iter[1]; if (name.match(/^__/)) continue; if (page.Tags) { var v = Iterator(page.Tags).next(); codepages.__nsnames__[v[1] >> 8] = name; for (var iter2 in Iterator(page.Tags)) { var tag = iter2[0], value = iter2[1]; codepages.__tagnames__[value] = tag; } } if (page.Attrs) { for (var iter3 in Iterator(page.Attrs)) { var attr = iter3[0], data = iter3[1]; if (!('name' in data)) data.name = attr; codepages.__attrdata__[data.value] = data; page.Attrs[attr] = data.value; } } } } exports.CompileCodepages = CompileCodepages; var mib2str = { 3: 'US-ASCII', 4: 'ISO-8859-1', 5: 'ISO-8859-2', 6: 'ISO-8859-3', 7: 'ISO-8859-4', 8: 'ISO-8859-5', 9: 'ISO-8859-6', 10: 'ISO-8859-7', 11: 'ISO-8859-8', 12: 'ISO-8859-9', 13: 'ISO-8859-10', 106: 'UTF-8', }; // TODO: Really, we should build our own map here with synonyms for the // various encodings, but this is a step in the right direction. 
// Reverse map: charset name -> MIBenum.  NOTE(review): the values are the
// string keys of mib2str (object keys are strings), not numbers; the typed
// array coerces them on write.
var str2mib = {};
for (var iter in Iterator(mib2str)) {
  str2mib[iter[1]] = iter[0];
}

/**
 * An element (tag) node in a parsed WBXML document.
 *
 * @param ownerDocument the Reader that produced this node
 * @param type 'STAG' (has children) or 'TAG' (empty element)
 * @param tag numeric (codepage << 8 | local tag) value, or a literal
 *        "ns:name" / "name" string for LITERAL tags
 */
function Element(ownerDocument, type, tag) {
  this.ownerDocument = ownerDocument;
  this.type = type;
  this._attrs = {};

  if (typeof tag === 'string') {
    var pieces = tag.split(':');
    if (pieces.length === 1) {
      this.localTagName = pieces[0];
    } else {
      this.namespaceName = pieces[0];
      this.localTagName = pieces[1];
    }
  }
  else {
    this.tag = tag;
    Object.defineProperties(this, {
      // High byte is the codepage, low byte the tag within the page.
      'namespace': { get: function() { return this.tag >> 8; } },
      'localTag': { get: function() { return this.tag & 0xff; } },
      'namespaceName': { get: function() {
        return this.ownerDocument._codepages.__nsnames__[this.namespace];
      } },
      'localTagName': { get: function() {
        return this.ownerDocument._codepages.__tagnames__[this.tag];
      } },
    });
  }
}
exports.Element = Element;

Element.prototype = {
  // Fully-qualified "ns:name" (or bare name when no namespace is known).
  get tagName() {
    var ns = this.namespaceName;
    ns = ns ? ns + ':' : '';
    return ns + this.localTagName;
  },

  /**
   * @return {Array} one {name, namespace, localName, value} record per
   *         attribute present on this element
   */
  getAttributes: function() {
    var attributes = [];

    for (var iter in Iterator(this._attrs)) {
      var name = iter[0], pieces = iter[1];
      var data = name.split(':');
      attributes.push({ name: name, namespace: data[0], localName: data[1],
                        value: this._getAttribute(pieces) });
    }

    return attributes;
  },

  /**
   * Look up an attribute by numeric token value or by (possibly unqualified)
   * name.
   * NOTE(review): when the tag was a literal string, `this.namespace` is
   * undefined (not null), so the namespace-prefixing branch can still run —
   * confirm intended.
   */
  getAttribute: function(attr) {
    if (typeof attr === 'number')
      attr = this.ownerDocument._codepages.__attrdata__[attr].name;
    else if (!(attr in this._attrs) && this.namespace !== null &&
             attr.indexOf(':') === -1)
      attr = this.namespaceName + ':' + attr;

    return this._getAttribute(this._attrs[attr]);
  },

  // Collapse an attribute's pieces (strings, ATTRVALUE tokens, Extensions)
  // into one string, or an array when Extension objects are interleaved.
  _getAttribute: function(pieces) {
    var strValue = '';
    var array = [];

    for (var iter in Iterator(pieces)) {
      var hunk = iter[1];
      if (hunk instanceof Extension) {
        if (strValue) {
          array.push(strValue);
          strValue = '';
        }
        array.push(hunk);
      }
      else if (typeof hunk === 'number') {
        // ATTRVALUE token: substitute its predefined data, if any.
        strValue += this.ownerDocument._codepages.__attrdata__[hunk].data ||
                    '';
      }
      else {
        strValue += hunk;
      }
    }
    if (strValue)
      array.push(strValue);

    return array.length === 1 ?
           array[0] : array;
  },

  // Begin a new attribute (literal string or ATTRSTART token); returns the
  // pieces array that subsequent attrValue tokens get pushed onto.
  _addAttribute: function(attr) {
    if (typeof attr === 'string') {
      if (attr in this._attrs)
        throw new ParseError('attribute '+attr+' is repeated');
      return this._attrs[attr] = [];
    }
    else {
      var namespace = attr >> 8;
      var localAttr = attr & 0xff;

      var localName = this.ownerDocument._codepages.__attrdata__[localAttr]
                          .name;
      var nsName = this.ownerDocument._codepages.__nsnames__[namespace];
      var name = nsName + ':' + localName;

      if (name in this._attrs)
        throw new ParseError('attribute '+name+' is repeated');
      return this._attrs[name] = [attr];
    }
  },
};

// Closing-tag marker node.
function EndTag(ownerDocument) {
  this.ownerDocument = ownerDocument;
}
exports.EndTag = EndTag;

EndTag.prototype = {
  get type() { return 'ETAG'; },
};

// Character-data node.
function Text(ownerDocument, textContent) {
  this.ownerDocument = ownerDocument;
  this.textContent = textContent;
}
exports.Text = Text;

Text.prototype = {
  get type() { return 'TEXT'; },
};

// EXT_I*/EXT_T*/EXT* extension node; subtype is 'string'|'integer'|'byte'.
function Extension(ownerDocument, subtype, index, value) {
  this.ownerDocument = ownerDocument;
  this.subtype = subtype;
  this.index = index;
  this.value = value;
}
exports.Extension = Extension;

Extension.prototype = {
  get type() { return 'EXT'; },
};

// Processing-instruction node; target/data are filled in during parsing.
function ProcessingInstruction(ownerDocument) {
  this.ownerDocument = ownerDocument;
}
exports.ProcessingInstruction = ProcessingInstruction;

ProcessingInstruction.prototype = {
  get type() { return 'PI'; },

  get target() {
    if (typeof this.targetID === 'string')
      return this.targetID;
    else
      return this.ownerDocument._codepages.__attrdata__[this.targetID].name;
  },

  _setTarget: function(target) {
    this.targetID = target;
    if (typeof target === 'string')
      return this._data = [];
    else
      return this._data = [target];
  },

  // XXX: this seems impolite...
_getAttribute: Element.prototype._getAttribute, get data() { return this._getAttribute(this._data); }, }; function Opaque(ownerDocument, data) { this.ownerDocument = ownerDocument; this.data = data; } exports.Opaque = Opaque; Opaque.prototype = { get type() { return 'OPAQUE'; }, }; function Reader(data, codepages) { this._data = data instanceof Writer ? data.bytes : data; this._codepages = codepages; this.rewind(); } exports.Reader = Reader; Reader.prototype = { _get_uint8: function() { if (this._index === this._data.length) throw StopIteration; return this._data[this._index++]; }, _get_mb_uint32: function() { var b; var result = 0; do { b = this._get_uint8(); result = result*128 + (b & 0x7f); } while(b & 0x80); return result; }, _get_slice: function(length) { var start = this._index; this._index += length; return this._data.subarray(start, this._index); }, _get_c_string: function() { var start = this._index; while (this._get_uint8()); return this._data.subarray(start, this._index - 1); }, rewind: function() { // Although in theory we could cache this.document since we no longer use // iterators, there is clearly some kind of rep exposure that goes awry // for us, so I'm having us re-do our work. This does not matter in the // normal use-case, just for debugging and just for our test server, which // both rely on rewind(). this._index = 0; var v = this._get_uint8(); this.version = ((v & 0xf0) + 1).toString() + '.' 
+ (v & 0x0f).toString(); this.pid = this._get_mb_uint32(); this.charset = mib2str[this._get_mb_uint32()] || 'unknown'; this._decoder = TextDecoder(this.charset); var tbl_len = this._get_mb_uint32(); this.strings = new StringTable(this._get_slice(tbl_len), this._decoder); this.document = this._getDocument(); }, // start = version publicid charset strtbl body // strtbl = length *byte // body = *pi element *pi // element = stag [ 1*attribute END ] [ *content END ] // // content = element | string | extension | entity | pi | opaque // // stag = TAG | ( LITERAL index ) // attribute = attrStart *attrValue // attrStart = ATTRSTART | ( LITERAL index ) // attrValue = ATTRVALUE | string | extension | entity // // extension = ( EXT_I termstr ) | ( EXT_T index ) | EXT // // string = inline | tableref // inline = STR_I termstr // tableref = STR_T index // // entity = ENTITY entcode // entcode = mb_u_int32 // UCS-4 character code // // pi = PI attrStart *attrValue END // // opaque = OPAQUE length *byte // // version = u_int8 containing WBXML version number // publicid = mb_u_int32 | ( zero index ) // charset = mb_u_int32 // termstr = charset-dependent string with termination // index = mb_u_int32 // integer index into string table. // length = mb_u_int32 // integer length. // zero = u_int8 // containing the value zero (0). _getDocument: function() { // Parser states var States = { BODY: 0, ATTRIBUTES: 1, ATTRIBUTE_PI: 2, }; var state = States.BODY; var currentNode; var currentAttr; var codepage = 0; var depth = 0; var foundRoot = false; var doc = []; var appendString = (function(s) { if (state === States.BODY) { if (!currentNode) currentNode = new Text(this, s); else currentNode.textContent += s; } else { // if (state === States.ATTRIBUTES || state === States.ATTRIBUTE_PI) currentAttr.push(s); } // We can assume that we're in a valid state, so don't bother checking // here. 
}).bind(this);

    // Main token loop: flattens the byte stream into `doc`, an array of
    // Element/EndTag/Text/Extension/ProcessingInstruction/Opaque nodes.
    // The loop terminates when _get_uint8() throws StopIteration at EOF.
    try {
      while (true) {
        var tok = this._get_uint8();

        if (tok === Tokens.SWITCH_PAGE) {
          codepage = this._get_uint8();
          if (!(codepage in this._codepages.__nsnames__))
            throw new ParseError('unknown codepage '+codepage);
        }
        else if (tok === Tokens.END) {
          if (state === States.BODY && depth-- > 0) {
            if (currentNode) {
              doc.push(currentNode);
              currentNode = null;
            }
            doc.push(new EndTag(this));
          }
          else if (state === States.ATTRIBUTES || state === States.ATTRIBUTE_PI) {
            // END terminates the attribute list; the element/PI is complete.
            state = States.BODY;
            doc.push(currentNode);
            currentNode = null;
            currentAttr = null;
          }
          else {
            throw new ParseError('unexpected END token');
          }
        }
        else if (tok === Tokens.ENTITY) {
          if (state === States.BODY && depth === 0)
            throw new ParseError('unexpected ENTITY token');
          var e = this._get_mb_uint32();
          // Entities are re-expressed as XML character references.
          appendString('&#'+e+';');
        }
        else if (tok === Tokens.STR_I) {
          // Inline (NUL-terminated) string.
          if (state === States.BODY && depth === 0)
            throw new ParseError('unexpected STR_I token');
          appendString(this._decoder.decode(this._get_c_string()));
        }
        else if (tok === Tokens.PI) {
          if (state !== States.BODY)
            throw new ParseError('unexpected PI token');
          state = States.ATTRIBUTE_PI;
          if (currentNode)
            doc.push(currentNode);
          currentNode = new ProcessingInstruction(this);
        }
        else if (tok === Tokens.STR_T) {
          // String-table reference.
          if (state === States.BODY && depth === 0)
            throw new ParseError('unexpected STR_T token');
          var r = this._get_mb_uint32();
          appendString(this.strings.get(r));
        }
        else if (tok === Tokens.OPAQUE) {
          if (state !== States.BODY)
            throw new ParseError('unexpected OPAQUE token');
          var len = this._get_mb_uint32();
          var data = this._get_slice(len);

          if (currentNode) {
            doc.push(currentNode);
            currentNode = null;
          }
          doc.push(new Opaque(this, data));
        }
        else if (((tok & 0x40) || (tok & 0x80)) && (tok & 0x3f) < 3) {
          // Extension token: the high two bits pick EXT_I/EXT_T/EXT, the low
          // bits (0-2) pick the extension index.
          var hi = tok & 0xc0;
          var lo = tok & 0x3f;
          var subtype;
          var value;

          if (hi === Tokens.EXT_I_0) {
            subtype = 'string';
            value = this._decoder.decode(this._get_c_string());
          }
          else if (hi === Tokens.EXT_T_0) {
            subtype = 'integer';
            value = this._get_mb_uint32();
          }
          else { //
if (hi === Tokens.EXT_0)
            subtype = 'byte';
            value = null;
          }

          var ext = new Extension(this, subtype, lo, value);
          if (state === States.BODY) {
            if (currentNode) {
              doc.push(currentNode);
              currentNode = null;
            }
            doc.push(ext);
          }
          else { // if (state === States.ATTRIBUTES || state === States.ATTRIBUTE_PI)
            currentAttr.push(ext);
          }
        }
        else if (state === States.BODY) {
          // A tag token (possibly LITERAL) starts an element here.
          if (depth === 0) {
            if (foundRoot)
              throw new ParseError('multiple root nodes found');
            foundRoot = true;
          }

          var tag = (codepage << 8) + (tok & 0x3f);
          if ((tok & 0x3f) === Tokens.LITERAL) {
            var r = this._get_mb_uint32();
            tag = this.strings.get(r);
          }

          if (currentNode)
            doc.push(currentNode);
          currentNode = new Element(this, (tok & 0x40) ? 'STAG' : 'TAG', tag);
          // Bit 0x40 = has content (STAG); bit 0x80 = has attributes.
          if (tok & 0x40)
            depth++;

          if (tok & 0x80) {
            state = States.ATTRIBUTES;
          }
          else {
            state = States.BODY;
            doc.push(currentNode);
            currentNode = null;
          }
        }
        else { // if (state === States.ATTRIBUTES || state === States.ATTRIBUTE_PI)
          var attr = (codepage << 8) + tok;
          if (!(tok & 0x80)) {
            // ATTRSTART (or LITERAL attribute name) starts a new attribute.
            if (tok === Tokens.LITERAL) {
              var r = this._get_mb_uint32();
              attr = this.strings.get(r);
            }
            if (state === States.ATTRIBUTE_PI) {
              if (currentAttr)
                throw new ParseError('unexpected attribute in PI');
              currentAttr = currentNode._setTarget(attr);
            }
            else {
              currentAttr = currentNode._addAttribute(attr);
            }
          }
          else {
            // ATTRVALUE token: appended to the attribute in progress.
            currentAttr.push(attr);
          }
        }
      }
    }
    catch (e) {
      // StopIteration is the normal end-of-data signal; anything else is a
      // real error.
      if (!(e instanceof StopIteration))
        throw e;
    }

    return doc;
  },

  /**
   * Pretty-print the parsed document as XML-ish text (debugging helper).
   *
   * @param indentation spaces per nesting level (default 2)
   * @param header if truthy, also dump version/pid/charset/string table
   * @return {String}
   */
  dump: function(indentation, header) {
    var result = '';

    if (indentation === undefined)
      indentation = 2;
    var indent = function(level) {
      return new Array(level*indentation + 1).join(' ');
    };
    var tagstack = [];

    if (header) {
      result += 'Version: ' + this.version + '\n';
      result += 'Public ID: ' + this.pid + '\n';
      result += 'Charset: ' + this.charset + '\n';
      result += 'String table:\n "' +
                this.strings.strings.join('"\n "') + '"\n\n';
    }

    // NOTE(review): `newline` appears unused in the remainder of dump().
    var newline = false;
    var doc = this.document;
    var doclen = doc.length;
    for (var iNode = 0; iNode < doclen; iNode++) {
      var node = doc[iNode];
      if (node.type ===
'TAG' || node.type === 'STAG') {
        result += indent(tagstack.length) + '<' + node.tagName;

        var attributes = node.getAttributes();
        for (var i = 0; i < attributes.length; i++) {
          var attr = attributes[i];
          result += ' ' + attr.name + '="' + attr.value + '"';
        }

        if (node.type === 'STAG') {
          tagstack.push(node.tagName);
          result += '>\n';
        }
        else
          result += '/>\n';
      }
      else if (node.type === 'ETAG') {
        var tag = tagstack.pop();
        result += indent(tagstack.length) + '</' + tag + '>\n';
      }
      else if (node.type === 'TEXT') {
        result += indent(tagstack.length) + node.textContent + '\n';
      }
      else if (node.type === 'PI') {
        result += indent(tagstack.length) + '<?' + node.target;
        if (node.data)
          result += ' ' + node.data;
        result += '?>\n';
      }
      else if (node.type === 'OPAQUE') {
        result += indent(tagstack.length) + '<![CDATA[' + node.data + ']]>\n';
      }
      else {
        throw new Error('Unknown node type "' + node.type + '"');
      }
    }

    return result;
  },
};

/**
 * Serializes WBXML into a growable byte buffer via a fluent API
 * (stag/tag/text/etag/...).
 *
 * @param version version string such as '1.3'; encoded as
 *        ((major - 1) << 4) + minor
 * @param pid public identifier
 * @param charset charset name (looked up in str2mib) or a raw MIBenum number
 * @param strings optional array of strings to place in the string table
 *
 * NOTE(review): pid and charsetNum are emitted with single-byte _write(),
 * while Reader decodes both as mb_uint32 — values above 0x7f would be
 * mis-encoded; round-trips only because common values are small. Confirm.
 */
function Writer(version, pid, charset, strings) {
  this._rawbuf = new ArrayBuffer(1024);
  this._buffer = new Uint8Array(this._rawbuf);
  this._pos = 0;
  this._codepage = 0;
  this._tagStack = [];

  var infos = version.split('.').map(function(x) {
    return parseInt(x);
  });
  var major = infos[0], minor = infos[1];
  var v = ((major - 1) << 4) + minor;

  var charsetNum = charset;
  if (typeof charset === 'string') {
    charsetNum = str2mib[charset];
    if (charsetNum === undefined)
      throw new Error('unknown charset '+charset);
  }
  // NOTE(review): TextEncoder invoked without `new` — legacy Encoding API.
  var encoder = this._encoder = TextEncoder(charset);

  this._write(v);
  this._write(pid);
  this._write(charsetNum);
  if (strings) {
    var bytes = strings.map(function(s) { return encoder.encode(s); });
    // Total table length: each string plus its NUL terminator.
    var len = bytes.reduce(function(x, y) { return x + y.length + 1; }, 0);
    this._write_mb_uint32(len);
    for (var iter in Iterator(bytes)) {
      var b = iter[1];
      this._write_bytes(b);
      this._write(0x00);
    }
  }
  else {
    this._write(0x00);
  }
}
exports.Writer = Writer;

// Attribute-name (or attribute-value constant) wrapper for the fluent API.
Writer.Attribute = function(name, value) {
  // Numeric names with the high bit set are ATTRVALUE constants and cannot
  // carry their own value.
  this.isValue = typeof name === 'number' && (name & 0x80);
  if (this.isValue && value !== undefined)
    throw new Error("Can't specify a value for attribute value constants");
  this.name = name;
  this.value = value;
};

// Reference to an entry in the string table, by byte index.
Writer.StringTableRef = function(index) {
  this.index = index;
};

// Character entity (UCS-4 code point).
Writer.Entity = function(code) {
  this.code = code;
};

// Typed extension value; validates subtype/index/data combinations.
Writer.Extension = function(subtype, index, data) {
  var validTypes = {
    'string':  { value:     Tokens.EXT_I_0,
                 validator: function(data) {
                   return typeof data === 'string';
                 } },
    'integer': { value:     Tokens.EXT_T_0,
                 validator: function(data) {
                   return typeof data === 'number';
                 } },
    'byte':    { value:     Tokens.EXT_0,
                 validator: function(data) {
                   return data === null || data === undefined;
                 } },
  };

  var info = validTypes[subtype];
  if (!info)
    throw new Error('Invalid WBXML Extension type');
  if (!info.validator(data))
    throw new Error('Data for WBXML Extension does not match type');
  if (index !== 0 && index !== 1 && index !== 2)
    throw new Error('Invalid WBXML Extension index');

  this.subtype = info.value;
  this.index = index;
  this.data = data;
};

// Shorthand factories for the wrapper types above.
Writer.a     = function(name, val) { return new Writer.Attribute(name, val); };
Writer.str_t = function(index)     { return new Writer.StringTableRef(index); };
Writer.ent   = function(code)      { return new Writer.Entity(code) };
Writer.ext   = function(subtype, index, data) { return new Writer.Extension(
  subtype, index, data); };

Writer.prototype = {
  _write: function(tok) {
    // Expand the buffer by a factor of two if we ran out of space.
if (this._pos === this._buffer.length - 1) {
      this._rawbuf = new ArrayBuffer(this._rawbuf.byteLength * 2);
      var buffer = new Uint8Array(this._rawbuf);

      for (var i = 0; i < this._buffer.length; i++)
        buffer[i] = this._buffer[i];

      this._buffer = buffer;
    }

    this._buffer[this._pos++] = tok;
  },

  // Emit a multi-byte (base-128) unsigned integer, high-continuation-bit
  // form, most significant group first.
  _write_mb_uint32: function(value) {
    var bytes = [];
    bytes.push(value % 0x80);
    while (value >= 0x80) {
      value >>= 7;
      bytes.push(0x80 + (value % 0x80));
    }

    for (var i = bytes.length - 1; i >= 0; i--)
      this._write(bytes[i]);
  },

  _write_bytes: function(bytes) {
    for (var i = 0; i < bytes.length; i++)
      this._write(bytes[i]);
  },

  // Encode a string in the document charset (no terminator is added here).
  _write_str: function(str) {
    this._write_bytes(this._encoder.encode(str));
  },

  // Emit SWITCH_PAGE only when the target codepage actually changes.
  _setCodepage: function(codepage) {
    if (this._codepage !== codepage) {
      this._write(Tokens.SWITCH_PAGE);
      this._write(codepage);
      this._codepage = codepage;
    }
  },

  // Emit a (possibly LITERAL) tag token with content/attribute flag bits,
  // followed by its attribute list when present.
  _writeTag: function(tag, stag, attrs) {
    if (tag === undefined)
      throw new Error('unknown tag');

    var flags = 0x00;
    if (stag)
      flags += 0x40;
    if (attrs.length)
      flags += 0x80;

    if (tag instanceof Writer.StringTableRef) {
      this._write(Tokens.LITERAL + flags);
      this._write_mb_uint32(tag.index);
    }
    else {
      this._setCodepage(tag >> 8);
      this._write((tag & 0xff) + flags);
    }

    if (attrs.length) {
      for (var iter in Iterator(attrs)) {
        var attr = iter[1];
        this._writeAttr(attr);
      }
      this._write(Tokens.END);
    }
  },

  // Emit one attribute (name token or LITERAL ref) plus its value pieces.
  _writeAttr: function(attr) {
    if (!(attr instanceof Writer.Attribute))
      throw new Error('Expected an Attribute object');
    if (attr.isValue)
      throw new Error("Can't use attribute value constants here");

    if (attr.name instanceof Writer.StringTableRef) {
      this._write(Tokens.LITERAL);
      // NOTE(review): written with _write(), not _write_mb_uint32() as
      // _writeTag() does for LITERAL — indices > 0x7f would mis-encode.
      this._write(attr.name.index);
    }
    else {
      this._setCodepage(attr.name >> 8);
      this._write(attr.name & 0xff);
    }
    this._writeText(attr.value, true);
  },

  // Emit text-ish content: arrays recurse; StringTableRef -> STR_T;
  // Entity -> ENTITY; Extension -> EXT*; Attribute value constants are only
  // legal inside attributes; anything else non-null becomes inline STR_I.
  _writeText: function(value, inAttr) {
    if (Array.isArray(value)) {
      for (var iter in Iterator(value)) {
        var piece = iter[1];
        this._writeText(piece, inAttr);
      }
    }
    else if (value instanceof Writer.StringTableRef) {
      this._write(Tokens.STR_T);
      this._write_mb_uint32(value.index);
    }
    else if (value instanceof Writer.Entity) {
      this._write(Tokens.ENTITY);
      this._write_mb_uint32(value.code);
    }
    else if (value instanceof Writer.Extension) {
      this._write(value.subtype + value.index);
      if (value.subtype === Tokens.EXT_I_0) {
        this._write_str(value.data);
        this._write(0x00);
      }
      else if (value.subtype === Tokens.EXT_T_0) {
        this._write_mb_uint32(value.data);
      }
    }
    else if (value instanceof Writer.Attribute) {
      if (!value.isValue)
        throw new Error('Unexpected Attribute object');
      if (!inAttr)
        throw new Error("Can't use attribute value constants outside of " +
                        "attributes");
      this._setCodepage(value.name >> 8);
      this._write(value.name & 0xff);
    }
    else if (value !== null && value !== undefined) {
      this._write(Tokens.STR_I);
      this._write_str(value.toString());
      this._write(0x00);
    }
  },

  /**
   * Write an element.  With a trailing non-Attribute argument, the element
   * is emitted as stag(...).text(tail).etag(); otherwise as an empty tag
   * with the given attributes.
   * @return {Writer} this, for chaining
   */
  tag: function(tag) {
    var tail = arguments.length > 1 ? arguments[arguments.length - 1] : null;
    if (tail === null || tail instanceof Writer.Attribute) {
      var rest = Array.prototype.slice.call(arguments, 1);
      this._writeTag(tag, false, rest);
      return this;
    }
    else {
      var head = Array.prototype.slice.call(arguments, 0, -1);
      return this.stag.apply(this, head)
                   .text(tail)
                 .etag();
    }
  },

  // Open an element (remaining arguments are its attributes).
  stag: function(tag) {
    var rest = Array.prototype.slice.call(arguments, 1);
    this._writeTag(tag, true, rest);
    this._tagStack.push(tag);
    return this;
  },

  // Close the innermost element; optionally verifies it matches `tag`.
  etag: function(tag) {
    if (this._tagStack.length === 0)
      throw new Error('Spurious etag() call!');
    var expectedTag = this._tagStack.pop();
    if (tag !== undefined && tag !== expectedTag)
      throw new Error('Closed the wrong tag');

    this._write(Tokens.END);
    return this;
  },

  text: function(value) {
    this._writeText(value);
    return this;
  },

  // Emit a processing instruction (target + value, END-terminated).
  pi: function(target, data) {
    this._write(Tokens.PI);
    this._writeAttr(Writer.a(target, data));
    this._write(Tokens.END);
    return this;
  },

  ext: function(subtype, index, data) {
    return this.text(Writer.ext(subtype, index, data));
  },

  // Emit an OPAQUE blob (string or array-like of bytes).
  opaque: function(data) {
    this._write(Tokens.OPAQUE);
    this._write_mb_uint32(data.length);
if (typeof data === 'string') {
      this._write_str(data);
    }
    else {
      for (var i = 0; i < data.length; i++)
        this._write(data[i]);
    }
    return this;
  },

  // Finished document as a fresh ArrayBuffer copy / as a Uint8Array view.
  get buffer() { return this._rawbuf.slice(0, this._pos); },
  get bytes() { return new Uint8Array(this._rawbuf, 0, this._pos); },
};

/**
 * SAX-ish event-driven walker over a Reader's parsed document: register
 * listeners on tag paths, then call run(reader).
 */
function EventParser() {
  this.listeners = [];
  this.onerror = function(e) { throw e; };
}
exports.EventParser = EventParser;

EventParser.prototype = {
  // `path` is an array matched element-wise against the tag stack; '*'
  // matches anything and an array entry matches any of its members.
  addEventListener: function(path, callback) {
    this.listeners.push({path: path, callback: callback});
  },

  _pathMatches: function(a, b) {
    return a.length === b.length && a.every(function(val, i) {
      if (b[i] === '*')
        return true;
      else if (Array.isArray(b[i])) {
        return b[i].indexOf(val) !== -1;
      }
      else
        return val === b[i];
    });
  },

  /**
   * Walk reader.document, firing each listener whose path matches.  Inside a
   * matched STAG subtree, nodes are recorded onto `children` arrays so the
   * ETAG-time callback receives the whole subtree; listener exceptions are
   * routed through this.onerror.
   *
   * @param reader {Reader}
   */
  run: function(reader) {
    var fullPath = [];
    var recPath = [];
    var recording = 0;

    var doc = reader.document;
    var doclen = doc.length;
    for (var iNode = 0; iNode < doclen; iNode++) {
      var node = doc[iNode];
      if (node.type === 'TAG') {
        // Empty element: a match fires immediately, with no children.
        fullPath.push(node.tag);
        for (var iter in Iterator(this.listeners)) {
          var listener = iter[1];
          if (this._pathMatches(fullPath, listener.path)) {
            node.children = [];
            try {
              listener.callback(node);
            }
            catch (e) {
              if (this.onerror)
                this.onerror(e);
            }
          }
        }

        fullPath.pop();
      }
      else if (node.type === 'STAG') {
        fullPath.push(node.tag);

        for (var iter in Iterator(this.listeners)) {
          var listener = iter[1];
          if (this._pathMatches(fullPath, listener.path)) {
            recording++;
          }
        }
      }
      else if (node.type === 'ETAG') {
        for (var iter in Iterator(this.listeners)) {
          var listener = iter[1];
          if (this._pathMatches(fullPath, listener.path)) {
            recording--;
            try {
              // Deliver the recorded subtree root for this element.
              listener.callback(recPath[recPath.length-1]);
            }
            catch (e) {
              if (this.onerror)
                this.onerror(e);
            }
          }
        }

        fullPath.pop();
      }

      if (recording) {
        if (node.type === 'STAG') {
          // Recorded subtree nodes are normalized to 'TAG' with children.
          node.type = 'TAG';
          node.children = [];
          if (recPath.length)
            recPath[recPath.length-1].children.push(node);
          recPath.push(node);
        }
        else if (node.type === 'ETAG') {
          recPath.pop();
        }
        else {
          node.children = [];
          recPath[recPath.length-1].children.push(node);
        }
      }
    }
  },
};

return exports;
}));

/* Copyright 2012 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// ActiveSync "Common" codepage: protocol-wide numeric status codes.
(function (root, factory) {
  if (typeof exports === 'object')
    module.exports = factory();
  else if (typeof define === 'function' && define.amd)
    define('activesync/codepages/Common',[], factory);
  else
    root.ASCPCommon = factory();
}(this, function() {
  'use strict';

  return {
    Enums: {
      Status: {
        InvalidContent:                                  '101',
        InvalidWBXML:                                    '102',
        InvalidXML:                                      '103',
        InvalidDateTime:                                 '104',
        InvalidCombinationOfIDs:                         '105',
        InvalidIDs:                                      '106',
        InvalidMIME:                                     '107',
        DeviceIdMissingOrInvalid:                        '108',
        DeviceTypeMissingOrInvalid:                      '109',
        ServerError:                                     '110',
        ServerErrorRetryLater:                           '111',
        ActiveDirectoryAccessDenied:                     '112',
        MailboxQuotaExceeded:                            '113',
        MailboxServerOffline:                            '114',
        SendQuotaExceeded:                               '115',
        MessageRecipientUnresolved:                      '116',
        MessageReplyNotAllowed:                          '117',
        MessagePreviouslySent:                           '118',
        MessageHasNoRecipient:                           '119',
        MailSubmissionFailed:                            '120',
        MessageReplyFailed:                              '121',
        AttachmentIsTooLarge:                            '122',
        UserHasNoMailbox:                                '123',
        UserCannotBeAnonymous:                           '124',
        UserPrincipalCouldNotBeFound:                    '125',
        UserDisabledForSync:                             '126',
        UserOnNewMailboxCannotSync:                      '127',
        UserOnLegacyMailboxCannotSync:                   '128',
        DeviceIsBlockedForThisUser:                      '129',
        AccessDenied:                                    '130',
        AccountDisabled:                                 '131',
        SyncStateNotFound:                               '132',
        SyncStateLocked:                                 '133',
        SyncStateCorrupt:                                '134',
        SyncStateAlreadyExists:                          '135',
        SyncStateVersionInvalid:                         '136',
        CommandNotSupported:                             '137',
        VersionNotSupported:                             '138',
        DeviceNotFullyProvisionable:                     '139',
        RemoteWipeRequested:                             '140',
        LegacyDeviceOnStrictPolicy:                      '141',
        DeviceNotProvisioned:                            '142',
        PolicyRefresh:                                   '143',
        InvalidPolicyKey:                                '144',
        ExternallyManagedDevicesNotAllowed:              '145',
        NoRecurrenceInCalendar:                          '146',
        UnexpectedItemClass:                             '147',
        RemoteServerHasNoSSL:                            '148',
        InvalidStoredRequest:                            '149',
        ItemNotFound:                                    '150',
        TooManyFolders:                                  '151',
        NoFoldersFounds:                                 '152',
        ItemsLostAfterMove:                              '153',
        FailureInMoveOperation:                          '154',
        MoveCommandDisallowedForNonPersistentMoveAction: '155',
        MoveCommandInvalidDestinationFolder:             '156',
        AvailabilityTooManyRecipients:                   '160',
        AvailabilityDLLimitReached:                      '161',
        AvailabilityTransientFailure:                    '162',
        AvailabilityFailure:                             '163',
        BodyPartPreferenceTypeNotSupported:              '164',
        DeviceInformationRequired:                       '165',
        InvalidAccountId:                                '166',
        AccountSendDisabled:                             '167',
        IRM_FeatureDisabled:                             '168',
        IRM_TransientError:                              '169',
        IRM_PermanentError:                              '170',
        IRM_InvalidTemplateID:                           '171',
        IRM_OperationNotPermitted:                       '172',
        NoPicture:                                       '173',
        PictureTooLarge:                                 '174',
        PictureLimitReached:                             '175',
        BodyPart_ConversationTooLarge:                   '176',
        MaximumDevicesReached:                           '177',
      },
    },
  };
}));

/* Copyright 2012 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

// ActiveSync "Contacts" codepage (0x01): contact-field tag tokens.
(function (root, factory) {
  if (typeof exports === 'object')
    module.exports = factory();
  else if (typeof define === 'function' && define.amd)
    define('activesync/codepages/Contacts',[], factory);
  else
    root.ASCPContacts = factory();
}(this, function() {
  'use strict';

  return {
    Tags: {
      Anniversary:               0x0105,
      AssistantName:             0x0106,
      AssistantPhoneNumber:      0x0107,
      Birthday:                  0x0108,
      Body:                      0x0109,
      BodySize:                  0x010A,
      BodyTruncated:             0x010B,
      Business2PhoneNumber:      0x010C,
      BusinessAddressCity:       0x010D,
      BusinessAddressCountry:    0x010E,
      BusinessAddressPostalCode: 0x010F,
      BusinessAddressState:      0x0110,
      BusinessAddressStreet:     0x0111,
      BusinessFaxNumber:         0x0112,
      BusinessPhoneNumber:       0x0113,
      CarPhoneNumber:            0x0114,
      Categories:                0x0115,
      Category:                  0x0116,
      Children:                  0x0117,
      Child:                     0x0118,
      CompanyName:               0x0119,
      Department:                0x011A,
      Email1Address:             0x011B,
      Email2Address:             0x011C,
      Email3Address:             0x011D,
      FileAs:                    0x011E,
      FirstName:                 0x011F,
      Home2PhoneNumber:          0x0120,
      HomeAddressCity:           0x0121,
      HomeAddressCountry:        0x0122,
      HomeAddressPostalCode:     0x0123,
      HomeAddressState:          0x0124,
      HomeAddressStreet:         0x0125,
      HomeFaxNumber:             0x0126,
      HomePhoneNumber:           0x0127,
      JobTitle:                  0x0128,
      LastName:                  0x0129,
      MiddleName:                0x012A,
      MobilePhoneNumber:         0x012B,
      OfficeLocation:            0x012C,
      OtherAddressCity:          0x012D,
      OtherAddressCountry:       0x012E,
      OtherAddressPostalCode:    0x012F,
      OtherAddressState:         0x0130,
      OtherAddressStreet:        0x0131,
      PagerNumber:               0x0132,
      RadioPhoneNumber:          0x0133,
      Spouse:                    0x0134,
      Suffix:                    0x0135,
      Title:                     0x0136,
      WebPage:                   0x0137,
      YomiCompanyName:           0x0138,
      YomiFirstName:             0x0139,
      YomiLastName:              0x013A,
      CompressedRTF:             0x013B,
      Picture:                   0x013C,
      Alias:                     0x013D,
      WeightedRank:              0x013E,
    },
  };
}));

/* Copyright 2012 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// ActiveSync "Calendar" codepage (0x04): calendar-item tag tokens.
(function (root, factory) {
  if (typeof exports === 'object')
    module.exports = factory();
  else if (typeof define === 'function' && define.amd)
    define('activesync/codepages/Calendar',[], factory);
  else
    root.ASCPCalendar = factory();
}(this, function() {
  'use strict';

  return {
    Tags: {
      TimeZone:                  0x0405,
      AllDayEvent:               0x0406,
      Attendees:                 0x0407,
      Attendee:                  0x0408,
      Email:                     0x0409,
      Name:                      0x040A,
      Body:                      0x040B,
      BodyTruncated:             0x040C,
      BusyStatus:                0x040D,
      Categories:                0x040E,
      Category:                  0x040F,
      CompressedRTF:             0x0410,
      DtStamp:                   0x0411,
      EndTime:                   0x0412,
      Exception:                 0x0413,
      Exceptions:                0x0414,
      Deleted:                   0x0415,
      ExceptionStartTime:        0x0416,
      Location:                  0x0417,
      MeetingStatus:             0x0418,
      OrganizerEmail:            0x0419,
      OrganizerName:             0x041A,
      Recurrence:                0x041B,
      Type:                      0x041C,
      Until:                     0x041D,
      Occurrences:               0x041E,
      Interval:                  0x041F,
      DayOfWeek:                 0x0420,
      DayOfMonth:                0x0421,
      WeekOfMonth:               0x0422,
      MonthOfYear:               0x0423,
      Reminder:                  0x0424,
      Sensitivity:               0x0425,
      Subject:                   0x0426,
      StartTime:                 0x0427,
      UID:                       0x0428,
      AttendeeStatus:            0x0429,
      AttendeeType:              0x042A,
      Attachment:                0x042B,
      Attachments:               0x042C,
      AttName:                   0x042D,
      AttSize:                   0x042E,
      AttOid:                    0x042F,
      AttMethod:                 0x0430,
      AttRemoved:                0x0431,
      DisplayName:               0x0432,
      DisallowNewTimeProposal:   0x0433,
      ResponseRequested:         0x0434,
      AppointmentReplyTime:      0x0435,
      ResponseType:              0x0436,
      CalendarType:              0x0437,
      IsLeapMonth:               0x0438,
      FirstDayOfWeek:            0x0439,
      OnlineMeetingConfLink:     0x043A,
      OnlineMeetingExternalLink: 0x043B,
    },
  };
}));

/* Copyright 2012 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// ActiveSync "MeetingResponse" codepage (0x08): tags plus status/response
// enums for replying to meeting requests.
(function (root, factory) {
  if (typeof exports === 'object')
    module.exports = factory();
  else if (typeof define === 'function' && define.amd)
    define('activesync/codepages/MeetingResponse',[], factory);
  else
    root.ASCPMeetingResponse = factory();
}(this, function() {
  'use strict';

  return {
    Tags: {
      CalendarId:      0x0805,
      CollectionId:    0x0806,
      MeetingResponse: 0x0807,
      RequestId:       0x0808,
      Request:         0x0809,
      Result:          0x080A,
      Status:          0x080B,
      UserResponse:    0x080C,
      InstanceId:      0x080E,
    },
    Enums: {
      Status: {
        Success:        '1',
        InvalidRequest: '2',
        MailboxError:   '3',
        ServerError:    '4',
      },
      UserResponse: {
        Accepted:  '1',
        Tentative: '2',
        Declined:  '3',
      },
    },
  };
}));

/* Copyright 2012 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

// ActiveSync "Tasks" codepage (0x09): task-item tag tokens.
(function (root, factory) {
  if (typeof exports === 'object')
    module.exports = factory();
  else if (typeof define === 'function' && define.amd)
    define('activesync/codepages/Tasks',[], factory);
  else
    root.ASCPTasks = factory();
}(this, function() {
  'use strict';

  return {
    Tags: {
      Body:                   0x0905,
      BodySize:               0x0906,
      BodyTruncated:          0x0907,
      Categories:             0x0908,
      Category:               0x0909,
      Complete:               0x090A,
      DateCompleted:          0x090B,
      DueDate:                0x090C,
      UtcDueDate:             0x090D,
      Importance:             0x090E,
      Recurrence:             0x090F,
      Recurrence_Type:        0x0910,
      Recurrence_Start:       0x0911,
      Recurrence_Until:       0x0912,
      Recurrence_Occurrences: 0x0913,
      Recurrence_Interval:    0x0914,
      Recurrence_DayOfMonth:  0x0915,
      Recurrence_DayOfWeek:   0x0916,
      Recurrence_WeekOfMonth: 0x0917,
      Recurrence_MonthOfYear: 0x0918,
      Recurrence_Regenerate:  0x0919,
      Recurrence_DeadOccur:   0x091A,
      ReminderSet:            0x091B,
      ReminderTime:           0x091C,
      Sensitivity:            0x091D,
      StartDate:              0x091E,
      UtcStartDate:           0x091F,
      Subject:                0x0920,
      CompressedRTF:          0x0921,
      OrdinalDate:            0x0922,
      SubOrdinalDate:         0x0923,
      CalendarType:           0x0924,
      IsLeapMonth:            0x0925,
      FirstDayOfWeek:         0x0926,
    }
  };
}));

/* Copyright 2012 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

// ActiveSync "ResolveRecipients" codepage (0x0A): recipient-resolution tags
// and related status/option enums.
(function (root, factory) {
  if (typeof exports === 'object')
    module.exports = factory();
  else if (typeof define === 'function' && define.amd)
    define('activesync/codepages/ResolveRecipients',[], factory);
  else
    root.ASCPResolveRecipients = factory();
}(this, function() {
  'use strict';

  return {
    Tags: {
      ResolveRecipients:      0x0A05,
      Response:               0x0A06,
      Status:                 0x0A07,
      Type:                   0x0A08,
      Recipient:              0x0A09,
      DisplayName:            0x0A0A,
      EmailAddress:           0x0A0B,
      Certificates:           0x0A0C,
      Certificate:            0x0A0D,
      MiniCertificate:        0x0A0E,
      Options:                0x0A0F,
      To:                     0x0A10,
      CertificateRetrieval:   0x0A11,
      RecipientCount:         0x0A12,
      MaxCertificates:        0x0A13,
      MaxAmbiguousRecipients: 0x0A14,
      CertificateCount:       0x0A15,
      Availability:           0x0A16,
      StartTime:              0x0A17,
      EndTime:                0x0A18,
      MergedFreeBusy:         0x0A19,
      Picture:                0x0A1A,
      MaxSize:                0x0A1B,
      Data:                   0x0A1C,
      MaxPictures:            0x0A1D,
    },
    Enums: {
      Status: {
        Success:                   '1',
        AmbiguousRecipientFull:    '2',
        AmbiguousRecipientPartial: '3',
        RecipientNotFound:         '4',
        ProtocolError:             '5',
        ServerError:               '6',
        InvalidSMIMECert:          '7',
        CertLimitReached:          '8',
      },
      CertificateRetrieval: {
        None: '1',
        Full: '2',
        Mini: '3',
      },
      MergedFreeBusy: {
        Free:      '0',
        Tentative: '1',
        Busy:      '2',
        Oof:       '3',
        NoData:    '4',
      },
    },
  };
}));

/* Copyright 2012 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('activesync/codepages/ValidateCert',[], factory); else root.ASCPValidateCert = factory(); }(this, function() { 'use strict'; return { Tags: { ValidateCert: 0x0B05, Certificates: 0x0B06, Certificate: 0x0B07, CertificateChain: 0x0B08, CheckCRL: 0x0B09, Status: 0x0B0A, }, Enums: { Status: { Success: '1', ProtocolError: '2', InvalidSignature: '3', UntrustedSource: '4', InvalidChain: '5', NotForEmail: '6', Expired: '7', InconsistentTimes: '8', IdMisused: '9', MissingInformation: '10', CAEndMismatch: '11', EmailAddressMismatch: '12', Revoked: '13', ServerOffline: '14', ChainRevoked: '15', RevocationUnknown: '16', UnknownError: '17', }, }, }; })); /* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('activesync/codepages/Contacts2',[], factory); else root.ASCPContacts2 = factory(); }(this, function() { 'use strict'; return { Tags: { CustomerId: 0x0C05, GovernmentId: 0x0C06, IMAddress: 0x0C07, IMAddress2: 0x0C08, IMAddress3: 0x0C09, ManagerName: 0x0C0A, CompanyMainPhone: 0x0C0B, AccountName: 0x0C0C, NickName: 0x0C0D, MMS: 0x0C0E, } }; })); /* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('activesync/codepages/Ping',[], factory); else root.ASCPPing = factory(); }(this, function() { 'use strict'; return { Tags: { Ping: 0x0D05, AutdState: 0x0D06, Status: 0x0D07, HeartbeatInterval: 0x0D08, Folders: 0x0D09, Folder: 0x0D0A, Id: 0x0D0B, Class: 0x0D0C, MaxFolders: 0x0D0D, }, Enums: { Status: { Expired: '1', Changed: '2', MissingParameters: '3', SyntaxError: '4', InvalidInterval: '5', TooManyFolders: '6', SyncFolders: '7', ServerError: '8', }, }, }; })); /* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('activesync/codepages/Provision',[], factory); else root.ASCPProvision = factory(); }(this, function() { 'use strict'; return { Tags: { Provision: 0x0E05, Policies: 0x0E06, Policy: 0x0E07, PolicyType: 0x0E08, PolicyKey: 0x0E09, Data: 0x0E0A, Status: 0x0E0B, RemoteWipe: 0x0E0C, EASProvisionDoc: 0x0E0D, DevicePasswordEnabled: 0x0E0E, AlphanumericDevicePasswordRequired: 0x0E0F, DeviceEncryptionEnabled: 0x0E10, RequireStorageCardEncryption: 0x0E10, PasswordRecoveryEnabled: 0x0E11, AttachmentsEnabled: 0x0E13, MinDevicePasswordLength: 0x0E14, MaxInactivityTimeDeviceLock: 0x0E15, MaxDevicePasswordFailedAttempts: 0x0E16, MaxAttachmentSize: 0x0E17, AllowSimpleDevicePassword: 0x0E18, DevicePasswordExpiration: 0x0E19, DevicePasswordHistory: 0x0E1A, AllowStorageCard: 0x0E1B, AllowCamera: 0x0E1C, RequireDeviceEncryption: 0x0E1D, AllowUnsignedApplications: 0x0E1E, AllowUnsignedInstallationPackages: 0x0E1F, MinDevicePasswordComplexCharacters: 0x0E20, AllowWiFi: 0x0E21, AllowTextMessaging: 0x0E22, AllowPOPIMAPEmail: 0x0E23, AllowBluetooth: 0x0E24, AllowIrDA: 0x0E25, RequireManualSyncWhenRoaming: 0x0E26, AllowDesktopSync: 0x0E27, MaxCalendarAgeFilter: 0x0E28, AllowHTMLEmail: 0x0E29, MaxEmailAgeFilter: 0x0E2A, MaxEmailBodyTruncationSize: 0x0E2B, MaxEmailHTMLBodyTruncationSize: 0x0E2C, RequireSignedSMIMEMessages: 0x0E2D, RequireEncryptedSMIMEMessages: 0x0E2E, RequireSignedSMIMEAlgorithm: 0x0E2F, RequireEncryptionSMIMEAlgorithm: 0x0E30, 
AllowSMIMEEncryptionAlgorithmNegotiation: 0x0E31, AllowSMIMESoftCerts: 0x0E32, AllowBrowser: 0x0E33, AllowConsumerEmail: 0x0E34, AllowRemoteDesktop: 0x0E35, AllowInternetSharing: 0x0E36, UnapprovedInROMApplicationList: 0x0E37, ApplicationName: 0x0E38, ApprovedApplicationList: 0x0E39, Hash: 0x0E3A, } }; })); /* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('activesync/codepages/Search',[], factory); else root.ASCPSearch = factory(); }(this, function() { 'use strict'; return { Tags: { Search: 0x0F05, Stores: 0x0F06, Store: 0x0F07, Name: 0x0F08, Query: 0x0F09, Options: 0x0F0A, Range: 0x0F0B, Status: 0x0F0C, Response: 0x0F0D, Result: 0x0F0E, Properties: 0x0F0F, Total: 0x0F10, EqualTo: 0x0F11, Value: 0x0F12, And: 0x0F13, Or: 0x0F14, FreeText: 0x0F15, DeepTraversal: 0x0F17, LongId: 0x0F18, RebuildResults: 0x0F19, LessThan: 0x0F1A, GreaterThan: 0x0F1B, Schema: 0x0F1C, Supported: 0x0F1D, UserName: 0x0F1E, Password: 0x0F1F, ConversationId: 0x0F20, Picture: 0x0F21, MaxSize: 0x0F22, MaxPictures: 0x0F23, }, Enums: { Status: { Success: '1', InvalidRequest: '2', ServerError: '3', BadLink: '4', AccessDenied: '5', NotFound: '6', ConnectionFailure: '7', TooComplex: '8', Timeout: '10', SyncFolders: '11', EndOfRange: '12', AccessBlocked: '13', CredentialsRequired: '14', } } }; })); /* Copyright 2012 Mozilla 
Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('activesync/codepages/GAL',[], factory); else root.ASCPGAL = factory(); }(this, function() { 'use strict'; return { Tags: { DisplayName: 0x1005, Phone: 0x1006, Office: 0x1007, Title: 0x1008, Company: 0x1009, Alias: 0x100A, FirstName: 0x100B, LastName: 0x100C, HomePhone: 0x100D, MobilePhone: 0x100E, EmailAddress: 0x100F, Picture: 0x1010, Status: 0x1011, Data: 0x1012, } }; })); /* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('activesync/codepages/Settings',[], factory); else root.ASCPSettings = factory(); }(this, function() { 'use strict'; return { Tags: { Settings: 0x1205, Status: 0x1206, Get: 0x1207, Set: 0x1208, Oof: 0x1209, OofState: 0x120A, StartTime: 0x120B, EndTime: 0x120C, OofMessage: 0x120D, AppliesToInternal: 0x120E, AppliesToExternalKnown: 0x120F, AppliesToExternalUnknown: 0x1210, Enabled: 0x1211, ReplyMessage: 0x1212, BodyType: 0x1213, DevicePassword: 0x1214, Password: 0x1215, DeviceInformation: 0x1216, Model: 0x1217, IMEI: 0x1218, FriendlyName: 0x1219, OS: 0x121A, OSLanguage: 0x121B, PhoneNumber: 0x121C, UserInformation: 0x121D, EmailAddresses: 0x121E, SmtpAddress: 0x121F, UserAgent: 0x1220, EnableOutboundSMS: 0x1221, MobileOperator: 0x1222, PrimarySmtpAddress: 0x1223, Accounts: 0x1224, Account: 0x1225, AccountId: 0x1226, AccountName: 0x1227, UserDisplayName: 0x1228, SendDisabled: 0x1229, /* Missing tag value 0x122A */ RightsManagementInformation: 0x122B, }, Enums: { Status: { Success: '1', ProtocolError: '2', AccessDenied: '3', ServerError: '4', InvalidArguments: '5', ConflictingArguments: '6', DeniedByPolicy: '7', }, OofState: { Disabled: '0', Global: '1', TimeBased: '2', } } }; })); /* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('activesync/codepages/DocumentLibrary',[], factory); else root.ASCPDocumentLibrary = factory(); }(this, function() { 'use strict'; return { Tags: { LinkId: 0x1305, DisplayName: 0x1306, IsFolder: 0x1307, CreationDate: 0x1308, LastModifiedDate: 0x1309, IsHidden: 0x130A, ContentLength: 0x130B, ContentType: 0x130C, }, }; })); /* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('activesync/codepages/Email2',[], factory); else root.ASCPEmail2 = factory(); }(this, function() { 'use strict'; return { Tags: { UmCallerID: 0x1605, UmUserNotes: 0x1606, UmAttDuration: 0x1607, UmAttOrder: 0x1608, ConversationId: 0x1609, ConversationIndex: 0x160A, LastVerbExecuted: 0x160B, LastVerbExecutionTime: 0x160C, ReceivedAsBcc: 0x160D, Sender: 0x160E, CalendarType: 0x160F, IsLeapMonth: 0x1610, AccountId: 0x1611, FirstDayOfWeek: 0x1612, MeetingMessageType: 0x1613, }, Enums: { LastVerbExecuted: { Unknown: '0', ReplyToSender: '1', ReplyToAll: '2', Forward: '3', }, CalendarType: { Default: '0', Gregorian: '1', GregorianUS: '2', Japan: '3', Taiwan: '4', Korea: '5', Hijri: '6', Thai: '7', Hebrew: '8', GregorianMeFrench: '9', GregorianArabic: '10', GregorianTranslatedEnglish: '11', GregorianTranslatedFrench: '12', JapaneseLunar: '14', ChineseLunar: '15', KoreanLunar: '20', }, FirstDayOfWeek: { Sunday: '0', Monday: '1', Tuesday: '2', Wednesday: '3', Thursday: '4', Friday: '5', Saturday: '6', }, MeetingMessageType: { Unspecified: '0', InitialRequest: '1', FullUpdate: '2', InformationalUpdate: '3', Outdated: '4', DelegatorsCopy: '5', Delegated: '6', } } }; })); /* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('activesync/codepages/Notes',[], factory); else root.ASCPNotes = factory(); }(this, function() { 'use strict'; return { Tags: { Subject: 0x1705, MessageClass: 0x1706, LastModifiedDate: 0x1707, Categories: 0x1708, Category: 0x1709, } }; })); /* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(); else if (typeof define === 'function' && define.amd) define('activesync/codepages/RightsManagement',[], factory); else root.ASCPRightsManagement = factory(); }(this, function() { 'use strict'; return { Tags: { RightsManagementSupport: 0x1805, RightsManagementTemplates: 0x1806, RightsManagementTemplate: 0x1807, RightsManagementLicense: 0x1808, EditAllowed: 0x1809, ReplyAllowed: 0x180A, ReplyAllAllowed: 0x180B, ForwardAllowed: 0x180C, ModifyRecipientsAllowed: 0x180D, ExtractAllowed: 0x180E, PrintAllowed: 0x180F, ExportAllowed: 0x1810, ProgrammaticAccessAllowed: 0x1811, Owner: 0x1812, ContentExpiryDate: 0x1813, TemplateID: 0x1814, TemplateName: 0x1815, TemplateDescription: 0x1816, ContentOwner: 0x1817, RemoveRightsManagementDistribution: 0x1818, } }; })); /* Copyright 2012 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ (function (root, factory) { if (typeof exports === 'object') module.exports = factory(require('wbxml'), require('activesync/codepages')); else if (typeof define === 'function' && define.amd) define('activesync/protocol',['wbxml', 'activesync/codepages'], factory); else root.ActiveSyncProtocol = factory(WBXML, ActiveSyncCodepages); }(this, function(WBXML, ASCP) { 'use strict'; var exports = {}; function nullCallback() {} /** * Create a constructor for a custom error type that works like a built-in * Error. * * @param name the string name of the error * @param parent (optional) a parent class for the error, defaults to Error * @param extraArgs an array of extra arguments that can be passed to the * constructor of this error type * @return the constructor for this error */ function makeError(name, parent, extraArgs) { function CustomError() { // Try to let users call this as CustomError(...) without the "new". This // is imperfect, and if you call this function directly and give it a // |this| that's a CustomError, things will break. Don't do it! var self = this instanceof CustomError ? 
this : Object.create(CustomError.prototype); var tmp = Error(); var offset = 1; self.stack = tmp.stack.substring(tmp.stack.indexOf('\n') + 1); self.message = arguments[0] || tmp.message; if (extraArgs) { offset += extraArgs.length; for (var i = 0; i < extraArgs.length; i++) self[extraArgs[i]] = arguments[i+1]; } var m = /@(.+):(.+)/.exec(self.stack); self.fileName = arguments[offset] || (m && m[1]) || ""; self.lineNumber = arguments[offset + 1] || (m && m[2]) || 0; return self; } CustomError.prototype = Object.create((parent || Error).prototype); CustomError.prototype.name = name; CustomError.prototype.constructor = CustomError; return CustomError; } var AutodiscoverError = makeError('ActiveSync.AutodiscoverError'); exports.AutodiscoverError = AutodiscoverError; var AutodiscoverDomainError = makeError('ActiveSync.AutodiscoverDomainError', AutodiscoverError); exports.AutodiscoverDomainError = AutodiscoverDomainError; var HttpError = makeError('ActiveSync.HttpError', null, ['status']); exports.HttpError = HttpError; function nsResolver(prefix) { var baseUrl = 'http://schemas.microsoft.com/exchange/autodiscover/'; var ns = { rq: baseUrl + 'mobilesync/requestschema/2006', ad: baseUrl + 'responseschema/2006', ms: baseUrl + 'mobilesync/responseschema/2006', }; return ns[prefix] || null; } function Version(str) { var details = str.split('.').map(function(x) { return parseInt(x); }); this.major = details[0], this.minor = details[1]; } exports.Version = Version; Version.prototype = { eq: function(other) { if (!(other instanceof Version)) other = new Version(other); return this.major === other.major && this.minor === other.minor; }, ne: function(other) { return !this.eq(other); }, gt: function(other) { if (!(other instanceof Version)) other = new Version(other); return this.major > other.major || (this.major === other.major && this.minor > other.minor); }, gte: function(other) { if (!(other instanceof Version)) other = new Version(other); return this.major >= other.major || 
(this.major === other.major && this.minor >= other.minor); }, lt: function(other) { return !this.gte(other); }, lte: function(other) { return !this.gt(other); }, toString: function() { return this.major + '.' + this.minor; }, }; /** * Set the Authorization header on an XMLHttpRequest. * * @param xhr the XMLHttpRequest * @param username the username * @param password the user's password */ function setAuthHeader(xhr, username, password) { var authorization = 'Basic ' + btoa(username + ':' + password); xhr.setRequestHeader('Authorization', authorization); } /** * Perform autodiscovery for the server associated with this account. * * @param aEmailAddress the user's email address * @param aPassword the user's password * @param aTimeout a timeout (in milliseconds) for the request * @param aCallback a callback taking an error status (if any) and the * server's configuration * @param aNoRedirect true if autodiscovery should *not* follow any * specified redirects (typically used when autodiscover has already * told us about a redirect) */ function autodiscover(aEmailAddress, aPassword, aTimeout, aCallback, aNoRedirect) { if (!aCallback) aCallback = nullCallback; var domain = aEmailAddress.substring(aEmailAddress.indexOf('@') + 1); // The first time we try autodiscovery, we should try to recover from // AutodiscoverDomainErrors and HttpErrors. The second time, *all* errors // should be reported to the callback. do_autodiscover(domain, aEmailAddress, aPassword, aTimeout, aNoRedirect, function(aError, aConfig) { if (aError instanceof AutodiscoverDomainError || aError instanceof HttpError) do_autodiscover('autodiscover.' + domain, aEmailAddress, aPassword, aTimeout, aNoRedirect, aCallback); else aCallback(aError, aConfig); }); } exports.autodiscover = autodiscover; /** * Perform the actual autodiscovery process for a given URL. 
* * @param aHost the host name to attempt autodiscovery for * @param aEmailAddress the user's email address * @param aPassword the user's password * @param aTimeout a timeout (in milliseconds) for the request * @param aNoRedirect true if autodiscovery should *not* follow any * specified redirects (typically used when autodiscover has already * told us about a redirect) * @param aCallback a callback taking an error status (if any) and the * server's configuration */ function do_autodiscover(aHost, aEmailAddress, aPassword, aTimeout, aNoRedirect, aCallback) { var xhr = new XMLHttpRequest({mozSystem: true, mozAnon: true}); xhr.open('POST', 'https://' + aHost + '/autodiscover/autodiscover.xml', true); setAuthHeader(xhr, aEmailAddress, aPassword); xhr.setRequestHeader('Content-Type', 'text/xml'); xhr.timeout = aTimeout; xhr.upload.onprogress = xhr.upload.onload = function() { xhr.timeout = 0; }; xhr.onload = function() { if (xhr.status < 200 || xhr.status >= 300) return aCallback(new HttpError(xhr.statusText, xhr.status)); var uid = Math.random(); self.postMessage({ uid: uid, type: 'configparser', cmd: 'accountactivesync', args: [xhr.responseText] }); self.addEventListener('message', function onworkerresponse(evt) { var data = evt.data; if (data.type != 'configparser' || data.cmd != 'accountactivesync' || data.uid != uid) { return; } self.removeEventListener(evt.type, onworkerresponse); var args = data.args; var config = args[0], error = args[1], redirectedEmail = args[2]; if (error) { aCallback(new AutodiscoverDomainError(error), config); } else if (redirectedEmail) { autodiscover(redirectedEmail, aPassword, aTimeout, aCallback, true); } else { aCallback(null, config); } }); }; xhr.ontimeout = xhr.onerror = function() { aCallback(new Error('Error getting Autodiscover URL')); }; // TODO: use something like // http://ejohn.org/blog/javascript-micro-templating/ here? 
var postdata = '<?xml version="1.0" encoding="utf-8"?>\n' + '<Autodiscover xmlns="' + nsResolver('rq') + '">\n' + ' <Request>\n' + ' <EMailAddress>' + aEmailAddress + '</EMailAddress>\n' + ' <AcceptableResponseSchema>' + nsResolver('ms') + '</AcceptableResponseSchema>\n' + ' </Request>\n' + '</Autodiscover>'; xhr.send(postdata); } /** * Create a new ActiveSync connection. * * ActiveSync connections use XMLHttpRequests to communicate with the * server. These XHRs are created with mozSystem: true and mozAnon: true to, * respectively, help with CORS, and to ignore the authentication cache. The * latter is important because 1) it prevents the HTTP auth dialog from * appearing if the user's credentials are wrong and 2) it allows us to * connect to the same server as multiple users. * * @param aDeviceId (optional) a string identifying this device * @param aDeviceType (optional) a string identifying the type of this device */ function Connection(aDeviceId, aDeviceType) { this._deviceId = aDeviceId || 'v140Device'; this._deviceType = aDeviceType || 'SmartPhone'; this.timeout = 0; this._connected = false; this._waitingForConnection = false; this._connectionError = null; this._connectionCallbacks = []; this.baseUrl = null; this._username = null; this._password = null; this.versions = []; this.supportedCommands = []; this.currentVersion = null; } exports.Connection = Connection; Connection.prototype = { /** * Perform any callbacks added during the connection process. * * @param aError the error status (if any) */ _notifyConnected: function(aError) { if (aError) this.disconnect(); for (var iter in Iterator(this._connectionCallbacks)) { var callback = iter[1]; callback.apply(callback, arguments); } this._connectionCallbacks = []; }, /** * Get the connection status. * * @return true iff we are fully connected to the server */ get connected() { return this._connected; }, /* * Initialize the connection with a server and account credentials. 
* * @param aServer the ActiveSync server to connect to * @param aUsername the account's username * @param aPassword the account's password */ open: function(aServer, aUsername, aPassword) { this.baseUrl = aServer + '/Microsoft-Server-ActiveSync'; this._username = aUsername; this._password = aPassword; }, /** * Connect to the server with this account by getting the OPTIONS from * the server (and verifying the account's credentials). * * @param aCallback a callback taking an error status (if any) and the * server's options. */ connect: function(aCallback) { // If we're already connected, just run the callback and return. if (this.connected) { if (aCallback) aCallback(null); return; } // Otherwise, queue this callback up to fire when we do connect. if (aCallback) this._connectionCallbacks.push(aCallback); // Don't do anything else if we're already trying to connect. if (this._waitingForConnection) return; this._waitingForConnection = true; this._connectionError = null; this.getOptions((function(aError, aOptions) { this._waitingForConnection = false; this._connectionError = aError; if (aError) { console.error('Error connecting to ActiveSync:', aError); return this._notifyConnected(aError, aOptions); } this._connected = true; this.versions = aOptions.versions; this.supportedCommands = aOptions.commands; this.currentVersion = new Version(aOptions.versions.slice(-1)[0]); return this._notifyConnected(null, aOptions); }).bind(this)); }, /** * Disconnect from the ActiveSync server, and reset the connection state. * The server and credentials remain set however, so you can safely call * connect() again immediately after. */ disconnect: function() { if (this._waitingForConnection) throw new Error("Can't disconnect while waiting for server response"); this._connected = false; this.versions = []; this.supportedCommands = []; this.currentVersion = null; }, /** * Attempt to provision this account. 
XXX: Currently, this doesn't actually * do anything, but it's useful as a test command for Gmail to ensure that * the user entered their password correctly. * * @param aCallback a callback taking an error status (if any) and the * WBXML response */ provision: function(aCallback) { var pv = ASCP.Provision.Tags; var w = new WBXML.Writer('1.3', 1, 'UTF-8'); w.stag(pv.Provision) .etag(); this.postCommand(w, aCallback); }, /** * Get the options for the server associated with this account. * * @param aCallback a callback taking an error status (if any), and the * resulting options. */ getOptions: function(aCallback) { if (!aCallback) aCallback = nullCallback; var conn = this; var xhr = new XMLHttpRequest({mozSystem: true, mozAnon: true}); xhr.open('OPTIONS', this.baseUrl, true); setAuthHeader(xhr, this._username, this._password); xhr.timeout = this.timeout; xhr.upload.onprogress = xhr.upload.onload = function() { xhr.timeout = 0; }; xhr.onload = function() { if (xhr.status < 200 || xhr.status >= 300) { console.error('ActiveSync options request failed with response ' + xhr.status); aCallback(new HttpError(xhr.statusText, xhr.status)); return; } var result = { versions: xhr.getResponseHeader('MS-ASProtocolVersions').split(','), commands: xhr.getResponseHeader('MS-ASProtocolCommands').split(','), }; aCallback(null, result); }; xhr.ontimeout = xhr.onerror = function() { var error = new Error('Error getting OPTIONS URL'); console.error(error); aCallback(error); }; // Set the response type to "text" so that we don't try to parse an empty // body as XML. xhr.responseType = 'text'; xhr.send(); }, /** * Check if the server supports a particular command. Requires that we be * connected to the server already. 
* * @param aCommand a string/tag representing the command type * @return true iff the command is supported */ supportsCommand: function(aCommand) { if (!this.connected) throw new Error('Connection required to get command'); if (typeof aCommand === 'number') aCommand = ASCP.__tagnames__[aCommand]; return this.supportedCommands.indexOf(aCommand) !== -1; }, /** * DEPRECATED. See postCommand() below. */ doCommand: function() { console.warn('doCommand is deprecated. Use postCommand instead.'); this.postCommand.apply(this, arguments); }, /** * Send a WBXML command to the ActiveSync server and listen for the * response. * * @param aCommand the WBXML representing the command or a string/tag * representing the command type for empty commands * @param aCallback a callback to call when the server has responded; takes * two arguments: an error status (if any) and the response as a * WBXML reader. If the server returned an empty response, the * response argument is null. * @param aExtraParams (optional) an object containing any extra URL * parameters that should be added to the end of the request URL * @param aExtraHeaders (optional) an object containing any extra HTTP * headers to send in the request * @param aProgressCallback (optional) a callback to invoke with progress * information, when available. Two arguments are provided: the * number of bytes received so far, and the total number of bytes * expected (when known, 0 if unknown). 
*/ postCommand: function(aCommand, aCallback, aExtraParams, aExtraHeaders, aProgressCallback) { var contentType = 'application/vnd.ms-sync.wbxml'; if (typeof aCommand === 'string' || typeof aCommand === 'number') { this.postData(aCommand, contentType, null, aCallback, aExtraParams, aExtraHeaders); } else { var r = new WBXML.Reader(aCommand, ASCP); var commandName = r.document[0].localTagName; this.postData(commandName, contentType, aCommand.buffer, aCallback, aExtraParams, aExtraHeaders, aProgressCallback); } }, /** * Send arbitrary data to the ActiveSync server and listen for the response. * * @param aCommand a string (or WBXML tag) representing the command type * @param aContentType the content type of the post data * @param aData the data to be posted * @param aCallback a callback to call when the server has responded; takes * two arguments: an error status (if any) and the response as a * WBXML reader. If the server returned an empty response, the * response argument is null. * @param aExtraParams (optional) an object containing any extra URL * parameters that should be added to the end of the request URL * @param aExtraHeaders (optional) an object containing any extra HTTP * headers to send in the request * @param aProgressCallback (optional) a callback to invoke with progress * information, when available. Two arguments are provided: the * number of bytes received so far, and the total number of bytes * expected (when known, 0 if unknown). */ postData: function(aCommand, aContentType, aData, aCallback, aExtraParams, aExtraHeaders, aProgressCallback) { // Make sure our command name is a string. if (typeof aCommand === 'number') aCommand = ASCP.__tagnames__[aCommand]; if (!this.supportsCommand(aCommand)) { var error = new Error("This server doesn't support the command " + aCommand); console.error(error); aCallback(error); return; } // Build the URL parameters. 
var params = [ ['Cmd', aCommand], ['User', this._username], ['DeviceId', this._deviceId], ['DeviceType', this._deviceType] ]; if (aExtraParams) { for (var iter in Iterator(params)) { var param = iter[1]; if (param[0] in aExtraParams) throw new TypeError('reserved URL parameter found'); } for (var kv in Iterator(aExtraParams)) params.push(kv); } var paramsStr = params.map(function(i) { return encodeURIComponent(i[0]) + '=' + encodeURIComponent(i[1]); }).join('&'); // Now it's time to make our request! var xhr = new XMLHttpRequest({mozSystem: true, mozAnon: true}); xhr.open('POST', this.baseUrl + '?' + paramsStr, true); setAuthHeader(xhr, this._username, this._password); xhr.setRequestHeader('MS-ASProtocolVersion', this.currentVersion); xhr.setRequestHeader('Content-Type', aContentType); // Add extra headers if we have any. if (aExtraHeaders) { for (var iter in Iterator(aExtraHeaders)) { var key = iter[0], key = iter[1]; xhr.setRequestHeader(key, value); } } xhr.timeout = this.timeout; xhr.upload.onprogress = xhr.upload.onload = function() { xhr.timeout = 0; }; xhr.onprogress = function(event) { if (aProgressCallback) aProgressCallback(event.loaded, event.total); }; var conn = this; var parentArgs = arguments; xhr.onload = function() { // This status code is a proprietary Microsoft extension used to // indicate a redirect, not to be confused with the draft-standard // "Unavailable For Legal Reasons" status. 
More info available here: // <http://msdn.microsoft.com/en-us/library/gg651019.aspx> if (xhr.status === 451) { conn.baseUrl = xhr.getResponseHeader('X-MS-Location'); conn.postData.apply(conn, parentArgs); return; } if (xhr.status < 200 || xhr.status >= 300) { console.error('ActiveSync command ' + aCommand + ' failed with ' + 'response ' + xhr.status); aCallback(new HttpError(xhr.statusText, xhr.status)); return; } var response = null; if (xhr.response.byteLength > 0) response = new WBXML.Reader(new Uint8Array(xhr.response), ASCP); aCallback(null, response); }; xhr.ontimeout = xhr.onerror = function() { var error = new Error('Error getting command URL'); console.error(error); aCallback(error); }; xhr.responseType = 'arraybuffer'; xhr.send(aData); }, }; return exports; }));
sergecodd/FireFox-OS
B2G/gaia/apps/email/js/ext/mailapi/activesync/protocollayer.js
JavaScript
apache-2.0
98,854
package org.apache.commons.digester3.plugins;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.util.Properties;

import org.apache.commons.digester3.Digester;

/**
 * Each concrete implementation of RuleFinder is an algorithm for locating a source of digester rules for a plugin. The
 * algorithm may use info explicitly provided by the user as part of the plugin declaration, or not (in which case the
 * concrete RuleFinder subclass typically has Dflt as part of its name).
 * <p>
 * Instances of this class can also be regarded as a Factory for RuleLoaders, except that an instance of a RuleLoader is
 * only created if the particular finder algorithm can locate a suitable source of rules given the plugin class and
 * associated properties.
 * <p>
 * This is an abstract class rather than an interface in order to make it possible to enhance this class in future
 * without breaking binary compatibility; it is possible to add methods to an abstract class, but not to an interface.
 *
 * @since 1.6
 */
public abstract class RuleFinder
{

    /**
     * Applies the finder algorithm to attempt to locate a source of digester rules for the specified plugin class.
     * <p>
     * This method is invoked when a plugin is declared by the user, either via an explicit use of
     * PluginDeclarationRule, or implicitly via an "inline declaration" where the declaration and use are simultaneous.
     * <p>
     * If dynamic rules for the specified plugin class are located, then the RuleFinder will return a RuleLoader object
     * encapsulating those rules, and this object will be invoked each time the user actually requests an instance of
     * the declared plugin class, to load the custom rules associated with that plugin instance.
     * <p>
     * If no dynamic rules can be found, null is returned. This is not an error; merely an indication that this
     * particular algorithm found no matches.
     * <p>
     * The properties object holds any xml attributes the user may have specified on the plugin declaration in order to
     * indicate how to locate the plugin rules.
     *
     * @param d the Digester instance in which the plugin class is being declared
     * @param pluginClass the plugin Java class whose dynamic rules are sought
     * @param p the properties object that holds any xml attributes the user may have specified on the plugin
     *          declaration in order to indicate how to locate the plugin rules
     * @return a source of digester rules for the specified plugin class, or null if this algorithm found no matches
     * @throws PluginException if the algorithm finds a source of rules, but there is something invalid
     *         about that source
     */
    public abstract RuleLoader findLoader( Digester d, Class<?> pluginClass, Properties p )
        throws PluginException;

}
apache/commons-digester
core/src/main/java/org/apache/commons/digester3/plugins/RuleFinder.java
Java
apache-2.0
3,535
/** * @license Copyright 2017 Google Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ 'use strict'; /** * Manages drag and drop file input for the page. */ class DragAndDrop { /** * @param {function(!File)} fileHandlerCallback Invoked when the user chooses a new file. */ constructor(fileHandlerCallback) { this._dropZone = document.querySelector('.drop_zone'); this._fileHandlerCallback = fileHandlerCallback; this._dragging = false; this._addListeners(); } _addListeners() { // The mouseleave event is more reliable than dragleave when the user drops // the file outside the window. document.addEventListener('mouseleave', _ => { if (!this._dragging) { return; } this._resetDraggingUI(); }); document.addEventListener('dragover', e => { e.stopPropagation(); e.preventDefault(); e.dataTransfer.dropEffect = 'copy'; // Explicitly show as copy action. }); document.addEventListener('dragenter', _ => { this._dropZone.classList.add('dropping'); this._dragging = true; }); document.addEventListener('drop', e => { e.stopPropagation(); e.preventDefault(); this._resetDraggingUI(); // Note, this ignores multiple files in the drop, only taking the first. this._fileHandlerCallback(e.dataTransfer.files[0]); }); } _resetDraggingUI() { this._dropZone.classList.remove('dropping'); this._dragging = false; } } if (typeof module !== 'undefined' && module.exports) { module.exports = DragAndDrop; }
tkadlec/lighthouse
lighthouse-viewer/app/src/drag-and-drop.js
JavaScript
apache-2.0
2,079
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kylin.job.tools;

import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A deliberately permissive {@link X509TrustManager}.
 * <p>
 * SECURITY NOTE(review): although this class wraps the platform's standard
 * trust manager, the {@link #checkClientTrusted} and {@link #checkServerTrusted}
 * overrides below are empty, and {@link #isClientTrusted} unconditionally
 * returns {@code true} — so in practice ANY certificate chain is accepted.
 * This disables TLS peer verification and should only be used against trusted,
 * internal endpoints (presumably the intent here; confirm with callers).
 *
 * @author xduo
 */
public class DefaultX509TrustManager implements X509TrustManager {

    /** Log object for this class. */
    private static Logger LOG = LoggerFactory.getLogger(DefaultX509TrustManager.class);

    // The JRE's default trust manager. Used by getAcceptedIssuers() and for
    // validity checks in isServerTrusted(), but NOT consulted by the empty
    // checkClientTrusted()/checkServerTrusted() overrides below.
    private X509TrustManager standardTrustManager = null;

    /**
     * Constructor for DefaultX509TrustManager.
     *
     * Builds the underlying standard trust manager from the supplied keystore
     * (a null keystore makes the factory fall back to the JRE's default
     * cacerts store).
     *
     * @param keystore the keystore to initialize the trust manager factory with
     * @throws NoSuchAlgorithmException if no trust manager is available from the factory
     * @throws KeyStoreException if the factory cannot be initialized with the keystore
     */
    public DefaultX509TrustManager(KeyStore keystore) throws NoSuchAlgorithmException, KeyStoreException {
        super();
        // NOTE(review): KeyManagerFactory.getDefaultAlgorithm() is used here to
        // obtain a TrustManagerFactory; the exception message below suggests
        // "SunX509" was the expected algorithm.
        TrustManagerFactory factory = TrustManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
        factory.init(keystore);
        TrustManager[] trustmanagers = factory.getTrustManagers();

        if (trustmanagers.length == 0) {
            throw new NoSuchAlgorithmException("SunX509 trust manager not supported");
        }

        this.standardTrustManager = (X509TrustManager) trustmanagers[0];
    }

    /** Delegates to the standard trust manager's accepted issuer list. */
    public X509Certificate[] getAcceptedIssuers() {
        return this.standardTrustManager.getAcceptedIssuers();
    }

    /**
     * Always trusts the client. SECURITY NOTE: the delegation to the standard
     * trust manager is commented out, so no validation is performed.
     */
    public boolean isClientTrusted(X509Certificate[] certificates) {
        return true;
        // return this.standardTrustManager.isClientTrusted(certificates);
    }

    /**
     * Logs the chain and, only when the chain has exactly one certificate,
     * checks that certificate's validity dates. Every other chain (including
     * null or multi-certificate chains) is trusted unconditionally.
     */
    public boolean isServerTrusted(X509Certificate[] certificates) {
        if ((certificates != null) && LOG.isDebugEnabled()) {
            LOG.debug("Server certificate chain:");

            for (int i = 0; i < certificates.length; i++) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("X509Certificate[" + i + "]=" + certificates[i]);
                }
            }
        }

        if ((certificates != null) && (certificates.length == 1)) {
            X509Certificate certificate = certificates[0];

            try {
                // Only checks the notBefore/notAfter dates — not the signature
                // or issuer chain.
                certificate.checkValidity();
            } catch (CertificateException e) {
                LOG.error(e.toString());

                return false;
            }

            return true;
        } else {
            return true;
            // return this.standardTrustManager.isServerTrusted(certificates);
        }
    }

    // SECURITY NOTE(review): empty override — accepts ANY client certificate
    // chain without validation.
    @Override
    public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
        // TODO Auto-generated method stub

    }

    // SECURITY NOTE(review): empty override — accepts ANY server certificate
    // chain without validation (TLS MITM is possible).
    @Override
    public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
        // TODO Auto-generated method stub

    }

}
lemire/incubator-kylin
job/src/main/java/org/apache/kylin/job/tools/DefaultX509TrustManager.java
Java
apache-2.0
3,718
#!/usr/bin/env python # Copyright 2016 Criteo # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """List Command.""" from __future__ import print_function from biggraphite.cli import command from biggraphite.glob_utils import graphite_glob def list_metrics(accessor, pattern, graphite=True): """Return the list of metrics corresponding to pattern. Exit with error message if None. Args: accessor: Accessor, a connected accessor pattern: string, e.g. my.metric.a or my.metric.**.a Optional Args: graphite: bool, use graphite globbing if True. Returns: iterable(Metric) """ if not graphite: metrics_names = accessor.glob_metric_names(pattern) else: metrics, _ = graphite_glob( accessor, pattern, metrics=True, directories=False ) metrics_names = [metric.name for metric in metrics] for metric in metrics_names: if metric is None: continue yield accessor.get_metric(metric) class CommandList(command.BaseCommand): """List for metrics.""" NAME = "list" HELP = "List metrics." def add_arguments(self, parser): """Add custom arguments. See command.CommandBase. """ parser.add_argument("glob", help="One metric name or globbing on metrics names") parser.add_argument( "--graphite", default=False, action="store_true", help="Enable Graphite globbing", ) def run(self, accessor, opts): """List metrics and directories. See command.CommandBase. 
""" accessor.connect() if not opts.graphite: directories_names = accessor.glob_directory_names(opts.glob) else: _, directories_names = graphite_glob( accessor, opts.glob, metrics=False, directories=True ) for directory in directories_names: print("d %s" % directory) for metric in list_metrics(accessor, opts.glob, opts.graphite): if metric: print("m %s %s" % (metric.name, metric.metadata.as_string_dict()))
Thib17/biggraphite
biggraphite/cli/command_list.py
Python
apache-2.0
2,668
/* * Copyright 2012 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Most functions in this file are similar to the posix version. libc // file API is used since PlatformFile API misses some necessary functions. #include "base/file_util.h" #include <dirent.h> #include <fcntl.h> #include <stdio.h> #include <stdlib.h> #include <unistd.h> #include <fstream> #include "base/logging.h" #include "base/string_util.h" #include "base/threading/thread_restrictions.h" #include "base/time.h" #include "lb_globals.h" namespace file_util { void GetRandomString(int output_size, char *output) { snprintf(output, output_size, "%x%x%x%x", static_cast<unsigned int>(time(NULL)), static_cast<unsigned int>(clock()), rand(), rand()); } bool CreateTemporaryFile(FilePath* path) { base::ThreadRestrictions::AssertIOAllowed(); FilePath temp_file; if (!GetTempDir(path)) return false; if (CreateTemporaryFileInDir(*path, &temp_file)) { *path = temp_file; return true; } return false; } FILE* CreateAndOpenTemporaryFileInDir(const FilePath& dir, FilePath* path) { base::ThreadRestrictions::AssertIOAllowed(); for (int i = 0; i < 3; i++) { // Try create a new temporary file with random generated name. If // the one exists, keep trying another filename until we reach some limit. 
char random_string[48]; GetRandomString(sizeof(random_string) - 4, random_string); strncat(random_string, ".tmp", sizeof(random_string) - strlen(random_string) - 1); FilePath new_path = dir.Append(random_string); // Make sure file doesn't already exist struct stat file_info; if (stat(new_path.value().c_str(), &file_info) == 0) continue; FILE *file = fopen(new_path.value().c_str(), "w+"); if (file) { *path = new_path; return file; } } return NULL; } bool CreateTemporaryFileInDir(const FilePath& dir, FilePath* temp_file) { FILE *file = CreateAndOpenTemporaryFileInDir(dir, temp_file); if (file) { fclose(file); return true; } return false; } bool GetFileInfo(const FilePath& file_path, base::PlatformFileInfo* results) { DCHECK(results); struct stat _stat; int rv = stat(file_path.value().c_str(), &_stat); if (rv == 0) { results->creation_time = base::Time::FromTimeT(_stat.st_ctime); results->is_directory = _stat.st_mode & S_IFDIR; results->is_symbolic_link = _stat.st_mode & S_IFLNK; results->last_accessed = base::Time::FromTimeT(_stat.st_atime); results->last_modified = base::Time::FromTimeT(_stat.st_mtime); results->size = _stat.st_size; return true; } return false; } bool GetTempDir(FilePath* path) { *path = FilePath(GetGlobalsPtr()->tmp_path).Append("tmp"); // Ensure path exists int result = mkdir(path->value().c_str(), 0700); return (result == 0) || (errno == EEXIST); } bool GetShmemTempDir(FilePath* path, bool executable) { return GetTempDir(path); } bool CreateNewTempDirectory(const FilePath::StringType& prefix, FilePath* new_temp_path) { FilePath tmpdir; if (!GetTempDir(&tmpdir)) return false; return CreateTemporaryDirInDir(tmpdir, prefix, new_temp_path); } bool CreateTemporaryDirInDir(const FilePath& base_dir, const FilePath::StringType& prefix, FilePath* new_dir) { // based on version from file_util_win.cc base::ThreadRestrictions::AssertIOAllowed(); FilePath path_to_create; for (int count = 0; count < 3; ++count) { // Try create a new temporary directory with 
random generated name. If // the one exists, keep trying another path name until we reach some limit. char random_string[48]; GetRandomString(sizeof(random_string), random_string); path_to_create = base_dir.Append(random_string); if (mkdir(path_to_create.value().c_str(), 0700) == 0) { *new_dir = path_to_create; return true; } } return false; } bool PathExists(const FilePath& path) { return access(path.value().c_str(), F_OK) == 0; } bool AbsolutePath(FilePath* path) { // Some platforms may not have realpath(), so we just check to make sure // every path is absolute already. // see file_util_posix.cc for the original version. DCHECK_EQ(path->value().c_str()[0], '/'); return true; } int CountFilesCreatedAfter(const FilePath& path, const base::Time& comparison_time) { // Copied and modified from file_util_posix.cc base::ThreadRestrictions::AssertIOAllowed(); int file_count = 0; DIR* dir = opendir(path.value().c_str()); if (dir) { struct dirent* ent; while ((ent = readdir(dir)) != NULL) { if ((strcmp(ent->d_name, ".") == 0) || (strcmp(ent->d_name, "..") == 0)) continue; struct stat st; int test = stat(path.Append(ent->d_name).value().c_str(), &st); if (test != 0) { DPLOG(ERROR) << "stat failed"; continue; } // Here, we use Time::TimeT(), which discards microseconds. This // means that files which are newer than |comparison_time| may // be considered older. If we don't discard microseconds, it // introduces another issue. Suppose the following case: // // 1. Get |comparison_time| by Time::Now() and the value is 10.1 (secs). // 2. Create a file and the current time is 10.3 (secs). // // As POSIX doesn't have microsecond precision for |st_ctime|, // the creation time of the file created in the step 2 is 10 and // the file is considered older than |comparison_time|. After // all, we may have to accept either of the two issues: 1. files // which are older than |comparison_time| are considered newer // (current implementation) 2. 
files newer than // |comparison_time| are considered older. if (static_cast<time_t>(st.st_ctime) >= comparison_time.ToTimeT()) ++file_count; } closedir(dir); } return file_count; } bool NormalizeFilePath(const FilePath& path, FilePath* normalized_path) { // We don't support parent references. if (path.ReferencesParent()) return false; // We do support relative paths. // To be consistant with windows, fail if path_result is a directory. struct stat file_info; if (stat(path.value().c_str(), &file_info) != 0 || S_ISDIR(file_info.st_mode)) { return false; } *normalized_path = path; return true; } bool DirectoryExists(const FilePath& path) { base::PlatformFileInfo info; if (!GetFileInfo(path, &info)) { return false; } return info.is_directory; } bool GetPosixFilePermissions(const FilePath& path, int* mode) { base::ThreadRestrictions::AssertIOAllowed(); DCHECK(mode); struct stat stat_buf; if (stat(path.value().c_str(), &stat_buf) != 0) return false; *mode = stat_buf.st_mode & FILE_PERMISSION_MASK; return true; } bool SetPosixFilePermissions(const FilePath& path, int mode) { base::ThreadRestrictions::AssertIOAllowed(); DCHECK_EQ(mode & ~FILE_PERMISSION_MASK, 0); struct stat stat_buf; if (stat(path.value().c_str(), &stat_buf) != 0) return false; // Clears the existing permission bits, and adds the new ones. 
mode_t updated_mode_bits = stat_buf.st_mode & ~FILE_PERMISSION_MASK; updated_mode_bits |= mode & FILE_PERMISSION_MASK; if (HANDLE_EINTR(chmod(path.value().c_str(), updated_mode_bits)) != 0) return false; return true; } FILE* OpenFile(const std::string& filename, const char* mode) { return OpenFile(FilePath(filename), mode); } FILE* OpenFile(const FilePath& filename, const char* mode) { base::ThreadRestrictions::AssertIOAllowed(); FILE* result = NULL; do { result = fopen(filename.value().c_str(), mode); } while (!result && errno == EINTR); return result; } int WriteFile(const FilePath& filename, const char* data, int size) { base::ThreadRestrictions::AssertIOAllowed(); int fd = HANDLE_EINTR(open(filename.value().c_str(), O_CREAT|O_WRONLY|O_TRUNC, 0666)); if (fd < 0) return -1; int bytes_written = WriteFileDescriptor(fd, data, size); if (int ret = HANDLE_EINTR(close(fd)) < 0) return ret; return bytes_written; } int WriteFileDescriptor(const int fd, const char* data, int size) { // Allow for partial writes. ssize_t bytes_written_total = 0; for (ssize_t bytes_written_partial = 0; bytes_written_total < size; bytes_written_total += bytes_written_partial) { bytes_written_partial = HANDLE_EINTR(write(fd, data + bytes_written_total, size - bytes_written_total)); if (bytes_written_partial < 0) return -1; } return bytes_written_total; } int AppendToFile(const FilePath& filename, const char* data, int size) { base::ThreadRestrictions::AssertIOAllowed(); int fd = HANDLE_EINTR(open(filename.value().c_str(), O_WRONLY | O_APPEND)); if (fd < 0) return -1; int bytes_written = WriteFileDescriptor(fd, data, size); if (int ret = HANDLE_EINTR(close(fd)) < 0) return ret; return bytes_written; } bool Move(const FilePath& from_path, const FilePath& to_path) { base::ThreadRestrictions::AssertIOAllowed(); // Windows compatibility: if to_path exists, from_path and to_path // must be the same type, either both files, or both directories. 
struct stat to_file_info; if (stat(to_path.value().c_str(), &to_file_info) == 0) { struct stat from_file_info; if (stat(from_path.value().c_str(), &from_file_info) == 0) { if (S_ISDIR(to_file_info.st_mode) != S_ISDIR(from_file_info.st_mode)) return false; } else { return false; } } if (rename(from_path.value().c_str(), to_path.value().c_str()) == 0) return true; if (!CopyDirectory(from_path, to_path, true)) return false; Delete(from_path, true); return true; } bool CopyDirectory(const FilePath& from_path, const FilePath& to_path, bool recursive) { // Some old callers of CopyDirectory want it to support wildcards. // After some discussion, we decided to fix those callers. // Break loudly here if anyone tries to do this. // TODO(evanm): remove this once we're sure it's ok. DCHECK(to_path.value().find('*') == std::string::npos); DCHECK(from_path.value().find('*') == std::string::npos); // This function does not properly handle destinations within the source FilePath real_to_path = to_path; if (PathExists(real_to_path)) { if (!AbsolutePath(&real_to_path)) return false; } else { real_to_path = real_to_path.DirName(); if (!AbsolutePath(&real_to_path)) return false; } FilePath real_from_path = from_path; if (!AbsolutePath(&real_from_path)) return false; if (real_to_path.value().size() >= real_from_path.value().size() && real_to_path.value().compare(0, real_from_path.value().size(), real_from_path.value()) == 0) return false; bool success = true; int traverse_type = FileEnumerator::FILES | FileEnumerator::SHOW_SYM_LINKS; if (recursive) traverse_type |= FileEnumerator::DIRECTORIES; FileEnumerator traversal(from_path, recursive, traverse_type); // We have to mimic windows behavior here. |to_path| may not exist yet, // start the loop with |to_path|. 
FileEnumerator::FindInfo info; FilePath current = from_path; if (stat(from_path.value().c_str(), &info.stat) < 0) { DLOG(ERROR) << "CopyDirectory() couldn't stat source directory: " << from_path.value() << " errno = " << errno; success = false; } struct stat to_path_stat; FilePath from_path_base = from_path; if (recursive && stat(to_path.value().c_str(), &to_path_stat) == 0 && S_ISDIR(to_path_stat.st_mode)) { // If the destination already exists and is a directory, then the // top level of source needs to be copied. from_path_base = from_path.DirName(); } // The Windows version of this function assumes that non-recursive calls // will always have a directory for from_path. DCHECK(recursive || S_ISDIR(info.stat.st_mode)); while (success && !current.empty()) { // current is the source path, including from_path, so append // the suffix after from_path to to_path to create the target_path. FilePath target_path(to_path); if (from_path_base != current) { if (!from_path_base.AppendRelativePath(current, &target_path)) { success = false; break; } } if (S_ISDIR(info.stat.st_mode)) { if (mkdir(target_path.value().c_str(), info.stat.st_mode & 01777) != 0 && errno != EEXIST) { DLOG(ERROR) << "CopyDirectory() couldn't create directory: " << target_path.value() << " errno = " << errno; success = false; } } else if (S_ISREG(info.stat.st_mode)) { if (!CopyFile(current, target_path)) { DLOG(ERROR) << "CopyDirectory() couldn't create file: " << target_path.value(); success = false; } } else { DLOG(WARNING) << "CopyDirectory() skipping non-regular file: " << current.value(); } current = traversal.Next(); traversal.GetFindInfo(&info); } return success; } bool CreateDirectory(const FilePath& full_path) { std::vector<FilePath> subpaths; // Collect a list of all parent directories, but not including root. 
FilePath last_path = full_path; for (FilePath path = full_path.DirName(); path.value() != last_path.value(); path = path.DirName()) { subpaths.push_back(last_path); last_path = path; } // Iterate through the parents and create the missing ones. for (std::vector<FilePath>::reverse_iterator i = subpaths.rbegin(); i != subpaths.rend(); ++i) { if (DirectoryExists(*i)) continue; if (mkdir(i->value().c_str(), 0700) == 0) continue; // Mkdir failed, but it might have failed with EEXIST, or some other error // due to the the directory appearing out of thin air. This can occur if // two processes are trying to create the same file system tree at the same // time. Check to see if it exists and make sure it is a directory. if (!DirectoryExists(*i)) return false; } return true; } bool Delete(const FilePath& path, bool recursive) { const char* path_str = path.value().c_str(); base::PlatformFileInfo info; // Return true if the path doesn't exist. if (!PathExists(path)) { return true; } if (!GetFileInfo(path, &info)) { return false; } if (!info.is_directory) return (unlink(path_str) == 0); if (!recursive) return (rmdir(path_str) == 0); bool success = true; std::stack<std::string> directories; directories.push(path.value()); FileEnumerator traversal(path, true, static_cast<FileEnumerator::FileType>( FileEnumerator::FILES | FileEnumerator::DIRECTORIES | FileEnumerator::SHOW_SYM_LINKS)); for (FilePath current = traversal.Next(); success && !current.empty(); current = traversal.Next()) { FileEnumerator::FindInfo info; traversal.GetFindInfo(&info); if (S_ISDIR(info.stat.st_mode)) directories.push(current.value()); else success = (unlink(current.value().c_str()) == 0); } while (success && !directories.empty()) { FilePath dir = FilePath(directories.top()); directories.pop(); success = (rmdir(dir.value().c_str()) == 0); } return success; } int ReadFile(const FilePath& filename, char* data, int size) { int fd = open(filename.value().c_str(), O_RDONLY); if (fd < 0) { DLOG(ERROR) << 
"file_util::ReadFile failed on " << filename.value(); return -1; } ssize_t bytes_read = read(fd, data, size); close(fd); return bytes_read; } bool GetCurrentDirectory(FilePath* dir) { *dir = FilePath(GetGlobalsPtr()->game_content_path); return true; } bool ReplaceFile(const FilePath& from_path, const FilePath& to_path) { int ret = rename(from_path.value().c_str(), to_path.value().c_str()); return (ret == 0); } bool CopyFile(const FilePath& from_path, const FilePath& to_path) { int infile = open(from_path.value().c_str(), O_RDONLY); if (infile < 0) return false; // Get file info so we can set the same permissions on the copied file struct stat file_info; fstat(infile, &file_info); int outfile = open(to_path.value().c_str(), O_CREAT | O_WRONLY, file_info.st_mode); if (outfile < 0) { close(infile); return false; } const size_t kBufferSize = 32768; std::vector<char> buffer(kBufferSize); bool result = true; int64 offset = 0; while (result) { ssize_t bytes_read = read(infile, &buffer[0], buffer.size()); if (bytes_read <= 0) { result = bytes_read == 0; break; } // Allow for partial writes ssize_t bytes_written_per_read = 0; do { ssize_t bytes_written_partial = write( outfile, &buffer[bytes_written_per_read], bytes_read - bytes_written_per_read); if (bytes_written_partial < 0) { result = false; break; } bytes_written_per_read += bytes_written_partial; } while (bytes_written_per_read < bytes_read); offset += bytes_read; } if (HANDLE_EINTR(close(infile)) < 0) result = false; if (HANDLE_EINTR(close(outfile)) < 0) result = false; return result; } bool ReadFromFD(int fd, char* buffer, size_t bytes) { size_t total_read = 0; while (total_read < bytes) { ssize_t bytes_read = HANDLE_EINTR(read(fd, buffer + total_read, bytes - total_read)); if (bytes_read <= 0) break; total_read += bytes_read; } return total_read == bytes; } /////////////////////////////////////////////// // FileEnumerator FileEnumerator::FileEnumerator(const FilePath& root_path, bool recursive, int file_type) : 
current_directory_entry_(0) , root_path_(root_path) , recursive_(recursive) , file_type_(file_type) { DCHECK(!(recursive && (INCLUDE_DOT_DOT & file_type_))); pending_paths_.push(root_path); } FileEnumerator::FileEnumerator(const FilePath& root_path, bool recursive, int file_type, const FilePath::StringType& pattern) : current_directory_entry_(0) , root_path_(root_path) , recursive_(recursive) , file_type_(file_type) { DCHECK(!(recursive && (INCLUDE_DOT_DOT & file_type_))); if (pattern.empty()) { pattern_ = FilePath::StringType(); } else { pattern_ = root_path.Append(pattern).value(); } // Allow a trailing asterisk, but no other wildcards. size_t pattern_len = pattern_.size(); size_t asterisk_pos = pattern_.find('*'); DCHECK((asterisk_pos == pattern_len - 1) || (asterisk_pos == std::string::npos)); DCHECK_EQ(pattern_.find_first_of("[]?"), std::string::npos); pending_paths_.push(root_path); } FileEnumerator::~FileEnumerator() { } // An overly simplified version of fnmatch() that only works if the // asterisk is at the end of the pattern. static bool MatchWildcardStrings(const std::string& pattern, const std::string& str) { const size_t pattern_len = pattern.size(); DCHECK(pattern_len); if (pattern[pattern_len - 1] != '*') { // No asterisk found at all. Do a regular string compare. return pattern == str; } else { // If the strings are the same up to the asterisk, then the pattern matches. 
return pattern.compare(0, pattern_len - 1, str, 0, pattern_len - 1) == 0; } } FilePath FileEnumerator::Next() { ++current_directory_entry_; // While we've exhausted the entries in the current directory, do the next while (current_directory_entry_ >= directory_entries_.size()) { if (pending_paths_.empty()) return FilePath(); root_path_ = pending_paths_.top(); root_path_ = root_path_.StripTrailingSeparators(); pending_paths_.pop(); std::vector<DirectoryEntryInfo> entries; if (!ReadDirectory(&entries, root_path_, file_type_ & SHOW_SYM_LINKS)) continue; directory_entries_.clear(); current_directory_entry_ = 0; for (std::vector<DirectoryEntryInfo>::const_iterator i = entries.begin(); i != entries.end(); ++i) { FilePath full_path = root_path_.Append(i->filename); if (ShouldSkip(full_path)) continue; // We may not have fnmatch() function, so use a simplified version // to check strings that have only an asterisk at the end. Anything else // should assert in the constructor. if (pattern_.size() && !MatchWildcardStrings(pattern_, full_path.value())) { continue; } if (recursive_ && S_ISDIR(i->stat.st_mode)) pending_paths_.push(full_path); if ((S_ISDIR(i->stat.st_mode) && (file_type_ & DIRECTORIES)) || (!S_ISDIR(i->stat.st_mode) && (file_type_ & FILES))) directory_entries_.push_back(*i); } } return root_path_.Append( directory_entries_[current_directory_entry_].filename); } void FileEnumerator::GetFindInfo(FindInfo* info) { DCHECK(info); if (current_directory_entry_ >= directory_entries_.size()) return; DirectoryEntryInfo* cur_entry = &directory_entries_[current_directory_entry_]; memcpy(&(info->stat), &(cur_entry->stat), sizeof(info->stat)); info->filename.assign(cur_entry->filename.value()); } bool FileEnumerator::IsDirectory(const FindInfo& info) { return S_ISDIR(info.stat.st_mode); } // static FilePath FileEnumerator::GetFilename(const FindInfo& find_info) { return FilePath(find_info.filename); } // static int64 FileEnumerator::GetFilesize(const FindInfo& find_info) { 
return find_info.stat.st_size; } // static base::Time FileEnumerator::GetLastModifiedTime(const FindInfo& find_info) { return base::Time::FromTimeT(find_info.stat.st_mtime); } bool FileEnumerator::ReadDirectory(std::vector<DirectoryEntryInfo>* entries, const FilePath& source, bool show_links) { DIR* dir = opendir(source.value().c_str()); if (!dir) return false; struct dirent* dent; while ((dent = readdir(dir))) { DirectoryEntryInfo info; info.filename = FilePath(dent->d_name); FilePath full_name = source.Append(dent->d_name); int ret = stat(full_name.value().c_str(), &info.stat); if (ret < 0) { // Print the stat() error message unless it was ENOENT and we're // following symlinks. if (!(errno == ENOENT && !show_links)) { DPLOG(ERROR) << "Couldn't stat " << source.Append(dent->d_name).value(); } memset(&info.stat, 0, sizeof(info.stat)); } entries->push_back(info); } closedir(dir); return true; } } // namespace file_util
snibug/gyp_example
src/platform/linux/chromium/base/file_util_shell.cc
C++
apache-2.0
23,574
import React from 'react'; import {Link} from 'react-router'; import '../../styles/about-page.css'; // Since this component is simple and static, there's no parent container for it. const AboutPage = () => { return ( <div> <h2 className="alt-header">About</h2> <p> This example app is part of the <a href="https://github.com/coryhouse/react-slingshot">React-Slingshot starter kit</a>. </p> <p> <Link to="/badlink">Click this bad link</Link> to see the 404 page. </p> </div> ); }; export default AboutPage;
Takaitra/RecipeRunt
web/src/components/pages/AboutPage.js
JavaScript
apache-2.0
574
from sys import maxsize


class Group:
    """Value object describing a single address-book group.

    Every field is optional so the same class can represent both a group
    about to be created (no id yet) and a group scraped from the UI
    (id known).
    """

    def __init__(self, group_name=None, group_header=None, group_footer=None, id=None):
        self.group_name = group_name
        self.group_header = group_header
        self.group_footer = group_footer
        self.id = id

    def __repr__(self):
        # Compact "id:name" form so assertion-failure diffs stay readable.
        return '%s:%s' % (self.id, self.group_name)

    def __eq__(self, other):
        # Groups are equal when the names match and the ids either match or
        # are unknown on one side (a freshly created group has no id yet).
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable -- confirm that is intended before using them in sets.
        return (self.id is None or other.id is None or self.id == other.id) and self.group_name == other.group_name

    def if_or_max(self):
        # Sort-key helper: id as int when present, otherwise maxsize so
        # id-less groups sort last.
        if self.id:
            return int(self.id)
        else:
            return maxsize


class GroupBase:
    """Page-object helper driving the group pages of the app under test.

    `app` is the application fixture; `app.wd` is its selenium WebDriver.
    """

    def __init__(self, app):
        self.app = app

    def open_group_page(self):
        """Navigate to the group page unless the driver is already there."""
        wd = self.app.wd
        # Already on group.php with the "new" button visible -> nothing to do.
        if not (wd.current_url.endswith('/group.php') and len(wd.find_elements_by_name('new')) > 0):
            wd.find_element_by_link_text("groups").click()

    def count(self):
        """Return the number of groups listed on the group page."""
        wd = self.app.wd
        self.open_group_page()
        # One "selected[]" checkbox is rendered per group row.
        return len(wd.find_elements_by_name("selected[]"))

    def validation_of_group_exist(self):
        """Ensure at least one group exists, creating a dummy one if needed."""
        if self.count() == 0:
            self.create(Group(group_name='test'))
            self.click_group_page()

    def group_line(self, field, text):
        """Fill form input `field` with `text`; skipped when text is falsy."""
        wd = self.app.wd
        if text:
            wd.find_element_by_name(field).click()
            wd.find_element_by_name(field).clear()
            wd.find_element_by_name(field).send_keys(text)

    def create(self, Group):
        """Create a new group from the given Group value object.

        NOTE(review): the parameter shadows the module-level Group class;
        renaming would change the keyword-argument interface, so it is only
        flagged here.
        """
        wd = self.app.wd
        self.open_group_page()
        wd.find_element_by_name("new").click()
        self.group_line('group_name', Group.group_name)
        self.group_line('group_header', Group.group_header)
        self.group_line('group_footer', Group.group_footer)
        wd.find_element_by_name("submit").click()
        # The list on the page changed; force get_group_list() to re-scrape.
        self.group_cache = None

    def delete_first_group(self):
        """Convenience wrapper: delete the group in row 0."""
        self.delete_group_by_index(0)

    def click_group_page(self):
        """Return to the group list via the confirmation message box."""
        wd = self.app.wd
        wd.find_element_by_css_selector("div.msgbox").click()
        wd.find_element_by_link_text("group page").click()

    # Class-level cache of scraped Group objects; None means "stale,
    # re-read from the page on next get_group_list() call".
    group_cache = None

    def get_group_list(self):
        """Return the (cached) list of Group objects shown on the group page."""
        if self.group_cache is None:
            wd = self.app.wd
            self.open_group_page()
            self.group_cache = []
            for element in wd.find_elements_by_css_selector('span.group'):
                text = element.text
                # The row checkbox's value attribute carries the group id.
                id = element.find_element_by_name('selected[]').get_attribute('value')
                self.group_cache.append(Group(group_name=text, id=id))
        # Return a copy so callers cannot mutate the cache in place.
        return list(self.group_cache)

    def select_group_by_index(self, index):
        """Tick the checkbox of the group in row `index`."""
        wd = self.app.wd
        wd.find_elements_by_name("selected[]")[index].click()

    def delete_group_by_index(self, index):
        """Delete the group in row `index` and invalidate the cache."""
        wd = self.app.wd
        self.open_group_page()
        self.select_group_by_index(index)
        wd.find_element_by_name('delete').click()
        self.click_group_page()
        self.group_cache = None

    def edit_group_by_index(self, Group, index):
        """Overwrite the group in row `index` with the given Group's fields.

        Fields whose value is falsy in `Group` are left unchanged (see
        group_line).  Invalidates the cache afterwards.
        """
        wd = self.app.wd
        self.open_group_page()
        wd.find_elements_by_name("selected[]")[index].click()
        wd.find_element_by_name("edit").click()
        self.group_line('group_name', Group.group_name)
        self.group_line('group_header', Group.group_header)
        self.group_line('group_footer', Group.group_footer)
        wd.find_element_by_name("update").click()
        wd.find_element_by_link_text("groups").click()
        self.group_cache = None
werbk/task-5.14
tests_group/group_lib.py
Python
apache-2.0
3,504
// Copyright (c) 2015 Uber Technologies, Inc. // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
// Tests for the IP/port helper functions of package utils (HostIP,
// ParseIPToUint32, ParsePort, PackIPAsUint32 -- all declared elsewhere
// in this package).
package utils

import (
	"net"
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestGetLocalIP only checks that HostIP returns some address; the
// concrete value is machine-dependent, so no exact assertion is possible.
func TestGetLocalIP(t *testing.T) {
	ip, _ := HostIP()
	assert.NotNil(t, ip, "assert we have an ip")
}

// TestParseIPToUint32 is a table-driven test: per the expectations below,
// ParseIPToUint32 packs dotted-quad strings big-endian into a uint32 and
// also resolves hostnames ("localhost" -> 127.0.0.1).
func TestParseIPToUint32(t *testing.T) {
	tests := []struct {
		in  string
		out uint32
		err error
	}{
		{"1.2.3.4", 1<<24 | 2<<16 | 3<<8 | 4, nil},
		{"127.0.0.1", 127<<24 | 1, nil},
		{"localhost", 127<<24 | 1, nil},
		{"127.xxx.0.1", 0, nil},
		{"", 0, ErrEmptyIP},
		{"hostname", 0, ErrNotFourOctets},
	}

	for _, test := range tests {
		intIP, err := ParseIPToUint32(test.in)
		if test.err != nil {
			// Error cases: only the returned error is asserted.
			assert.Equal(t, test.err, err)
		} else {
			assert.Equal(t, test.out, intIP)
		}
	}
}

// TestParsePort checks uint16 range validation as well as plain parsing.
func TestParsePort(t *testing.T) {
	tests := []struct {
		in  string
		out uint16
		err bool
	}{
		{"123", 123, false},
		{"77777", 0, true}, // too large for 16bit
		{"bad-wolf", 0, true},
	}
	for _, test := range tests {
		p, err := ParsePort(test.in)
		if test.err {
			assert.Error(t, err)
		} else {
			assert.Equal(t, test.out, p)
		}
	}
}

// TestPackIPAsUint32 covers IPv4 addresses, IPv4-mapped IPv6 addresses
// (packed like their IPv4 equivalent), and genuine IPv6 (expected 0).
func TestPackIPAsUint32(t *testing.T) {
	// 16-byte IPv4-mapped form of 1.2.3.4 (::ffff:1.2.3.4).
	ipv6a := net.IP{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 1, 2, 3, 4}
	ipv6b := net.ParseIP("2001:0db8:85a3:0000:0000:8a2e:0370:7334")
	assert.NotNil(t, ipv6a)

	tests := []struct {
		in  net.IP
		out uint32
	}{
		{net.IPv4(1, 2, 3, 4), 1<<24 | 2<<16 | 3<<8 | 4},
		{ipv6a, 1<<24 | 2<<16 | 3<<8 | 4}, // IPv6 but convertible to IPv4
		{ipv6b, 0},
	}
	for _, test := range tests {
		ip := PackIPAsUint32(test.in)
		assert.Equal(t, test.out, ip)
	}
}
tomwilkie/cortex
vendor/github.com/uber/jaeger-client-go/utils/utils_test.go
GO
apache-2.0
2,640
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * ART compiler "checker" test for String.equals intrinsic optimizations.
 *
 * The {@code /// CHECK-*} comments below are directives consumed by the ART
 * checker tool: they assert properties of the compiler's intermediate
 * representation / disassembly for each annotated method.  They MUST NOT be
 * edited or reformatted, or the test's meaning changes.
 */
public class Main {
  public static void main(String[] args) {
    stringEqualsSame();
    stringArgumentNotNull("Foo");
  }

  /// CHECK-START: boolean Main.stringEqualsSame() instruction_simplifier (before)
  /// CHECK:      InvokeStaticOrDirect

  /// CHECK-START: boolean Main.stringEqualsSame() register (before)
  /// CHECK:      <<Const1:i\d+>> IntConstant 1
  /// CHECK:      Return [<<Const1>>]

  /// CHECK-START: boolean Main.stringEqualsSame() register (before)
  /// CHECK-NOT:  InvokeStaticOrDirect
  // Both arguments are the same constant string, so the simplifier should
  // fold the equals() call away into the constant "true" (IntConstant 1).
  public static boolean stringEqualsSame() {
    return $inline$callStringEquals("obj", "obj");
  }

  /// CHECK-START: boolean Main.stringEqualsNull() register (after)
  /// CHECK:      <<Invoke:z\d+>> InvokeVirtual
  /// CHECK:      Return [<<Invoke>>]
  // myObject is not provably non-null, so the call must survive as a real
  // InvokeVirtual whose result is returned.
  public static boolean stringEqualsNull() {
    String o = (String)myObject;
    return $inline$callStringEquals(o, o);
  }

  // Helper expected to be inlined by the compiler (hence the $inline$ prefix).
  public static boolean $inline$callStringEquals(String a, String b) {
    return a.equals(b);
  }

  /// CHECK-START-X86: boolean Main.stringArgumentNotNull(java.lang.Object) disassembly (after)
  /// CHECK:          InvokeVirtual {{.*\.equals.*}}
  /// CHECK-NOT:      test
  public static boolean stringArgumentNotNull(Object obj) {
    // getClass() throws NPE when obj is null, establishing obj as non-null
    // for the code that follows.
    obj.getClass();
    return "foo".equals(obj);
  }

  // Test is very brittle as it depends on the order we emit instructions.
  /// CHECK-START-X86: boolean Main.stringArgumentIsString() disassembly (after)
  /// CHECK:      InvokeVirtual
  /// CHECK:      test
  /// CHECK:      jz/eq
  // Check that we don't try to compare the classes.
  /// CHECK-NOT:  mov
  /// CHECK:      cmp
  public static boolean stringArgumentIsString() {
    return "foo".equals(myString);
  }

  // Static fields used so the compiler cannot prove nullness/type of the
  // values loaded from them.
  static String myString;
  static Object myObject;
}
android-art-intel/Nougat
art-extension/test/536-checker-intrinsic-optimization/src/Main.java
Java
apache-2.0
2,384
// Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.security.zynamics.binnavi.config;

import com.google.security.zynamics.common.config.AbstractConfigItem;
import com.google.security.zynamics.common.config.TypedPropertiesWrapper;

import java.awt.Color;

/**
 * Configuration item holding the colors used by the debugger UI: one color
 * per breakpoint state plus the active-line highlight color.
 *
 * <p>Each color is persisted under a fixed property key (the {@code *_COLOR}
 * string constants). The negative integer defaults are packed ARGB values as
 * accepted by {@link Color#Color(int)}. The key strings form the on-disk
 * configuration format and must not be changed.
 */
public class DebugColorsConfigItem extends AbstractConfigItem {
  private static final String BREAKPOINT_ACTIVE_COLOR = "BreakpointActive";
  private static final Color BREAKPOINT_ACTIVE_COLOR_DEFAULT = new Color(-16740608);
  private Color breakpointActive = BREAKPOINT_ACTIVE_COLOR_DEFAULT;

  private static final String BREAKPOINT_INACTIVE_COLOR = "BreakpointInactive";
  private static final Color BREAKPOINT_INACTIVE_COLOR_DEFAULT = new Color(-16763956);
  private Color breakpointInactive = BREAKPOINT_INACTIVE_COLOR_DEFAULT;

  private static final String BREAKPOINT_DISABLED_COLOR = "BreakpointDisabled";
  private static final Color BREAKPOINT_DISABLED_COLOR_DEFAULT = new Color(-5592663);
  private Color breakpointDisabled = BREAKPOINT_DISABLED_COLOR_DEFAULT;

  private static final String BREAKPOINT_HIT_COLOR = "BreakpointHit";
  private static final Color BREAKPOINT_HIT_COLOR_DEFAULT = new Color(-5046272);
  private Color breakpointHit = BREAKPOINT_HIT_COLOR_DEFAULT;

  private static final String BREAKPOINT_ENABLED_COLOR = "BreakpointEnabled";
  private static final Color BREAKPOINT_ENABLED_COLOR_DEFAULT = new Color(-16740608);
  private Color breakpointEnabled = BREAKPOINT_ENABLED_COLOR_DEFAULT;

  private static final String BREAKPOINT_INVALID_COLOR = "BreakpointInvalid";
  private static final Color BREAKPOINT_INVALID_COLOR_DEFAULT = new Color(-16777216);
  private Color breakpointInvalid = BREAKPOINT_INVALID_COLOR_DEFAULT;

  private static final String BREAKPOINT_DELETING_COLOR = "BreakpointDeleting";
  private static final Color BREAKPOINT_DELETING_COLOR_DEFAULT = new Color(-3328);
  private Color breakpointDeleting = BREAKPOINT_DELETING_COLOR_DEFAULT;

  private static final String ACTIVE_LINE_COLOR = "ActiveLine";
  private static final Color ACTIVE_LINE_COLOR_DEFAULT = new Color(-65536);
  private Color activeLine = ACTIVE_LINE_COLOR_DEFAULT;

  /**
   * Loads all colors from the given properties, falling back to the defaults
   * for keys that are absent or unparsable.
   */
  @Override
  public void load(final TypedPropertiesWrapper properties) {
    breakpointActive =
        properties.getColor(BREAKPOINT_ACTIVE_COLOR, BREAKPOINT_ACTIVE_COLOR_DEFAULT);
    breakpointInactive =
        properties.getColor(BREAKPOINT_INACTIVE_COLOR, BREAKPOINT_INACTIVE_COLOR_DEFAULT);
    breakpointDisabled =
        properties.getColor(BREAKPOINT_DISABLED_COLOR, BREAKPOINT_DISABLED_COLOR_DEFAULT);
    breakpointHit = properties.getColor(BREAKPOINT_HIT_COLOR, BREAKPOINT_HIT_COLOR_DEFAULT);
    breakpointEnabled =
        properties.getColor(BREAKPOINT_ENABLED_COLOR, BREAKPOINT_ENABLED_COLOR_DEFAULT);
    breakpointInvalid =
        properties.getColor(BREAKPOINT_INVALID_COLOR, BREAKPOINT_INVALID_COLOR_DEFAULT);
    breakpointDeleting =
        properties.getColor(BREAKPOINT_DELETING_COLOR, BREAKPOINT_DELETING_COLOR_DEFAULT);
    activeLine = properties.getColor(ACTIVE_LINE_COLOR, ACTIVE_LINE_COLOR_DEFAULT);
  }

  /** Writes all colors back to the given properties under their fixed keys. */
  @Override
  public void store(final TypedPropertiesWrapper properties) {
    properties.setColor(BREAKPOINT_ACTIVE_COLOR, breakpointActive);
    properties.setColor(BREAKPOINT_INACTIVE_COLOR, breakpointInactive);
    properties.setColor(BREAKPOINT_DISABLED_COLOR, breakpointDisabled);
    properties.setColor(BREAKPOINT_HIT_COLOR, breakpointHit);
    properties.setColor(BREAKPOINT_ENABLED_COLOR, breakpointEnabled);
    properties.setColor(BREAKPOINT_INVALID_COLOR, breakpointInvalid);
    properties.setColor(BREAKPOINT_DELETING_COLOR, breakpointDeleting);
    properties.setColor(ACTIVE_LINE_COLOR, activeLine);
  }

  // Plain accessors; no validation or null checks are performed on set.

  public Color getBreakpointActive() {
    return breakpointActive;
  }

  public void setBreakpointActive(final Color value) {
    this.breakpointActive = value;
  }

  public Color getBreakpointInactive() {
    return breakpointInactive;
  }

  public void setBreakpointInactive(final Color value) {
    this.breakpointInactive = value;
  }

  public Color getBreakpointDisabled() {
    return breakpointDisabled;
  }

  public void setBreakpointDisabled(final Color value) {
    this.breakpointDisabled = value;
  }

  public Color getBreakpointHit() {
    return breakpointHit;
  }

  public void setBreakpointHit(final Color value) {
    this.breakpointHit = value;
  }

  public Color getBreakpointEnabled() {
    return breakpointEnabled;
  }

  public void setBreakpointEnabled(final Color value) {
    this.breakpointEnabled = value;
  }

  public Color getBreakpointInvalid() {
    return breakpointInvalid;
  }

  public void setBreakpointInvalid(final Color value) {
    this.breakpointInvalid = value;
  }

  public Color getBreakpointDeleting() {
    return breakpointDeleting;
  }

  public void setBreakpointDeleting(final Color value) {
    this.breakpointDeleting = value;
  }

  public Color getActiveLine() {
    return activeLine;
  }

  public void setActiveLine(final Color value) {
    this.activeLine = value;
  }
}
google/binnavi
src/main/java/com/google/security/zynamics/binnavi/config/DebugColorsConfigItem.java
Java
apache-2.0
5,605
/*
 * #%L
 * ACS AEM Commons Bundle
 * %%
 * Copyright (C) 2015 Adobe
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package com.adobe.acs.commons.workflow.bulk.removal.impl.exceptions;

/**
 * Thrown when a bulk workflow-removal run exceeds its configured maximum
 * duration and has to be aborted.
 *
 * <p>Fix over the previous version: the class had only the implicit no-arg
 * constructor, so throw sites could not attach any context (how long the run
 * took, what the limit was) or the triggering cause, and as a
 * {@link java.io.Serializable} type it lacked a {@code serialVersionUID}.
 * The no-arg constructor is kept, so existing callers are unaffected.
 */
public class WorkflowRemovalMaxDurationExceededException extends Exception {

    private static final long serialVersionUID = 1L;

    /** Creates the exception without a detail message (original behavior). */
    public WorkflowRemovalMaxDurationExceededException() {
        super();
    }

    /**
     * Creates the exception with a detail message.
     *
     * @param message description of the exceeded duration, e.g. elapsed time vs. configured limit
     */
    public WorkflowRemovalMaxDurationExceededException(String message) {
        super(message);
    }

    /**
     * Creates the exception with a detail message and the underlying cause,
     * preserving the original exception's stack trace.
     *
     * @param message description of the failure
     * @param cause   the exception that triggered the abort
     */
    public WorkflowRemovalMaxDurationExceededException(String message, Throwable cause) {
        super(message, cause);
    }
}
Sivaramvt/acs-aem-commons
bundle/src/main/java/com/adobe/acs/commons/workflow/bulk/removal/impl/exceptions/WorkflowRemovalMaxDurationExceededException.java
Java
apache-2.0
794
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.uiDesigner.snapShooter; import com.intellij.execution.*; import com.intellij.execution.application.ApplicationConfiguration; import com.intellij.execution.application.ApplicationConfigurationType; import com.intellij.execution.executors.DefaultRunExecutor; import com.intellij.execution.runners.ExecutionEnvironment; import com.intellij.execution.runners.ProgramRunner; import com.intellij.execution.util.JreVersionDetector; import com.intellij.icons.AllIcons; import com.intellij.ide.IdeView; import com.intellij.ide.highlighter.JavaHighlightingColors; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.actionSystem.LangDataKeys; import com.intellij.openapi.actionSystem.PlatformDataKeys; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.roots.ProjectRootManager; import 
com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.ui.ColoredTreeCellRenderer;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.uiDesigner.GuiFormFileType;
import com.intellij.uiDesigner.UIDesignerBundle;
import com.intellij.uiDesigner.designSurface.InsertComponentProcessor;
import com.intellij.uiDesigner.palette.ComponentItem;
import com.intellij.uiDesigner.palette.Palette;
import com.intellij.uiDesigner.radComponents.LayoutManagerRegistry;
import com.intellij.uiDesigner.radComponents.RadComponentFactory;
import com.intellij.uiDesigner.radComponents.RadContainer;
import com.intellij.util.IncorrectOperationException;
import icons.UIDesignerIcons;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;

/**
 * Action that captures a "snapshot" of a running Swing application's component tree and saves it
 * as a UI Designer {@code .form} file in a selected source package.
 *
 * <p>It talks to a {@link SnapShotClient} embedded in the target application (enabled via the
 * {@code ENABLE_SWING_INSPECTOR} flag on an {@link ApplicationConfiguration}); if no instrumented
 * process is running, it offers to launch one and connects once the process reports back.
 *
 * @author yole
 */
public class CreateSnapShotAction extends AnAction {
  private static final Logger LOG = Logger.getInstance("com.intellij.uiDesigner.snapShooter.CreateSnapShotAction");

  /** Visible only when the invocation context provides a source-root directory inside a Java package. */
  @Override
  public void update(AnActionEvent e) {
    final Project project = e.getData(CommonDataKeys.PROJECT);
    final IdeView view = e.getData(LangDataKeys.IDE_VIEW);
    e.getPresentation().setVisible(project != null && view != null && hasDirectoryInPackage(project, view));
  }

  /** Returns true if at least one of the view's directories is source content belonging to a Java package. */
  private static boolean hasDirectoryInPackage(final Project project, final IdeView view) {
    ProjectFileIndex projectFileIndex = ProjectRootManager.getInstance(project).getFileIndex();
    PsiDirectory[] dirs = view.getDirectories();
    for (PsiDirectory dir : dirs) {
      if (projectFileIndex.isInSourceContent(dir.getVirtualFile()) && JavaDirectoryService.getInstance().getPackage(dir) != null) {
        return true;
      }
    }
    return false;
  }

  /**
   * Entry point: locates (or prompts for) a snapshot-enabled run configuration, connects to the
   * running app if possible, otherwise launches it, then runs the snapshot session dialog.
   */
  public void actionPerformed(AnActionEvent e) {
    final Project project = e.getData(CommonDataKeys.PROJECT);
    final IdeView view = e.getData(LangDataKeys.IDE_VIEW);
    if (project == null || view == null) {
      return;
    }
    // Target directory for the generated .form file.
    final PsiDirectory dir = view.getOrChooseDirectory();
    if (dir == null) return;

    final SnapShotClient client = new SnapShotClient();
    List<RunnerAndConfigurationSettings> appConfigurations = new ArrayList<RunnerAndConfigurationSettings>();
    RunnerAndConfigurationSettings snapshotConfiguration = null;
    boolean connected = false;

    // Scan application run configurations: collect all of them, and try to connect to the first
    // one that has the Swing inspector enabled and has reported a listen port from a previous run.
    ApplicationConfigurationType cfgType = ApplicationConfigurationType.getInstance();
    List<RunnerAndConfigurationSettings> racsi = RunManager.getInstance(project).getConfigurationSettingsList(cfgType);

    for(RunnerAndConfigurationSettings config: racsi) {
      if (config.getConfiguration() instanceof ApplicationConfiguration) {
        ApplicationConfiguration appConfig = (ApplicationConfiguration) config.getConfiguration();
        appConfigurations.add(config);
        if (appConfig.ENABLE_SWING_INSPECTOR) {
          SnapShooterConfigurationSettings settings = SnapShooterConfigurationSettings.get(appConfig);
          snapshotConfiguration = config;
          if (settings.getLastPort() > 0) {
            try {
              client.connect(settings.getLastPort());
              connected = true;
            }
            catch(IOException ex) {
              // Stale port from a previous run; fall through and offer to (re)launch.
              connected = false;
            }
          }
        }
        if (connected) break;
      }
    }

    if (snapshotConfiguration == null) {
      snapshotConfiguration = promptForSnapshotConfiguration(project, appConfigurations);
      if (snapshotConfiguration == null) return;
    }

    if (!connected) {
      // Offer to start the instrumented application. showYesNoDialog returns 1 for the "No" button.
      int rc = Messages.showYesNoDialog(project, UIDesignerBundle.message("snapshot.run.prompt"),
                                        UIDesignerBundle.message("snapshot.title"), Messages.getQuestionIcon());
      if (rc == 1) return;
      final ApplicationConfiguration appConfig = (ApplicationConfiguration) snapshotConfiguration.getConfiguration();
      final SnapShooterConfigurationSettings settings = SnapShooterConfigurationSettings.get(appConfig);
      // Invoked by the SnapShooter runtime once the launched process is listening; then we connect
      // and open the session dialog on the EDT.
      settings.setNotifyRunnable(new Runnable() {
        public void run() {
          SwingUtilities.invokeLater(new Runnable() {
            public void run() {
              Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.prepare.notice"),
                                         UIDesignerBundle.message("snapshot.title"), Messages.getInformationIcon());
              try {
                client.connect(settings.getLastPort());
              }
              catch(IOException ex) {
                Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.connection.error"),
                                           UIDesignerBundle.message("snapshot.title"), Messages.getErrorIcon());
                return;
              }
              runSnapShooterSession(client, project, dir, view);
            }
          });
        }
      });
      try {
        final ProgramRunner runner = RunnerRegistry.getInstance().getRunner(DefaultRunExecutor.EXECUTOR_ID, appConfig);
        LOG.assertTrue(runner != null, "Runner MUST not be null!");
        Executor executor = DefaultRunExecutor.getRunExecutorInstance();
        runner.execute(
          new ExecutionEnvironment(executor, runner, snapshotConfiguration, project));
      }
      catch (ExecutionException ex) {
        Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.run.error", ex.getMessage()),
                                   UIDesignerBundle.message("snapshot.title"), Messages.getErrorIcon());
      }
    }
    else {
      runSnapShooterSession(client, project, dir, view);
    }
  }

  /**
   * Runs one interactive snapshot session against a connected client: freezes the target app's
   * Swing event handling, shows the component-picker dialog, and on OK writes the captured form
   * XML into {@code dir} as a new .form file. Always resumes the target app and disposes the
   * client connection before returning.
   */
  private static void runSnapShooterSession(final SnapShotClient client, final Project project, final PsiDirectory dir, final IdeView view) {
    try {
      client.suspendSwing();
    }
    catch (IOException e1) {
      Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.connection.error"),
                                 UIDesignerBundle.message("snapshot.title"), Messages.getInformationIcon());
      return;
    }

    final MyDialog dlg = new MyDialog(project, client, dir);
    dlg.show();
    if (dlg.getExitCode() == DialogWrapper.OK_EXIT_CODE) {
      final int id = dlg.getSelectedComponentId();
      // The background task reports either the snapshot XML (String) or the failure (Exception)
      // through this single Ref; we disambiguate by type below.
      final Ref<Object> result = new Ref<Object>();
      ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() {
        public void run() {
          try {
            result.set(client.createSnapshot(id));
          }
          catch (Exception ex) {
            result.set(ex);
          }
        }
      }, UIDesignerBundle.message("progress.creating.snapshot"), false, project);

      String snapshot = null;
      if (result.get() instanceof String) {
        snapshot = (String) result.get();
      }
      else {
        Exception ex = (Exception) result.get();
        Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.create.error", ex.getMessage()),
                                   UIDesignerBundle.message("snapshot.title"), Messages.getErrorIcon());
      }

      if (snapshot != null) {
        final String snapshot1 = snapshot;
        // PSI mutation must happen inside a write action wrapped in a command.
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
          public void run() {
            CommandProcessor.getInstance().executeCommand(project, new Runnable() {
              public void run() {
                try {
                  PsiFile formFile = PsiFileFactory.getInstance(dir.getProject())
                    .createFileFromText(dlg.getFormName() + GuiFormFileType.DOT_DEFAULT_EXTENSION, snapshot1);
                  formFile = (PsiFile)dir.add(formFile);
                  formFile.getVirtualFile().setCharset(CharsetToolkit.UTF8_CHARSET);
                  formFile.getViewProvider().getDocument().setText(snapshot1);
                  view.selectElement(formFile);
                }
                catch (IncorrectOperationException ex) {
                  Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.save.error", ex.getMessage()),
                                             UIDesignerBundle.message("snapshot.title"), Messages.getErrorIcon());
                }
              }
            }, "", null);
          }
        });
      }
    }

    // Unfreeze the target application regardless of whether a snapshot was taken.
    try {
      client.resumeSwing();
    }
    catch (IOException ex) {
      Messages.showErrorDialog(project, UIDesignerBundle.message("snapshot.connection.broken"),
                               UIDesignerBundle.message("snapshot.title"));
    }
    client.dispose();
  }

  /**
   * Lets the user pick the run configuration to enable snapshotting on. Filters out
   * configurations not running on a 5.0+ JRE, confirms/chooses among the remainder, and flips
   * {@code ENABLE_SWING_INSPECTOR} on the selection. Returns null if the user cancels or no
   * compatible configuration exists. Note: mutates the passed-in {@code configurations} list.
   */
  @Nullable
  private static RunnerAndConfigurationSettings promptForSnapshotConfiguration(final Project project,
                                                                               final List<RunnerAndConfigurationSettings> configurations) {
    if (configurations.isEmpty()) {
      Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.no.configuration.error"),
                                 UIDesignerBundle.message("snapshot.title"), Messages.getInformationIcon());
      return null;
    }

    // Iterate backwards so removal does not shift unvisited indices.
    for(int i=configurations.size()-1; i >= 0; i--) {
      final JreVersionDetector detector = new JreVersionDetector();
      final ApplicationConfiguration configuration = (ApplicationConfiguration)configurations.get(i).getConfiguration();
      if (!detector.isJre50Configured(configuration) && !detector.isModuleJre50Configured(configuration)) {
        configurations.remove(i);
      }
    }

    if (configurations.isEmpty()) {
      Messages.showMessageDialog(project, UIDesignerBundle.message("snapshot.no.compatible.configuration.error"),
                                 UIDesignerBundle.message("snapshot.title"), Messages.getInformationIcon());
      return null;
    }

    final RunnerAndConfigurationSettings snapshotConfiguration;
    if (configurations.size() == 1) {
      // Single candidate: just confirm it (showYesNoDialog returns 1 for "No").
      final int rc = Messages.showYesNoDialog(
        project,
        UIDesignerBundle.message("snapshot.confirm.configuration.prompt", configurations.get(0).getConfiguration().getName()),
        UIDesignerBundle.message("snapshot.title"),
        Messages.getQuestionIcon());
      if (rc == 1) {
        return null;
      }
      snapshotConfiguration = configurations.get(0);
    }
    else {
      String[] names = new String[configurations.size()];
      for(int i=0; i<configurations.size(); i++) {
        names [i] = configurations.get(i).getConfiguration().getName();
      }
      int rc = Messages.showChooseDialog(
        project,
        UIDesignerBundle.message("snapshot.choose.configuration.prompt"),
        UIDesignerBundle.message("snapshot.title"),
        Messages.getQuestionIcon(),
        names,
        names [0]
      );
      if (rc < 0) return null;
      snapshotConfiguration = configurations.get(rc);
    }
    ((ApplicationConfiguration) snapshotConfiguration.getConfiguration()).ENABLE_SWING_INSPECTOR = true;
    return snapshotConfiguration;
  }

  /**
   * Dialog showing the remote component tree; the user picks the container to capture and names
   * the new form file. OK is enabled only for a non-empty form name and a valid container
   * selection.
   */
  private static class MyDialog extends DialogWrapper {
    private JPanel myRootPanel;
    private JTree myComponentTree;
    private JTextField myFormNameTextField;
    private JLabel myErrorLabel;
    private final Project myProject;
    private final SnapShotClient myClient;
    private final PsiDirectory myDirectory;
    @NonNls private static final String SWING_PACKAGE = "javax.swing.";

    private MyDialog(Project project, final SnapShotClient client, final PsiDirectory dir) {
      super(project, true);
      myProject = project;
      myClient = client;
      myDirectory = dir;
      init();
      setTitle(UIDesignerBundle.message("snapshot.title"));

      final SnapShotTreeModel model = new SnapShotTreeModel(client);
      myComponentTree.setModel(model);
      myComponentTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
      myComponentTree.getSelectionModel().addTreeSelectionListener(new TreeSelectionListener() {
        public void valueChanged(TreeSelectionEvent e) {
          updateOKAction();
        }
      });
      // Two expansion passes over the visible rows; expanding a row adds new rows, so a second
      // pass opens the nodes revealed by the first.
      for(int i=0; i<2; i++) {
        for(int row=myComponentTree.getRowCount()-1; row >= 0; row--) {
          myComponentTree.expandRow(row);
        }
      }
      myComponentTree.getSelectionModel().setSelectionPath(myComponentTree.getPathForRow(0));
      myFormNameTextField.setText(suggestFormName());

      // Render component titles in the editor's string-literal color.
      final EditorColorsScheme globalScheme = EditorColorsManager.getInstance().getGlobalScheme();
      final TextAttributes attributes = globalScheme.getAttributes(JavaHighlightingColors.STRING);
      final SimpleTextAttributes titleAttributes =
        new SimpleTextAttributes(SimpleTextAttributes.STYLE_PLAIN, attributes.getForegroundColor());

      myComponentTree.setCellRenderer(new ColoredTreeCellRenderer() {
        public void customizeCellRenderer(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
          SnapShotRemoteComponent rc = (SnapShotRemoteComponent) value;
          String className = rc.getClassName();
          // Strip the "javax.swing." prefix for brevity.
          if (className.startsWith(SWING_PACKAGE)) {
            append(className.substring(SWING_PACKAGE.length()), SimpleTextAttributes.REGULAR_ATTRIBUTES);
          }
          else {
            append(className, SimpleTextAttributes.REGULAR_ATTRIBUTES);
          }
          if (rc.getText().length() > 0) {
            append(" \"" + rc.getText() + "\"", titleAttributes);
          }
          if (rc.getLayoutManager().length() > 0) {
            append(" (" + rc.getLayoutManager() + ")", SimpleTextAttributes.GRAY_ATTRIBUTES);
          }
          if (rc.isTopLevel()) {
            setIcon(AllIcons.FileTypes.UiForm);
          }
          else {
            final Palette palette = Palette.getInstance(myProject);
            final ComponentItem item = palette.getItem(rc.getClassName());
            if (item != null) {
              setIcon(item.getSmallIcon());
            }
            else {
              setIcon(UIDesignerIcons.Unknown);
            }
          }
        }
      });

      myFormNameTextField.getDocument().addDocumentListener(new DocumentAdapter() {
        protected void textChanged(DocumentEvent e) {
          updateOKAction();
        }
      });
      updateOKAction();
    }

    /** Returns the first "FormN" name (N >= 1) not already taken in the target directory. */
    @NonNls
    private String suggestFormName() {
      int count = 0;
      do {
        count++;
      } while(myDirectory.findFile("Form" + count + GuiFormFileType.DOT_DEFAULT_EXTENSION) != null);
      return "Form" + count;
    }

    /** Re-evaluates OK availability and the inline error label after any selection/name change. */
    private void updateOKAction() {
      final boolean selectedComponentValid = isSelectedComponentValid();
      setOKActionEnabled(isFormNameValid() && selectedComponentValid);
      if (myComponentTree.getSelectionPath() != null && !selectedComponentValid) {
        myErrorLabel.setText(UIDesignerBundle.message("snapshooter.invalid.container"));
      }
      else {
        // Single space keeps the label's height stable when there is no error.
        myErrorLabel.setText(" ");
      }
    }

    /** True when the selected node (or, for a top-level frame/dialog, its first child) is a capturable container. */
    private boolean isSelectedComponentValid() {
      final TreePath selectionPath = myComponentTree.getSelectionPath();
      if (selectionPath == null) return false;
      SnapShotRemoteComponent rc = (SnapShotRemoteComponent) selectionPath.getLastPathComponent();
      if (isValidComponent(rc)) return true;
      if (selectionPath.getPathCount() == 2) {
        // capture frame/dialog root pane when a frame or dialog itself is selected
        final SnapShotRemoteComponent[] children = rc.getChildren();
        return children != null && children.length > 0 && isValidComponent(children[0]);
      }
      return false;
    }

    /**
     * A component is capturable if its class (or any superclass, resolved through the project's
     * PSI) is JPanel, JTabbedPane, JScrollPane or JSplitPane.
     */
    private boolean isValidComponent(final SnapShotRemoteComponent rc) {
      // '$' -> '.' so nested classes resolve by their PSI qualified name.
      PsiClass componentClass =
        JavaPsiFacade.getInstance(myProject).findClass(rc.getClassName().replace('$', '.'), GlobalSearchScope.allScope(myProject));
      while(componentClass != null) {
        if (JPanel.class.getName().equals(componentClass.getQualifiedName()) ||
            JTabbedPane.class.getName().equals(componentClass.getQualifiedName()) ||
            JScrollPane.class.getName().equals(componentClass.getQualifiedName()) ||
            JSplitPane.class.getName().equals(componentClass.getQualifiedName())) {
          return true;
        }
        componentClass = componentClass.getSuperClass();
      }
      return false;
    }

    private boolean isFormNameValid() {
      return myFormNameTextField.getText().length() > 0;
    }

    @Override @NonNls
    protected String getDimensionServiceKey() {
      return "CreateSnapShotAction.MyDialog";
    }

    @Override
    public JComponent getPreferredFocusedComponent() {
      return myFormNameTextField;
    }

    @NotNull
    @Override
    protected Action getOKAction() {
      final Action okAction = super.getOKAction();
      okAction.putValue(Action.NAME, UIDesignerBundle.message("create.snapshot.button"));
      return okAction;
    }

    /** Validates file creation and unknown layout managers before closing with OK. */
    @Override
    protected void doOKAction() {
      if (getOKAction().isEnabled()) {
        try {
          myDirectory.checkCreateFile(getFormName() + GuiFormFileType.DOT_DEFAULT_EXTENSION);
        }
        catch (IncorrectOperationException e) {
          JOptionPane.showMessageDialog(myRootPanel, UIDesignerBundle.message("error.form.already.exists", getFormName()));
          return;
        }
        if (!checkUnknownLayoutManagers(myDirectory.getProject())) return;
        close(OK_EXIT_CODE);
      }
    }

    /**
     * Walks the selected subtree collecting layout-manager classes the designer does not know;
     * if any are found, asks the user whether to proceed anyway. Returns false to abort OK.
     */
    private boolean checkUnknownLayoutManagers(final Project project) {
      final Set<String> layoutManagerClasses = new TreeSet<String>();
      final SnapShotRemoteComponent rc = (SnapShotRemoteComponent) myComponentTree.getSelectionPath().getLastPathComponent();
      assert rc != null;
      final Ref<Exception> err = new Ref<Exception>();
      Runnable runnable = new Runnable() {
        public void run() {
          try {
            collectUnknownLayoutManagerClasses(project, rc, layoutManagerClasses);
          }
          catch (IOException e) {
            err.set(e);
          }
        }
      };
      if (!ProgressManager.getInstance().runProcessWithProgressSynchronously(runnable,
                                                                             UIDesignerBundle.message("progress.validating.layout.managers"), false, project)) {
        return false;
      }
      if (!err.isNull()) {
        Messages.showErrorDialog(myRootPanel, UIDesignerBundle.message("snapshot.connection.broken"),
                                 UIDesignerBundle.message("snapshot.title"));
        return false;
      }
      if (!layoutManagerClasses.isEmpty()) {
        StringBuilder builder = new StringBuilder(UIDesignerBundle.message("snapshot.unknown.layout.prefix"));
        for(String layoutManagerClass: layoutManagerClasses) {
          builder.append(layoutManagerClass).append("\n");
        }
        builder.append(UIDesignerBundle.message("snapshot.unknown.layout.prompt"));
        // 0 is the "Yes" button: proceed despite unknown layout managers.
        return Messages.showYesNoDialog(myProject, builder.toString(), UIDesignerBundle.message("snapshot.title"),
                                        Messages.getQuestionIcon()) == 0;
      }
      return true;
    }

    /**
     * Recursively gathers unknown layout-manager class names; lazily fetches children from the
     * remote client (and caches them on the node) when they have not been listed yet.
     */
    private void collectUnknownLayoutManagerClasses(final Project project, final SnapShotRemoteComponent rc,
                                                    final Set<String> layoutManagerClasses) throws IOException {
      RadComponentFactory factory = InsertComponentProcessor.getRadComponentFactory(project, rc.getClassName());
      if (factory instanceof RadContainer.Factory && rc.getLayoutManager().length() > 0 &&
          !LayoutManagerRegistry.isKnownLayoutClass(rc.getLayoutManager())) {
        layoutManagerClasses.add(rc.getLayoutManager());
      }
      SnapShotRemoteComponent[] children = rc.getChildren();
      if (children == null) {
        children = myClient.listChildren(rc.getId());
        rc.setChildren(children);
      }
      for(SnapShotRemoteComponent child: children) {
        collectUnknownLayoutManagerClasses(project, child, layoutManagerClasses);
      }
    }

    @Nullable
    protected JComponent createCenterPanel() {
      return myRootPanel;
    }

    /** Id of the component to snapshot; falls back to a top-level node's first child like isSelectedComponentValid(). */
    public int getSelectedComponentId() {
      final TreePath selectionPath = myComponentTree.getSelectionPath();
      SnapShotRemoteComponent rc = (SnapShotRemoteComponent) selectionPath.getLastPathComponent();
      if (!isValidComponent(rc) && selectionPath.getPathCount() == 2) {
        // capture frame/dialog root pane when a frame or dialog itself is selected
        final SnapShotRemoteComponent[] children = rc.getChildren();
        if (children != null && children.length > 0 && isValidComponent(children [0])) {
          return children [0].getId();
        }
      }
      return rc.getId();
    }

    public String getFormName() {
      return myFormNameTextField.getText();
    }
  }
}
romankagan/DDBWorkbench
plugins/ui-designer/src/com/intellij/uiDesigner/snapShooter/CreateSnapShotAction.java
Java
apache-2.0
23,500
/** * Copyright (c) 2013-2020 Contributors to the Eclipse Foundation * * <p> See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.text.ParseException; import org.junit.Test; import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.query.filter.expression.And; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericBetween; import org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator; import org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser; import org.locationtech.geowave.core.store.query.gwql.statement.SelectStatement; import org.locationtech.geowave.core.store.query.gwql.statement.Statement; public class SelectStatementTest extends AbstractGWQLTest { @Test public void testInvalidStatements() { final DataStore dataStore = createDataStore(); // Missing from assertInvalidStatement(dataStore, "SELECT *", "expecting FROM"); // Missing store and type name assertInvalidStatement(dataStore, "SELECT * FROM", "missing IDENTIFIER"); // Missing everything 
assertInvalidStatement(dataStore, "SELECT", "expecting {'*', IDENTIFIER}"); // All columns and single selector assertInvalidStatement(dataStore, "SELECT *, pop FROM type", "expecting FROM"); // All columns and aggregation selector assertInvalidStatement(dataStore, "SELECT *, agg(column) FROM type", "expecting FROM"); // Nonexistent type assertInvalidStatement(dataStore, "SELECT * FROM nonexistent", "No type named nonexistent"); // No selectors assertInvalidStatement(dataStore, "SELECT FROM type", "expecting {'*', IDENTIFIER}"); // Aggregation and non aggregation selectors assertInvalidStatement(dataStore, "SELECT agg(*), pop FROM type", "expecting '('"); // No where filter assertInvalidStatement(dataStore, "SELECT * FROM type WHERE", "mismatched input '<EOF>'"); // No limit count assertInvalidStatement(dataStore, "SELECT * FROM type LIMIT", "missing INTEGER"); // Non-integer limit count assertInvalidStatement(dataStore, "SELECT * FROM type LIMIT 1.5", "expecting INTEGER"); // Missing column alias assertInvalidStatement(dataStore, "SELECT pop AS FROM type", "expecting IDENTIFIER"); } @Test public void testValidStatements() { final DataStore dataStore = createDataStore(); GWQLParser.parseStatement(dataStore, "SELECT * FROM type"); GWQLParser.parseStatement(dataStore, "SELECT * FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT * FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT * FROM type WHERE pop > 1 LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type"); GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type WHERE pop > 1 LIMIT 2"); GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type"); GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type WHERE pop 
< 1"); GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type WHERE pop > 1 LIMIT 2"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type WHERE pop > 1 LIMIT 3"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type WHERE pop > 1 LIMIT 3"); GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type"); GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type WHERE pop > 1 LIMIT 4"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type WHERE pop > 1 LIMIT 4"); } @Test public void testAllColumns() throws ParseException, IOException { final DataStore dataStore = createDataStore(); final String statement = "SELECT * FROM type"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNull(selectStatement.getFilter()); } @Test public void 
testAllColumnsWithFilter() throws ParseException, IOException { final DataStore dataStore = createDataStore(); final String statement = "SELECT * FROM type WHERE pop BETWEEN 1000 AND 2000 and pid > 'abc'"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getFilter()); Filter filter = selectStatement.getFilter(); assertTrue(filter instanceof And); And andFilter = (And) filter; assertTrue(andFilter.getChildren().length == 2); assertTrue(andFilter.getChildren()[0] instanceof NumericBetween); assertTrue(andFilter.getChildren()[1] instanceof TextComparisonOperator); assertNull(selectStatement.getLimit()); } @Test public void testAllColumnsWithFilterAndLimit() throws ParseException, IOException { final DataStore dataStore = createDataStore(); final String statement = "SELECT * FROM type WHERE pop BETWEEN 1000 AND 2000 and pid > 'abc' LIMIT 1"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getFilter()); Filter filter = selectStatement.getFilter(); assertTrue(filter instanceof And); And andFilter = (And) filter; assertTrue(andFilter.getChildren().length == 2); assertTrue(andFilter.getChildren()[0] instanceof NumericBetween); assertTrue(andFilter.getChildren()[1] instanceof TextComparisonOperator); assertNotNull(selectStatement.getLimit()); assertEquals(1, 
selectStatement.getLimit().intValue()); } @Test public void testAggregation() { final DataStore dataStore = createDataStore(); final String statement = "SELECT sum(pop) FROM type"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement; assertTrue(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getSelectors()); assertTrue(selectStatement.getSelectors().size() == 1); assertTrue(selectStatement.getSelectors().get(0) instanceof AggregationSelector); AggregationSelector selector = (AggregationSelector) selectStatement.getSelectors().get(0); assertNull(selector.alias()); assertEquals("sum", selector.functionName()); assertEquals(1, selector.functionArgs().length); assertEquals("pop", selector.functionArgs()[0]); assertNull(selectStatement.getFilter()); } @Test public void testAggregationAlias() { final DataStore dataStore = createDataStore(); final String statement = "SELECT sum(pop) AS total FROM type"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement; assertTrue(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getSelectors()); assertTrue(selectStatement.getSelectors().size() == 1); assertTrue(selectStatement.getSelectors().get(0) instanceof AggregationSelector); AggregationSelector selector = (AggregationSelector) selectStatement.getSelectors().get(0); assertEquals("total", selector.alias()); assertEquals("sum", selector.functionName()); assertEquals(1, selector.functionArgs().length); assertEquals("pop", 
selector.functionArgs()[0]); assertNull(selectStatement.getFilter()); } @Test public void testColumnSubset() { final DataStore dataStore = createDataStore(); final String statement = "SELECT pop, start, end FROM type"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getSelectors()); assertTrue(selectStatement.getSelectors().size() == 3); assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector); ColumnSelector selector = (ColumnSelector) selectStatement.getSelectors().get(0); assertNull(selector.alias()); assertEquals("pop", selector.columnName()); assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(1); assertNull(selector.alias()); assertEquals("start", selector.columnName()); assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(2); assertNull(selector.alias()); assertEquals("end", selector.columnName()); assertNull(selectStatement.getFilter()); } @Test public void testColumnSubsetWithAliases() { final DataStore dataStore = createDataStore(); final String statement = "SELECT pop AS pop_alt, start, end AS end_alt FROM type"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getSelectors()); 
assertTrue(selectStatement.getSelectors().size() == 3); assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector); ColumnSelector selector = (ColumnSelector) selectStatement.getSelectors().get(0); assertEquals("pop_alt", selector.alias()); assertEquals("pop", selector.columnName()); assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(1); assertNull(selector.alias()); assertEquals("start", selector.columnName()); assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(2); assertEquals("end_alt", selector.alias()); assertEquals("end", selector.columnName()); assertNull(selectStatement.getFilter()); } @Test public void testUnconventionalNaming() { final DataStore dataStore = createDataStore( BasicDataTypeAdapter.newAdapter("ty-p3", UnconventionalNameType.class, "pid"), "a-1"); final String statement = "SELECT [a-1], `b-2`, \"c-3\" FROM [ty-p3]"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement<?> selectStatement = (SelectStatement<?>) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("ty-p3", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getSelectors()); assertTrue(selectStatement.getSelectors().size() == 3); assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector); ColumnSelector selector = (ColumnSelector) selectStatement.getSelectors().get(0); assertNull(selector.alias()); assertEquals("a-1", selector.columnName()); assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(1); assertNull(selector.alias()); assertEquals("b-2", selector.columnName()); 
assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(2); assertNull(selector.alias()); assertEquals("c-3", selector.columnName()); assertNull(selectStatement.getFilter()); } @GeoWaveDataType protected static class UnconventionalNameType { @GeoWaveField(name = "pid") private String pid; @GeoWaveField(name = "a-1") private Long a1; @GeoWaveField(name = "b-2") private Long b2; @GeoWaveField(name = "c-3") private Long c3; public UnconventionalNameType() {} public UnconventionalNameType(final String pid, final Long a1, final Long b2, final Long c3) { this.pid = pid; this.a1 = a1; this.b2 = b2; this.c3 = c3; } } }
locationtech/geowave
core/store/src/test/java/org/locationtech/geowave/core/store/query/gwql/SelectStatementTest.java
Java
apache-2.0
15,592
package it.unimi.dsi.fastutil; /* * Copyright (C) 2010-2015 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.Collection; /** An interface for data structures whose size can exceed {@link Integer#MAX_VALUE}. * * <P>The only methods specified by this interfaces are {@link #size64()}, and * a deprecated {@link #size()} identical to {@link Collection#size()}. Implementations * can work around the type problem of {@link java.util.Collection#size()} * (e.g., not being able to return more than {@link Integer#MAX_VALUE}) by implementing this * interface. Callers interested in large structures * can use a reflective call to <code>instanceof</code> to check for the presence of {@link #size64()}. * * <p>We remark that it is always a good idea to implement both {@link #size()} <em>and</em> {@link #size64()}, * as the former might be implemented by a superclass in an incompatible way. If you implement this interface, * just implement {@link #size()} as a <em>deprecated</em> method returning <code>Math.min(Integer.MAX_VALUE, size64())</code>. */ public interface Size64 { /** Returns the size of this data structure as a long. * * @return the size of this data structure. */ long size64(); /** Returns the size of this data structure, minimized with {@link Integer#MAX_VALUE}. * * @return the size of this data structure, minimized with {@link Integer#MAX_VALUE}. * @see java.util.Collection#size() * @deprecated Use {@link #size64()} instead. 
*/ @Deprecated int size(); }
agavra/fastutil
src/it/unimi/dsi/fastutil/Size64.java
Java
apache-2.0
2,069
# Shared setup for the Serverspec integration suite: loads the libraries every
# spec file in this directory depends on.
require 'serverspec'                    # server-state assertion DSL (describe package/service/...)
require 'pathname'                      # path manipulation used by specs
require 'json'                          # parsing JSON fixtures/command output
# NOTE(review): presumably used to install helper gems at spec runtime — confirm against the specs.
require 'rubygems/dependency_installer'
RightScale-Services-Cookbooks/rsc_postfix
test/integration/local/serverspec/spec_helper.rb
Ruby
apache-2.0
96
/**
 * Barrel module for the operation shims: re-exports each shim's default export
 * under its conventional name so consumers can import them all from one place.
 */
export { default as legacyProcessorShim } from './legacy-processor-shim';
export { default as legacyReaderShim } from './legacy-reader-shim';
export { default as legacySliceEventsShim } from './legacy-slice-events-shim';
export { default as operationAPIShim } from './operation-api-shim';
export { default as processorShim } from './processor-shim';
export { default as readerShim } from './reader-shim';
export { default as schemaShim } from './schema-shim';
terascope/teraslice
packages/job-components/src/operations/shims/index.ts
TypeScript
apache-2.0
515
class ChangeUtcTimeProfileTypeToGlobal < ActiveRecord::Migration[4.2]
  # Migration-local model so this migration does not depend on (or trigger
  # callbacks from) the application's TimeProfile class.
  class TimeProfile < ActiveRecord::Base; end

  def up
    say_with_time("Change Default UTC Time Profile to Type Global") do
      # Push the nil check into SQL instead of loading every row and filtering
      # in Ruby; find_each batches the matching rows to keep memory bounded.
      # Per-record save is kept (same semantics as the original loop).
      TimeProfile.where(profile_type: nil).find_each do |tp|
        tp.profile_type = "global"
        tp.save
      end
    end
  end
end
NaNi-Z/manageiq
db/migrate/20140402134329_change_utc_time_profile_type_to_global.rb
Ruby
apache-2.0
359
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.plugin.hive;

import com.google.common.collect.ImmutableMap;
import io.prestosql.plugin.hive.metastore.StorageFormat;
import io.prestosql.plugin.hive.rcfile.HdfsRcFileDataSource;
import io.prestosql.rcfile.RcFileDataSource;
import io.prestosql.rcfile.RcFileEncoding;
import io.prestosql.rcfile.binary.BinaryRcFileEncoding;
import io.prestosql.spi.PrestoException;
import io.prestosql.spi.connector.ConnectorSession;
import io.prestosql.spi.type.Type;
import io.prestosql.spi.type.TypeManager;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.joda.time.DateTimeZone;

import javax.inject.Inject;

import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.function.Supplier;

import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_WRITER_OPEN_ERROR;
import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_WRITE_VALIDATION_FAILED;
import static io.prestosql.plugin.hive.HiveMetadata.PRESTO_QUERY_ID_NAME;
import static io.prestosql.plugin.hive.HiveMetadata.PRESTO_VERSION_NAME;
import static io.prestosql.plugin.hive.HiveSessionProperties.isRcfileOptimizedWriterValidate;
import static io.prestosql.plugin.hive.rcfile.RcFilePageSourceFactory.createTextVectorEncoding;
import static io.prestosql.plugin.hive.util.HiveUtil.getColumnNames;
import static io.prestosql.plugin.hive.util.HiveUtil.getColumnTypes;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;

/**
 * {@link HiveFileWriterFactory} that creates writers for RCFile tables. It handles both the
 * binary (LazyBinaryColumnarSerDe) and text (ColumnarSerDe) RCFile encodings and declines
 * (returns empty) for any other storage format.
 */
public class RcFileFileWriterFactory
        implements HiveFileWriterFactory
{
    private final DateTimeZone hiveStorageTimeZone;
    private final HdfsEnvironment hdfsEnvironment;
    private final TypeManager typeManager;
    private final NodeVersion nodeVersion;
    private final FileFormatDataSourceStats stats;

    /** Injection constructor; pulls the storage time zone from the Hive config. */
    @Inject
    public RcFileFileWriterFactory(
            HdfsEnvironment hdfsEnvironment,
            TypeManager typeManager,
            NodeVersion nodeVersion,
            HiveConfig hiveConfig,
            FileFormatDataSourceStats stats)
    {
        this(hdfsEnvironment, typeManager, nodeVersion, requireNonNull(hiveConfig, "hiveConfig is null").getDateTimeZone(), stats);
    }

    public RcFileFileWriterFactory(
            HdfsEnvironment hdfsEnvironment,
            TypeManager typeManager,
            NodeVersion nodeVersion,
            DateTimeZone hiveStorageTimeZone,
            FileFormatDataSourceStats stats)
    {
        this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
        this.typeManager = requireNonNull(typeManager, "typeManager is null");
        this.nodeVersion = requireNonNull(nodeVersion, "nodeVersion is null");
        this.hiveStorageTimeZone = requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
        this.stats = requireNonNull(stats, "stats is null");
    }

    /**
     * Creates an RCFile writer for {@code path}, or {@link Optional#empty()} when the storage
     * format is not RCFile (so another factory can claim it).
     *
     * @param inputColumnNames column order the caller will supply pages in; remapped below to the
     *        table's declared column order
     * @throws PrestoException HIVE_WRITER_OPEN_ERROR if the file/stream cannot be created
     */
    @Override
    public Optional<FileWriter> createFileWriter(
            Path path,
            List<String> inputColumnNames,
            StorageFormat storageFormat,
            Properties schema,
            JobConf configuration,
            ConnectorSession session)
    {
        if (!RCFileOutputFormat.class.getName().equals(storageFormat.getOutputFormat())) {
            return Optional.empty();
        }

        // Pick the page encoding from the declared SerDe; anything else is not ours to handle.
        RcFileEncoding rcFileEncoding;
        if (LazyBinaryColumnarSerDe.class.getName().equals(storageFormat.getSerDe())) {
            rcFileEncoding = new BinaryRcFileEncoding();
        }
        else if (ColumnarSerDe.class.getName().equals(storageFormat.getSerDe())) {
            rcFileEncoding = createTextVectorEncoding(schema, hiveStorageTimeZone);
        }
        else {
            return Optional.empty();
        }

        Optional<String> codecName = Optional.ofNullable(configuration.get(FileOutputFormat.COMPRESS_CODEC));

        // existing tables and partitions may have columns in a different order than the writer is providing, so build
        // an index to rearrange columns in the proper order
        List<String> fileColumnNames = getColumnNames(schema);
        List<Type> fileColumnTypes = getColumnTypes(schema).stream()
                .map(hiveType -> hiveType.getType(typeManager))
                .collect(toList());

        int[] fileInputColumnIndexes = fileColumnNames.stream()
                .mapToInt(inputColumnNames::indexOf)
                .toArray();

        try {
            FileSystem fileSystem = hdfsEnvironment.getFileSystem(session.getUser(), path, configuration);
            // NOTE(review): if RcFileFileWriter construction below throws, this stream is not
            // explicitly closed before the wrapping PrestoException — possible leak; confirm
            // whether RcFileFileWriter takes ownership on failure.
            OutputStream outputStream = fileSystem.create(path);

            // When session-level validation is on, the writer re-reads the file it wrote to
            // verify the output; this supplier opens that read-back data source lazily.
            Optional<Supplier<RcFileDataSource>> validationInputFactory = Optional.empty();
            if (isRcfileOptimizedWriterValidate(session)) {
                validationInputFactory = Optional.of(() -> {
                    try {
                        return new HdfsRcFileDataSource(
                                path.toString(),
                                fileSystem.open(path),
                                fileSystem.getFileStatus(path).getLen(),
                                stats);
                    }
                    catch (IOException e) {
                        throw new PrestoException(HIVE_WRITE_VALIDATION_FAILED, e);
                    }
                });
            }

            // On abort, delete the partially written file.
            Callable<Void> rollbackAction = () -> {
                fileSystem.delete(path, false);
                return null;
            };

            return Optional.of(new RcFileFileWriter(
                    outputStream,
                    rollbackAction,
                    rcFileEncoding,
                    fileColumnTypes,
                    codecName,
                    fileInputColumnIndexes,
                    ImmutableMap.<String, String>builder()
                            .put(PRESTO_VERSION_NAME, nodeVersion.toString())
                            .put(PRESTO_QUERY_ID_NAME, session.getQueryId())
                            .build(),
                    validationInputFactory));
        }
        catch (Exception e) {
            throw new PrestoException(HIVE_WRITER_OPEN_ERROR, "Error creating RCFile file", e);
        }
    }
}
hgschmie/presto
presto-hive/src/main/java/io/prestosql/plugin/hive/RcFileFileWriterFactory.java
Java
apache-2.0
7,137
/** * Copyright 2017 Goldman Sachs. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.gs.obevo.db.api.platform; import javax.sql.DataSource; import com.gs.obevo.api.appdata.Change; import com.gs.obevo.api.appdata.ChangeInput; import com.gs.obevo.api.platform.DeployExecutionDao; import com.gs.obevo.api.platform.DeployerAppContext; import com.gs.obevo.api.platform.MainDeployerArgs; import com.gs.obevo.db.api.appdata.DbEnvironment; import com.gs.obevo.db.impl.core.checksum.DbChecksumDao; import com.gs.obevo.dbmetadata.api.DbMetadataManager; import org.eclipse.collections.api.list.ImmutableList; public interface DbDeployerAppContext extends DeployerAppContext<DbEnvironment, DbDeployerAppContext> { boolean STRICT_SETUP_ENV_INFRA_DEFAULT = false; /** * Sets whether to fail the command if the environment setup fails for certain operations (true) or to log a warning (false). * * @deprecated Renamed to {@link #setStrictSetupEnvInfra(boolean)} */ @Deprecated DbDeployerAppContext setFailOnSetupException(boolean failOnSetupException); /** * Sets whether to fail the command if the environment setup fails for certain operations (true) or to log a warning (false). 
*/ DbDeployerAppContext setStrictSetupEnvInfra(boolean strictSetupEnvInfra); ImmutableList<Change> readChangesFromAudit(); ImmutableList<ChangeInput> readChangesFromSource(); ImmutableList<ChangeInput> readChangesFromSource(boolean useBaseline); DbMetadataManager getDbMetadataManager(); SqlExecutor getSqlExecutor(); DeployExecutionDao getDeployExecutionDao(); DbChecksumDao getDbChecksumDao(); /** * Data Source with a single shared connection that clients can use to access the database being deployed. * This should NOT be used by this internal product code. This is only here for external clients. */ DataSource getDataSource(); DbDeployerAppContext cleanAndDeploy(); DbDeployerAppContext setupAndCleanAndDeploy(); /** * Read in the input files and return stats. Only used for cases w/ some external integrations where a client wants * to read the metrics from the input source. */ void readSource(MainDeployerArgs deployerArgs); }
shantstepanian/obevo
obevo-db/src/main/java/com/gs/obevo/db/api/platform/DbDeployerAppContext.java
Java
apache-2.0
2,854
/* * Copyright 2016 MongoDB, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mongodb.client; import org.bson.BsonReader; import org.bson.BsonValue; import org.bson.BsonWriter; import org.bson.Document; import org.bson.codecs.CollectibleCodec; import org.bson.codecs.DecoderContext; import org.bson.codecs.EncoderContext; import org.bson.codecs.configuration.CodecRegistry; import org.bson.types.ObjectId; import java.util.LinkedHashMap; import static java.lang.String.format; public final class ImmutableDocumentCodec implements CollectibleCodec<ImmutableDocument> { private final CodecRegistry codecRegistry; private static final String ID_FIELD_NAME = "_id"; public ImmutableDocumentCodec(final CodecRegistry codecRegistry) { this.codecRegistry = codecRegistry; } @Override public ImmutableDocument generateIdIfAbsentFromDocument(final ImmutableDocument document) { LinkedHashMap<String, Object> mutable = new LinkedHashMap<String, Object>(document); mutable.put(ID_FIELD_NAME, new ObjectId()); return new ImmutableDocument(mutable); } @Override public boolean documentHasId(final ImmutableDocument document) { return document.containsKey(ID_FIELD_NAME); } @Override public BsonValue getDocumentId(final ImmutableDocument document) { if (!documentHasId(document)) { throw new IllegalStateException(format("The document does not contain an %s", ID_FIELD_NAME)); } return document.toBsonDocument(ImmutableDocument.class, codecRegistry).get(ID_FIELD_NAME); } @Override public void 
encode(final BsonWriter writer, final ImmutableDocument value, final EncoderContext encoderContext) { codecRegistry.get(Document.class).encode(writer, new Document(value), encoderContext); } @Override public Class<ImmutableDocument> getEncoderClass() { return ImmutableDocument.class; } @Override public ImmutableDocument decode(final BsonReader reader, final DecoderContext decoderContext) { Document document = codecRegistry.get(Document.class).decode(reader, decoderContext); return new ImmutableDocument(document); } }
jsonking/mongo-java-driver
driver-core/src/test/unit/com/mongodb/client/ImmutableDocumentCodec.java
Java
apache-2.0
2,742
# encoding: UTF-8 # Copyright 2012 Twitter, Inc # http://www.apache.org/licenses/LICENSE-2.0 module TwitterCldr module Resources module Properties class PropertyImporter < Importer private def execute load.each_pair do |property_name, property_values| property_values.each_pair do |property_value, ranges| database.store(property_name, property_value, ranges) end end end def database @database ||= TwitterCldr::Shared::PropertiesDatabase.new( params.fetch(:output_path) ) end def parse_file(file, &block) UnicodeFileParser.parse_standard_file(file, &block) end def load results = Hash.new do |h, k| h[k] = Hash.new { |h, k| h[k] = [] } end rangify_hash( parse_file(source_path).each_with_object(results) do |data, ret| next unless data[0].size > 0 if block_given? yield data, ret else code_points = expand_range(data[0]) property_value = format_property_value(data[1]) ret[property_name][property_value] += code_points end end ) end def rangify_hash(hash) hash.each_with_object({}) do |(key, value), ret| ret[key] = case value when Hash rangify_hash(value) when Array TwitterCldr::Utils::RangeSet.from_array(value) end end end def expand_range(str) initial, final = str.split("..") (initial.to_i(16)..(final || initial).to_i(16)).to_a end def format_property_value(value) value end end end end end
surfdome/twitter-cldr-rb
lib/twitter_cldr/resources/properties/property_importer.rb
Ruby
apache-2.0
1,886
package me.itzg.mccy.services; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.context.embedded.EmbeddedServletContainerInitializedEvent; import org.springframework.context.ApplicationListener; import org.springframework.stereotype.Service; @Service public class WebServerPortProviderImpl implements WebServerPortProvider, ApplicationListener<EmbeddedServletContainerInitializedEvent> { private static final Logger LOG = LoggerFactory.getLogger(WebServerPortProviderImpl.class); private int port = -1; @Override public int getPort() { return port; } @Override public void onApplicationEvent(EmbeddedServletContainerInitializedEvent embeddedServletContainerInitializedEvent) { this.port = embeddedServletContainerInitializedEvent.getEmbeddedServletContainer().getPort(); LOG.debug("Discovered web container port to be {}", this.port); } }
moorkop/mccy-engine
src/main/java/me/itzg/mccy/services/WebServerPortProviderImpl.java
Java
apache-2.0
948
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.vcs.log.graph.actions; import com.intellij.vcs.log.graph.PrintElement; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; /** */ public interface GraphMouseAction { @Nullable PrintElement getAffectedElement(); @NotNull Type getType(); enum Type { CLICK, OVER } }
ernestp/consulo
platform/vcs-log-graph-api/src/com/intellij/vcs/log/graph/actions/GraphMouseAction.java
Java
apache-2.0
946
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.tools.cvd.control; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.Icon; import javax.swing.JOptionPane; import org.apache.uima.UIMAFramework; import org.apache.uima.impl.UimaVersion; import org.apache.uima.tools.cvd.MainFrame; import org.apache.uima.tools.images.Images; /** * The Class AboutHandler. */ public class AboutHandler implements ActionListener { /** The main. */ private final MainFrame main; /** * Instantiates a new about handler. * * @param frame * the frame */ public AboutHandler(MainFrame frame) { this.main = frame; } /* * (non-Javadoc) * * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent) */ @Override public void actionPerformed(ActionEvent e) { String javaVersion = System.getProperty("java.version"); String javaVendor = System.getProperty("java.vendor"); javaVendor = (javaVendor == null) ? 
"<Unknown>" : javaVendor; String versionInfo = null; if (javaVersion == null) { versionInfo = "Running on an old version of Java"; } else { versionInfo = "Running Java " + javaVersion + " from " + javaVendor; } String msg = "CVD (CAS Visual Debugger)\n" + "Apache UIMA Version " + UIMAFramework.getVersionString() + " Copyright 2006, " + UimaVersion.getBuildYear() + " The Apache Software Foundation\n" + versionInfo + "\n"; Icon icon = Images.getImageIcon(Images.UIMA_LOGO_SMALL); if (icon == null) { JOptionPane.showMessageDialog(this.main, msg, "About CVD", JOptionPane.INFORMATION_MESSAGE); } else { JOptionPane.showMessageDialog(this.main, msg, "About CVD", JOptionPane.INFORMATION_MESSAGE, icon); } } }
apache/uima-uimaj
uimaj-tools/src/main/java/org/apache/uima/tools/cvd/control/AboutHandler.java
Java
apache-2.0
2,611
package com.mossle.bpm.data; import java.util.List; import java.util.Map; import javax.annotation.PostConstruct; import javax.annotation.Resource; import com.mossle.bpm.persistence.manager.BpmConfBaseManager; import com.mossle.bpm.persistence.manager.BpmConfListenerManager; import com.mossle.bpm.persistence.manager.BpmConfNodeManager; import com.mossle.core.csv.CsvProcessor; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ProcessListenerDeployer { private static Logger logger = LoggerFactory .getLogger(ProcessListenerDeployer.class); private BpmConfBaseManager bpmConfBaseManager; private BpmConfListenerManager bpmConfListenerManager; private BpmConfNodeManager bpmConfNodeManager; private String defaultTenantId = "1"; @PostConstruct public void init() throws Exception { String processListenerDataFilePath = "data/process-listener.csv"; String processListenerDataEncoding = "UTF-8"; ProcessListenerCallback processListenerCallback = new ProcessListenerCallback(); processListenerCallback.setBpmConfBaseManager(bpmConfBaseManager); processListenerCallback.setBpmConfNodeManager(bpmConfNodeManager); processListenerCallback .setBpmConfListenerManager(bpmConfListenerManager); new CsvProcessor().process(processListenerDataFilePath, processListenerDataEncoding, processListenerCallback); } @Resource public void setBpmConfBaseManager(BpmConfBaseManager bpmConfBaseManager) { this.bpmConfBaseManager = bpmConfBaseManager; } @Resource public void setBpmConfNodeManager(BpmConfNodeManager bpmConfNodeManager) { this.bpmConfNodeManager = bpmConfNodeManager; } @Resource public void setBpmConfListenerManager( BpmConfListenerManager bpmConfListenerManager) { this.bpmConfListenerManager = bpmConfListenerManager; } }
xuhuisheng/lemon
src/main/java/com/mossle/bpm/data/ProcessListenerDeployer.java
Java
apache-2.0
2,001
# # Copyright (c) 2013 Juniper Networks, Inc. All rights reserved. # import os import gevent import logging import kazoo.client import kazoo.exceptions import kazoo.handlers.gevent import kazoo.recipe.election from kazoo.client import KazooState from kazoo.retry import KazooRetry from bitarray import bitarray from cfgm_common.exceptions import ResourceExhaustionError, ResourceExistsError from gevent.coros import BoundedSemaphore import uuid LOG_DIR = '/var/log/contrail/' class IndexAllocator(object): def __init__(self, zookeeper_client, path, size=0, start_idx=0, reverse=False,alloc_list=None, max_alloc=0): self._size = size self._start_idx = start_idx if alloc_list is None: self._alloc_list = [{'start':start_idx, 'end':start_idx+size}] else: sorted_alloc_list = sorted(alloc_list, key=lambda k: k['start']) self._alloc_list = sorted_alloc_list alloc_count = len(self._alloc_list) total_size = 0 size = 0 #check for overlap in alloc_list --TODO for alloc_idx in range (0, alloc_count -1): idx_start_addr = self._alloc_list[alloc_idx]['start'] idx_end_addr = self._alloc_list[alloc_idx]['end'] next_start_addr = self._alloc_list[alloc_idx+1]['start'] if next_start_addr <= idx_end_addr: raise Exception( 'Allocation Lists Overlapping: %s' %(alloc_list)) size += idx_end_addr - idx_start_addr + 1 size += self._alloc_list[alloc_count-1]['end'] - self._alloc_list[alloc_count-1]['start'] + 1 if max_alloc == 0: self._max_alloc = size else: self._max_alloc = max_alloc self._zookeeper_client = zookeeper_client self._path = path self._in_use = bitarray('0') self._reverse = reverse for idx in self._zookeeper_client.get_children(path): idx_int = self._get_bit_from_zk_index(int(idx)) if idx_int >= 0: self._set_in_use(idx_int) # end for idx # end __init__ def _get_zk_index_from_bit(self, idx): size = idx if self._reverse: for alloc in reversed(self._alloc_list): size -= alloc['end'] - alloc['start'] + 1 if size < 0: return alloc['start']-size - 1 else: for alloc in self._alloc_list: size 
-= alloc['end'] - alloc['start'] + 1 if size < 0: return alloc['end']+size + 1 raise ResourceExhaustionError( 'Cannot get zk index from bit %s' %(idx)) # end _get_zk_index def _get_bit_from_zk_index(self, idx): size = 0 if self._reverse: for alloc in reversed(self._alloc_list): if alloc['start'] <= idx <= alloc['end']: return alloc['end'] - idx + size size += alloc['end'] - alloc['start'] + 1 pass else: for alloc in self._alloc_list: if alloc['start'] <= idx <= alloc['end']: return idx - alloc['start'] + size size += alloc['end'] - alloc['start'] + 1 return -1 # end _get_bit_from_zk_index def _set_in_use(self, bitnum): # if the index is higher than _max_alloc, do not use the bitarray, in # order to reduce the size of the bitarray. Otherwise, set the bit # corresponding to idx to 1 and extend the _in_use bitarray if needed if bitnum > self._max_alloc: return if bitnum >= self._in_use.length(): temp = bitarray(bitnum - self._in_use.length()) temp.setall(0) temp.append('1') self._in_use.extend(temp) else: self._in_use[bitnum] = 1 # end _set_in_use def _reset_in_use(self, bitnum): # if the index is higher than _max_alloc, do not use the bitarray, in # order to reduce the size of the bitarray. 
Otherwise, set the bit # corresponding to idx to 1 and extend the _in_use bitarray if needed if bitnum > self._max_alloc: return if bitnum >= self._in_use.length(): return else: self._in_use[bitnum] = 0 # end _reset_in_use def set_in_use(self, idx): bit_idx = self._get_bit_from_zk_index(idx) if bit_idx < 0: return self._set_in_use(bit_idx) # end set_in_use def reset_in_use(self, idx): bit_idx = self._get_bit_from_zk_index(idx) if bit_idx < 0: return self._reset_in_use(bit_idx) # end reset_in_use def get_alloc_count(self): return self._in_use.count() # end get_alloc_count def alloc(self, value=None): # Allocates a index from the allocation list if self._in_use.all(): idx = self._in_use.length() if idx > self._max_alloc: raise ResourceExhaustionError() self._in_use.append(1) else: idx = self._in_use.index(0) self._in_use[idx] = 1 idx = self._get_zk_index_from_bit(idx) try: # Create a node at path and return its integer value id_str = "%(#)010d" % {'#': idx} self._zookeeper_client.create_node(self._path + id_str, value) return idx except ResourceExistsError: return self.alloc(value) # end alloc def reserve(self, idx, value=None): # Reserves the requested index if available if not self._start_idx <= idx < self._start_idx + self._size: return None try: # Create a node at path and return its integer value id_str = "%(#)010d" % {'#': idx} self._zookeeper_client.create_node(self._path + id_str, value) self.set_in_use(idx) return idx except ResourceExistsError: self.set_in_use(idx) existing_value = self.read(idx) if (value == existing_value): # idempotent reserve return idx msg = 'For index %s reserve conflicts with existing value %s.' 
\ %(idx, existing_value) self._zookeeper_client.syslog(msg, level='notice') raise # end reserve def delete(self, idx): id_str = "%(#)010d" % {'#': idx} self._zookeeper_client.delete_node(self._path + id_str) bit_idx = self._get_bit_from_zk_index(idx) if 0 <= bit_idx < self._in_use.length(): self._in_use[bit_idx] = 0 # end delete def read(self, idx): id_str = "%(#)010d" % {'#': idx} id_val = self._zookeeper_client.read_node(self._path+id_str) if id_val is not None: bit_idx = self._get_bit_from_zk_index(idx) if bit_idx >= 0: self._set_in_use(bit_idx) return id_val # end read def empty(self): return not self._in_use.any() # end empty @classmethod def delete_all(cls, zookeeper_client, path): try: zookeeper_client.delete_node(path, recursive=True) except kazoo.exceptions.NotEmptyError: #TODO: Add retries for NotEmptyError zookeeper_client.syslog("NotEmptyError while deleting %s" % path) # end delete_all #end class IndexAllocator class ZookeeperClient(object): def __init__(self, module, server_list, logging_fn=None): # logging logger = logging.getLogger(module) logger.setLevel(logging.DEBUG) try: handler = logging.handlers.RotatingFileHandler( LOG_DIR + module + '-zk.log', maxBytes=10*1024*1024, backupCount=5) except IOError: print "Cannot open log file in %s" %(LOG_DIR) else: log_format = logging.Formatter('%(asctime)s [%(name)s]: %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p') handler.setFormatter(log_format) logger.addHandler(handler) if logging_fn: self.log = logging_fn else: self.log = self.syslog # KazooRetry to retry keeper CRUD operations self._retry = KazooRetry(max_tries=None, max_delay=300, sleep_func=gevent.sleep) self._zk_client = kazoo.client.KazooClient( server_list, timeout=400, handler=kazoo.handlers.gevent.SequentialGeventHandler(), logger=logger, connection_retry=self._retry, command_retry=self._retry) self._zk_client.add_listener(self._zk_listener) self._logger = logger self._election = None self._server_list = server_list self._conn_state = None 
self._sandesh_connection_info_update(status='INIT', message='') self._lost_cb = None self._suspend_cb = None self.connect() # end __init__ # start def connect(self): while True: try: self._zk_client.start() break except gevent.event.Timeout as e: # Update connection info self._sandesh_connection_info_update(status='DOWN', message=str(e)) gevent.sleep(1) # Zookeeper is also throwing exception due to delay in master election except Exception as e: # Update connection info self._sandesh_connection_info_update(status='DOWN', message=str(e)) gevent.sleep(1) # Update connection info self._sandesh_connection_info_update(status='UP', message='') # end def is_connected(self): return self._zk_client.state == KazooState.CONNECTED # end is_connected def syslog(self, msg, *args, **kwargs): if not self._logger: return level = kwargs.get('level', 'info') if isinstance(level, int): from pysandesh.sandesh_logger import SandeshLogger level = SandeshLogger.get_py_logger_level(level) self._logger.log(level, msg) return log_method = getattr(self._logger, level, self._logger.info) log_method(msg) # end syslog def set_lost_cb(self, lost_cb=None): # set a callback to be called when kazoo state is lost # set to None for default action self._lost_cb = lost_cb # end set_lost_cb def set_suspend_cb(self, suspend_cb=None): # set a callback to be called when kazoo state is suspend # set to None for default action self._suspend_cb = suspend_cb # end set_suspend_cb def _zk_listener(self, state): if state == KazooState.CONNECTED: if self._election: self._election.cancel() # Update connection info self._sandesh_connection_info_update(status='UP', message='') elif state == KazooState.LOST: # Lost the session with ZooKeeper Server # Best of option we have is to exit the process and restart all # over again self._sandesh_connection_info_update(status='DOWN', message='Connection to Zookeeper lost') if self._lost_cb: self._lost_cb() else: os._exit(2) elif state == KazooState.SUSPENDED: # Update 
connection info self._sandesh_connection_info_update(status='INIT', message = 'Connection to zookeeper lost. Retrying') if self._suspend_cb: self._suspend_cb() # end def master_election(self, path, identifier, func, *args, **kwargs): self._election = self._zk_client.Election(path, identifier) self._election.run(func, *args, **kwargs) # end master_election def create_node(self, path, value=None): try: if value is None: value = uuid.uuid4() retry = self._retry.copy() retry(self._zk_client.create, path, str(value), makepath=True) except kazoo.exceptions.NodeExistsError: current_value = self.read_node(path) if current_value == value: return True; raise ResourceExistsError(path, str(current_value), 'zookeeper') # end create_node def delete_node(self, path, recursive=False): try: retry = self._retry.copy() retry(self._zk_client.delete, path, recursive=recursive) except kazoo.exceptions.NoNodeError: pass except Exception as e: raise e # end delete_node def read_node(self, path, include_timestamp=False): try: retry = self._retry.copy() value = retry(self._zk_client.get, path) if include_timestamp: return value return value[0] except Exception: return None # end read_node def get_children(self, path): try: retry = self._retry.copy() return retry(self._zk_client.get_children, path) except Exception: return [] # end read_node def exists(self, path): try: retry = self._retry.copy() return retry(self._zk_client.exists, path) except Exception: return [] # end exists def _sandesh_connection_info_update(self, status, message): from pysandesh.connection_info import ConnectionState from pysandesh.gen_py.process_info.ttypes import ConnectionStatus from pysandesh.gen_py.process_info.ttypes import ConnectionType as ConnType from pysandesh.gen_py.sandesh.ttypes import SandeshLevel new_conn_state = getattr(ConnectionStatus, status) ConnectionState.update(conn_type = ConnType.ZOOKEEPER, name = 'Zookeeper', status = new_conn_state, message = message, server_addrs = 
self._server_list.split(',')) if (self._conn_state and self._conn_state != ConnectionStatus.DOWN and new_conn_state == ConnectionStatus.DOWN): msg = 'Connection to Zookeeper down: %s' %(message) self.log(msg, level=SandeshLevel.SYS_ERR) if (self._conn_state and self._conn_state != new_conn_state and new_conn_state == ConnectionStatus.UP): msg = 'Connection to Zookeeper ESTABLISHED' self.log(msg, level=SandeshLevel.SYS_NOTICE) self._conn_state = new_conn_state # end _sandesh_connection_info_update # end class ZookeeperClient
tcpcloud/contrail-controller
src/config/common/zkclient.py
Python
apache-2.0
14,983
#==========================================================================
#
#   Copyright NumFOCUS
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#          http://www.apache.org/licenses/LICENSE-2.0.txt
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#
#==========================================================================*/

# GeodesicActiveContourImageFilter.py
# Translated by Charl P. Botha <http://cpbotha.net/> from the cxx original.
# $Id: GeodesicActiveContourImageFilter.py,v 1.1 2006/09/06 20:58:42 glehmann
# Exp $

# example runs:
# ------------
# 1. Left ventricle:
# python GeodesicActiveContourImageFilter.py \
# ../Data/BrainProtonDensitySlice.png lventricle.png \
# 81 114 5 1 -0.5 3 2
#
# 2. White matter:
# python GeodesicActiveContourImageFilter.py \
# ../Data/BrainProtonDensitySlice.png wmatter.png \
# 56 92 5 1 -0.3 2 10
#
# See the ITK Software Guide, section 9.3.3 "Geodesic Active Contours
# Segmentation" as well as the CXX example for more comments.

import itk
from sys import argv, stderr
import os

# Report pipeline progress to the console (level 2 = per-filter progress).
itk.auto_progress(2)


def main():
    """Run the geodesic-active-contour segmentation pipeline.

    Command line (see usage message below):
        inputImage outputImage seedX seedY InitialDistance
        Sigma SigmoidAlpha SigmoidBeta PropagationScaling

    Writes the thresholded segmentation to ``outputImage`` plus several
    intermediate images (PNG and MHA) into the output image's directory.
    """
    # argv[0] plus 9 positional parameters are required.
    if len(argv) < 10:
        errMsg = "Missing parameters\n" \
                 "Usage: %s\n" % (argv[0],) + \
                 " inputImage outputImage\n" \
                 " seedX seedY InitialDistance\n" \
                 " Sigma SigmoidAlpha SigmoidBeta\n" \
                 " PropagationScaling\n"
        print(errMsg, file=stderr)
        return

    # We're going to build the following pipelines:
    # 1. reader -> smoothing -> gradientMagnitude -> sigmoid -> FI
    # 2. fastMarching -> geodesicActiveContour(FI) -> thresholder -> writer
    # The output of pipeline 1 is a feature image that is used by the
    # geodesicActiveContour object.  Also see figure 9.18 in the ITK
    # Software Guide.

    # we want to know what is happening
    # itk.auto_progress(True)

    # Internal computations run in float; the written label image is uchar.
    InternalPixelType = itk.F
    Dimension = 2
    InternalImageType = itk.Image[InternalPixelType, Dimension]

    OutputPixelType = itk.UC
    OutputImageType = itk.Image[OutputPixelType, Dimension]

    reader = itk.ImageFileReader[InternalImageType].New(FileName=argv[1])
    # needed to give the size to the fastmarching filter
    reader.Update()

    outputDirectory = os.path.dirname(argv[2])

    # Pipeline 1: edge-preserving smoothing, then gradient magnitude, then a
    # sigmoid remap to produce the speed/feature image for the level set.
    smoothing = itk.CurvatureAnisotropicDiffusionImageFilter[
        InternalImageType, InternalImageType].New(
        reader,
        TimeStep=0.125,
        NumberOfIterations=5,
        ConductanceParameter=9.0)

    gradientMagnitude = itk.GradientMagnitudeRecursiveGaussianImageFilter[
        InternalImageType, InternalImageType].New(
        smoothing,
        Sigma=float(argv[6]))

    # NOTE(review): OutputMaximum is 1.1 here while the CXX example uses 1.0;
    # presumably intentional for this test, but worth confirming.
    sigmoid = itk.SigmoidImageFilter[InternalImageType, InternalImageType].New(
        gradientMagnitude,
        OutputMinimum=0.0,
        OutputMaximum=1.1,
        Alpha=float(argv[7]),
        Beta=float(argv[8]))

    # Seed for the fast-marching initial level set; the negative value places
    # the zero level set InitialDistance pixels away from the seed.
    seedPosition = itk.Index[2]()
    seedPosition.SetElement(0, int(argv[3]))
    seedPosition.SetElement(1, int(argv[4]))

    node = itk.LevelSetNode[InternalPixelType, Dimension]()
    node.SetValue(-float(argv[5]))
    node.SetIndex(seedPosition)

    seeds = itk.VectorContainer[
        itk.UI, itk.LevelSetNode[InternalPixelType, Dimension]].New()
    seeds.Initialize()
    seeds.InsertElement(0, node)

    # Pipeline 2: fast marching builds the initial level set image.
    fastMarching = itk.FastMarchingImageFilter[
        InternalImageType, InternalImageType].New(
        sigmoid,
        TrialPoints=seeds,
        SpeedConstant=1.0,
        OutputSize=reader.GetOutput().GetBufferedRegion().GetSize())

    geodesicActiveContour = itk.GeodesicActiveContourLevelSetImageFilter[
        InternalImageType, InternalImageType, InternalPixelType].New(
        fastMarching,
        # it is required to use the explicitly the FeatureImage
        # - itk segfault without that :-(
        FeatureImage=sigmoid.GetOutput(),
        PropagationScaling=float(argv[9]),
        CurvatureScaling=1.0,
        AdvectionScaling=1.0,
        MaximumRMSError=0.02,
        NumberOfIterations=800)

    # The inside of the contour (negative level-set values) becomes 255.
    thresholder = itk.BinaryThresholdImageFilter[
        InternalImageType, OutputImageType].New(
        geodesicActiveContour,
        LowerThreshold=-1000,
        UpperThreshold=0,
        OutsideValue=0,
        InsideValue=255)

    writer = itk.ImageFileWriter[OutputImageType].New(
        thresholder,
        FileName=argv[2])

    def rescaleAndWrite(filter, fileName):
        # Rescale a float intermediate image to [0, 255] and save it as PNG
        # next to the main output image.
        caster = itk.RescaleIntensityImageFilter[
            InternalImageType, OutputImageType].New(
            filter,
            OutputMinimum=0,
            OutputMaximum=255)
        itk.imwrite(caster, os.path.join(outputDirectory, fileName))

    # Dump intermediate stages for inspection/debugging.
    rescaleAndWrite(smoothing, "GeodesicActiveContourImageFilterOutput1.png")
    rescaleAndWrite(
        gradientMagnitude,
        "GeodesicActiveContourImageFilterOutput2.png")
    rescaleAndWrite(sigmoid, "GeodesicActiveContourImageFilterOutput3.png")
    rescaleAndWrite(
        fastMarching,
        "GeodesicActiveContourImageFilterOutput4.png")

    # Execute the full segmentation pipeline.
    writer.Update()

    print("")
    print(
        "Max. no. iterations: %d" %
        (geodesicActiveContour.GetNumberOfIterations()))
    print(
        "Max. RMS error: %.3f" %
        (geodesicActiveContour.GetMaximumRMSError()))
    print("")
    print(
        "No. elapsed iterations: %d" %
        (geodesicActiveContour.GetElapsedIterations()))
    print("RMS change: %.3f" % (geodesicActiveContour.GetRMSChange()))

    # Also store float-precision intermediates in MetaImage format.
    itk.imwrite(fastMarching,
                os.path.join(outputDirectory,
                             "GeodesicActiveContourImageFilterOutput4.mha"))
    itk.imwrite(sigmoid,
                os.path.join(outputDirectory,
                             "GeodesicActiveContourImageFilterOutput3.mha"))
    itk.imwrite(gradientMagnitude,
                os.path.join(outputDirectory,
                             "GeodesicActiveContourImageFilterOutput2.mha"))


if __name__ == "__main__":
    main()
malaterre/ITK
Modules/Segmentation/LevelSets/wrapping/test/GeodesicActiveContourImageFilterTest.py
Python
apache-2.0
6,301
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package flex.messaging.services.http;
azureplus/flex-blazeds
modules/proxy/src/flex/messaging/services/http/package-info.java
Java
apache-2.0
857
<?php

/**
 * Copyright (c) 2017 Cornell University
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Created by PhpStorm.
 * User: ch3
 * Date: 8/9/2017
 * Time: 10:10 AM
 */

namespace App\ResAppBundle\Controller;

//use Sensio\Bundle\FrameworkExtraBundle\Configuration\Method;
//use Sensio\Bundle\FrameworkExtraBundle\Configuration\Route;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Template;
use Symfony\Component\Routing\Annotation\Route;
use Symfony\Component\HttpFoundation\Request;
use App\UserdirectoryBundle\Controller\UserRequestController;

/**
 * Site-specific user account request controller for the Residency
 * Applications ("resapp") site.
 *
 * Every action delegates to the shared UserRequestController; this subclass
 * only rebinds the routes under the resapp URL namespace and configures the
 * site identity fields in its constructor.
 *
 * NOTE(review): the constructor does not call parent::__construct(); this
 * appears intentional (the parent presumably has no required constructor
 * logic), but verify against UserRequestController.
 */
class ResAppUserRequestController extends UserRequestController
{
    public function __construct()
    {
        // Site identity consumed by the parent controller's actions.
        $this->siteName = 'resapp';
        $this->siteNameShowuser = 'resapp';
        $this->siteNameStr = 'Residency Applications';
        $this->roleEditor = 'ROLE_RESAPP_COORDINATOR';
    }

    /**
     * Displays a form to create a new UserRequest entity.
     *
     * @Route("/account-requests/new", name="resapp_accountrequest_new", methods={"GET"})
     * @Template("AppUserdirectoryBundle/UserRequest/account_request.html.twig")
     */
    public function newAction()
    {
        return parent::newAction();
    }

    /**
     * Creates a new UserRequest entity.
     *
     * @Route("/account-requests/new", name="resapp_accountrequest_create", methods={"POST"})
     * @Template("AppUserdirectoryBundle/UserRequest/account_request.html.twig")
     */
    public function createAction(Request $request)
    {
        return parent::createAction($request);
    }

    /**
     * Lists all UserRequest entities.
     *
     * @Route("/account-requests", name="resapp_accountrequest", methods={"GET"})
     * @Template("AppUserdirectoryBundle/UserRequest/index.html.twig")
     */
    public function indexAction( Request $request )
    {
        return parent::indexAction($request);
    }

    /**
     * Changes the status of an existing account request.
     *
     * @Route("/account-requests/{id}/{status}/status", name="resapp_accountrequest_status", methods={"GET"}, requirements={"id" = "\d+"})
     * @Template("AppUserdirectoryBundle/UserRequest/index.html.twig")
     */
    public function statusAction($id, $status)
    {
        return parent::statusAction($id,$status);
    }

    /**
     * Update (Approve) a new UserRequest entity.
     *
     * @Route("/account-requests-approve", name="resapp_accountrequest_approve", methods={"POST"})
     * @Template("AppUserdirectoryBundle/UserRequest/index.html.twig")
     */
    public function approveUserAccountRequestAction(Request $request)
    {
        return parent::approveUserAccountRequestAction($request);
    }
}
victorbrodsky/order-lab
orderflex/src/App/ResAppBundle/Controller/ResAppUserRequestController.php
PHP
apache-2.0
3,130
/*global emp, cmapi */

// Register a channel handler for MAP_FEATURE_PLOT_URL.
//
// plot.url can load kml, geojson, and WMS-style services.
// kml and geojson payloads become emp.typeLibrary.Feature items (queued in a
// FEATURE_ADD transaction); wms / wmts / kmllayer payloads become map service
// items (queued in a MAP_SERVICE_ADD transaction). A single message may mix
// both kinds — an edge case, but valid according to CMAPI — which is why two
// separate transactions can be queued from one message.
cmapi.channel.handler[cmapi.channel.names.MAP_FEATURE_PLOT_URL] = {
  // args will have a message and sender property
  process: function (args) {
    var featureTransaction,
      mapServiceTransaction,
      i,
      len,
      mapServiceItems = [],
      featureItems = [],
      message = args.message,
      sender = args.sender,
      payload,
      //schema,
      item,
      visible,
      layers,
      bUseProxy;

    // get schema for this channel
    // can use for channel specific validation
    // schema = cmapi.channel.schema["map.feature.plot.url"];

    // CMAPI allows a single payload object or an array of payloads.
    if (!Array.isArray(message.payload)) {
      message.payload = [message.payload];
    }
    len = message.payload.length;

    for (i = 0; i < len; i = i + 1) {
      payload = message.payload[i];
      item = {};
      layers = [];

      // determine if we need to use the proxy.
      bUseProxy = payload.useProxy;

      // Default the overlay to the sending widget's id.
      if ((payload.overlayId === undefined) || (payload.overlayId === null)) {
        payload.overlayId = sender.id;
      }

      // Visible unless the payload explicitly says false.
      visible = true;
      if (payload.visible !== undefined && payload.visible === false) {
        visible = false;
      }

      if (!payload.hasOwnProperty('format')) {
        payload.format = 'kml';
      }

      switch (payload.format.toLowerCase()) {
        case "wms":
          // params.layers may arrive as a comma-separated string or as an
          // array. FIX: previously the Array.isArray check sat in the else
          // branch of the string-emptiness test, so an array value could
          // never be taken as an array (and a non-empty array would have
          // been routed into String#split). Check the array form first,
          // then fall back to the string form.
          if (payload.hasOwnProperty("params") &&
              payload.params.hasOwnProperty("layers")) {
            if (Array.isArray(payload.params.layers)) {
              layers = payload.params.layers;
              delete payload.params.layers;
            } else if (!emp.util.isEmptyString(payload.params.layers)) {
              layers = payload.params.layers.split(",");
              // We need to remove the layers parameter.
              delete payload.params.layers;
            }
          }
          item = new emp.typeLibrary.WMS({
            id: payload.featureId,
            overlayId: payload.overlayId,
            visible: visible,
            layers: layers,
            zoom: payload.zoom,
            name: payload.name,
            format: payload.format,
            url: payload.url,
            useProxy: bUseProxy,
            params: payload.params,
            transactionId: message.messageId,
            messageId: payload.messageId,
            intent: emp.intents.control.MAP_SERVICE_ADD
          });
          mapServiceItems.push(item);
          break;
        case "wmts":
          item = new emp.typeLibrary.WMTS({
            id: payload.featureId,
            overlayId: payload.overlayId,
            visible: visible,
            name: payload.name,
            layer: payload.layer,
            format: payload.format,
            url: payload.url,
            useProxy: bUseProxy,
            params: payload.params,
            transactionId: message.messageId,
            messageId: payload.messageId,
            intent: emp.intents.control.MAP_SERVICE_ADD
          });
          mapServiceItems.push(item);
          break;
        case "kmllayer":
          item = new emp.typeLibrary.KmlLayer({
            id: payload.featureId,
            overlayId: payload.overlayId,
            visible: visible,
            name: payload.name,
            kmlData: payload.kmlString,
            format: payload.format,
            url: payload.url,
            useProxy: bUseProxy,
            transactionId: message.messageId,
            messageId: payload.messageId,
            intent: emp.intents.control.MAP_SERVICE_ADD
          });
          mapServiceItems.push(item);
          break;
        case "geojson":
        default:
          item = new emp.typeLibrary.Feature({
            featureId: payload.featureId,
            parentId: payload.parentId,
            overlayId: payload.overlayId,
            visible: visible,
            zoom: payload.zoom,
            name: payload.name,
            format: payload.format,
            url: payload.url,
            params: payload.params,
            properties: payload.properties
          });
          item.validate();
          featureItems.push(item);
          break;
      }
    }

    // Queue one transaction per item kind actually produced.
    if (featureItems.length > 0) {
      featureTransaction = new emp.typeLibrary.Transaction({
        intent: emp.intents.control.FEATURE_ADD,
        mapInstanceId: args.mapInstanceId,
        transactionId: message.messageId,
        sender: sender.id,
        originChannel: cmapi.channel.names.MAP_FEATURE_PLOT_URL,
        source: emp.api.cmapi.SOURCE,
        originalMessage: args.originalMessage,
        messageOriginator: sender.id,
        originalMessageType: cmapi.channel.names.MAP_FEATURE_PLOT_URL,
        items: featureItems
      });
      featureTransaction.queue();
    }
    if (mapServiceItems.length > 0) {
      mapServiceTransaction = new emp.typeLibrary.Transaction({
        intent: emp.intents.control.MAP_SERVICE_ADD,
        mapInstanceId: args.mapInstanceId,
        transactionId: message.messageId,
        sender: sender.id,
        originChannel: cmapi.channel.names.MAP_FEATURE_PLOT_URL,
        source: emp.api.cmapi.SOURCE,
        originalMessage: args.originalMessage,
        messageOriginator: sender.id,
        originalMessageType: cmapi.channel.names.MAP_FEATURE_PLOT_URL,
        items: mapServiceItems
      });
      mapServiceTransaction.queue();
    }
  }
};
missioncommand/emp3-web
src/sdk/core/api/cmapi/channel/handler/map.feature.plot.url.js
JavaScript
apache-2.0
6,584
/* * Copyright (c) 2014 Spotify AB. * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.spotify.docker.client.messages; import com.google.common.base.Objects; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Date; import java.util.List; import java.util.Map; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.ANY; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; @JsonAutoDetect(fieldVisibility = ANY, setterVisibility = NONE, getterVisibility = NONE) public class ContainerInfo { @JsonProperty("Id") private String id; @JsonProperty("Created") private Date created; @JsonProperty("Path") private String path; @JsonProperty("Args") private ImmutableList<String> args; @JsonProperty("Config") private ContainerConfig config; @JsonProperty("HostConfig") private HostConfig hostConfig; @JsonProperty("State") private ContainerState state; @JsonProperty("Image") private String image; @JsonProperty("NetworkSettings") private NetworkSettings networkSettings; @JsonProperty("ResolvConfPath") private String resolvConfPath; 
@JsonProperty("HostnamePath") private String hostnamePath; @JsonProperty("HostsPath") private String hostsPath; @JsonProperty("Name") private String name; @JsonProperty("Driver") private String driver; @JsonProperty("ExecDriver") private String execDriver; @JsonProperty("ProcessLabel") private String processLabel; @JsonProperty("MountLabel") private String mountLabel; @JsonProperty("Volumes") private ImmutableMap<String, String> volumes; @JsonProperty("VolumesRW") private ImmutableMap<String, Boolean> volumesRW; public String id() { return id; } public Date created() { return created == null ? null : new Date(created.getTime()); } public String path() { return path; } public List<String> args() { return args; } public ContainerConfig config() { return config; } public HostConfig hostConfig() { return hostConfig; } public ContainerState state() { return state; } public String image() { return image; } public NetworkSettings networkSettings() { return networkSettings; } public String resolvConfPath() { return resolvConfPath; } public String hostnamePath() { return hostnamePath; } public String hostsPath() { return hostsPath; } public String name() { return name; } public String driver() { return driver; } public String execDriver() { return execDriver; } public String processLabel() { return processLabel; } public String mountLabel() { return mountLabel; } public Map<String, String> volumes() { return volumes; } public Map<String, Boolean> volumesRW() { return volumesRW; } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } final ContainerInfo that = (ContainerInfo) o; if (args != null ? !args.equals(that.args) : that.args != null) { return false; } if (config != null ? !config.equals(that.config) : that.config != null) { return false; } if (hostConfig != null ? !hostConfig.equals(that.hostConfig) : that.hostConfig != null) { return false; } if (created != null ? 
!created.equals(that.created) : that.created != null) { return false; } if (driver != null ? !driver.equals(that.driver) : that.driver != null) { return false; } if (execDriver != null ? !execDriver.equals(that.execDriver) : that.execDriver != null) { return false; } if (hostnamePath != null ? !hostnamePath.equals(that.hostnamePath) : that.hostnamePath != null) { return false; } if (hostsPath != null ? !hostsPath.equals(that.hostsPath) : that.hostsPath != null) { return false; } if (id != null ? !id.equals(that.id) : that.id != null) { return false; } if (image != null ? !image.equals(that.image) : that.image != null) { return false; } if (mountLabel != null ? !mountLabel.equals(that.mountLabel) : that.mountLabel != null) { return false; } if (name != null ? !name.equals(that.name) : that.name != null) { return false; } if (networkSettings != null ? !networkSettings.equals(that.networkSettings) : that.networkSettings != null) { return false; } if (path != null ? !path.equals(that.path) : that.path != null) { return false; } if (processLabel != null ? !processLabel.equals(that.processLabel) : that.processLabel != null) { return false; } if (resolvConfPath != null ? !resolvConfPath.equals(that.resolvConfPath) : that.resolvConfPath != null) { return false; } if (state != null ? !state.equals(that.state) : that.state != null) { return false; } if (volumes != null ? !volumes.equals(that.volumes) : that.volumes != null) { return false; } if (volumesRW != null ? !volumesRW.equals(that.volumesRW) : that.volumesRW != null) { return false; } return true; } @Override public int hashCode() { int result = id != null ? id.hashCode() : 0; result = 31 * result + (created != null ? created.hashCode() : 0); result = 31 * result + (path != null ? path.hashCode() : 0); result = 31 * result + (args != null ? args.hashCode() : 0); result = 31 * result + (config != null ? config.hashCode() : 0); result = 31 * result + (hostConfig != null ? 
hostConfig.hashCode() : 0); result = 31 * result + (state != null ? state.hashCode() : 0); result = 31 * result + (image != null ? image.hashCode() : 0); result = 31 * result + (networkSettings != null ? networkSettings.hashCode() : 0); result = 31 * result + (resolvConfPath != null ? resolvConfPath.hashCode() : 0); result = 31 * result + (hostnamePath != null ? hostnamePath.hashCode() : 0); result = 31 * result + (hostsPath != null ? hostsPath.hashCode() : 0); result = 31 * result + (name != null ? name.hashCode() : 0); result = 31 * result + (driver != null ? driver.hashCode() : 0); result = 31 * result + (execDriver != null ? execDriver.hashCode() : 0); result = 31 * result + (processLabel != null ? processLabel.hashCode() : 0); result = 31 * result + (mountLabel != null ? mountLabel.hashCode() : 0); result = 31 * result + (volumes != null ? volumes.hashCode() : 0); result = 31 * result + (volumesRW != null ? volumesRW.hashCode() : 0); return result; } @Override public String toString() { return Objects.toStringHelper(this) .add("id", id) .add("created", created) .add("path", path) .add("args", args) .add("config", config) .add("hostConfig", hostConfig) .add("state", state) .add("image", image) .add("networkSettings", networkSettings) .add("resolvConfPath", resolvConfPath) .add("hostnamePath", hostnamePath) .add("hostsPath", hostsPath) .add("name", name) .add("driver", driver) .add("execDriver", execDriver) .add("processLabel", processLabel) .add("mountLabel", mountLabel) .add("volumes", volumes) .add("volumesRW", volumesRW) .toString(); } }
la3lma/docker-client
src/main/java/com/spotify/docker/client/messages/ContainerInfo.java
Java
apache-2.0
8,429
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * A {@link ThreadFactory} that names each created thread after its pool:
 * threads are called {@code "<poolName>-<n>"} with {@code n} counting up
 * from 1. Created threads are always non-daemon and run at
 * {@link Thread#NORM_PRIORITY}.
 */
public class NamedThreadPoolFactory implements ThreadFactory {

	private final ThreadGroup threadGroup;
	private final AtomicInteger nextThreadNumber = new AtomicInteger(1);
	private final String namePrefix;

	/**
	 * @param poolName the base name; each thread gets this name plus "-" and
	 *                 a running number
	 */
	public NamedThreadPoolFactory(String poolName) {
		// Prefer the SecurityManager's thread group when one is installed,
		// otherwise use the group of the constructing thread.
		SecurityManager securityManager = System.getSecurityManager();
		if (securityManager != null) {
			this.threadGroup = securityManager.getThreadGroup();
		} else {
			this.threadGroup = Thread.currentThread().getThreadGroup();
		}
		this.namePrefix = poolName + "-";
	}

	@Override
	public Thread newThread(Runnable runnable) {
		String threadName = this.namePrefix + this.nextThreadNumber.getAndIncrement();
		Thread thread = new Thread(this.threadGroup, runnable, threadName, 0);
		// Normalize inherited attributes: never daemon, always normal priority.
		if (thread.isDaemon()) {
			thread.setDaemon(false);
		}
		if (thread.getPriority() != Thread.NORM_PRIORITY) {
			thread.setPriority(Thread.NORM_PRIORITY);
		}
		return thread;
	}
}
4treesCH/strolch
li.strolch.utils/src/main/java/li/strolch/utils/NamedThreadPoolFactory.java
Java
apache-2.0
934
////////////////////////////////////////////////////////////////////////////
//	Module 		: alife_simulator_base2.cpp
//	Created 	: 25.12.2002
//  Modified 	: 12.05.2004
//	Author		: Dmitriy Iassenev
//	Description : ALife Simulator base class
////////////////////////////////////////////////////////////////////////////

#include "stdafx.h"
#include "alife_simulator_base.h"
#include "relation_registry.h"
#include "alife_registry_wrappers.h"
#include "xrServer_Objects_ALife_Items.h"
#include "alife_graph_registry.h"
#include "alife_object_registry.h"
#include "alife_story_registry.h"
#include "alife_schedule_registry.h"
#include "alife_smart_terrain_registry.h"
#include "alife_group_registry.h"

using namespace ALife;

// Registers a server object with every simulator registry (graph, schedule,
// story, smart terrains, groups). If the object is an attached inventory item,
// it is also linked into its parent's children list. Registration order
// matters: on_before_register() runs first, on_register() last (and only when
// can_register_objects() allows it).
void CALifeSimulatorBase::register_object		(CSE_ALifeDynamicObject *object, bool add_object)
{
	object->on_before_register		();

	// The caller decides whether the object registry itself gets the object.
	if (add_object)
		objects().add				(object);
	graph().update					(object);
	scheduled().add					(object);
	story_objects().add				(object->m_story_id,object);
	smart_terrains().add			(object);
	groups().add					(object);
	setup_simulator					(object);

	// Attached inventory items must also be recorded on their parent object.
	CSE_ALifeInventoryItem			*item = smart_cast<CSE_ALifeInventoryItem*>(object);
	if (item && item->attached()) {
		CSE_ALifeDynamicObject		*II = objects().object(item->base()->ID_Parent);
#ifdef DEBUG
		// Guard against double attachment: fatal in debug builds.
		if (std::find(II->children.begin(),II->children.end(),item->base()->ID) != II->children.end()) {
			Msg					("[LSS] Specified item [%s][%d] is already attached to the specified object [%s][%d]",item->base()->name_replace(),item->base()->ID,II->name_replace(),II->ID);
			FATAL				("[LSS] Cannot recover from the previous error!");
		}
#endif
		II->children.push_back		(item->base()->ID);
		II->attach					(item,true,false);
	}

	if (can_register_objects())
		object->on_register			();
}

// Reverses register_object(): removes the object from all registries. An
// attached inventory item is first detached from its parent via the graph.
// Offline objects are additionally removed from the game graph and scheduler;
// online parent-less objects are removed from the level graph instead.
void CALifeSimulatorBase::unregister_object	(CSE_ALifeDynamicObject *object, bool alife_query)
{
	object->on_unregister			();

	CSE_ALifeInventoryItem			*item = smart_cast<CSE_ALifeInventoryItem*>(object);
	if (item && item->attached())
		graph().detach				(*objects().object(item->base()->ID_Parent),item,objects().object(item->base()->ID_Parent)->m_tGraphID,alife_query);

	objects().remove				(object->ID);
	story_objects().remove			(object->m_story_id);
	smart_terrains().remove			(object);
	groups().remove					(object);

	if (!object->m_bOnline) {
		graph().remove				(object,object->m_tGraphID);
		scheduled().remove			(object);
	}
	else
		// NOTE(review): 0xffff presumably means "no parent" — confirm.
		if (object->ID_Parent == 0xffff) {
//			if (object->used_ai_locations())
				graph().level().remove	(object,!object->used_ai_locations());
		}
}

// Death notification: forwards on_death to the creature itself, then — if the
// victim belongs to an online/offline group (m_group_id != 0xffff, the
// apparent "no group" sentinel) — notifies that group of the member's death.
void CALifeSimulatorBase::on_death			(CSE_Abstract *killed, CSE_Abstract *killer)
{
	typedef CSE_ALifeOnlineOfflineGroup::MEMBER	GROUP_MEMBER;

	CSE_ALifeCreatureAbstract		*creature = smart_cast<CSE_ALifeCreatureAbstract*>(killed);
	if (creature)
		creature->on_death			(killer);

	GROUP_MEMBER					*member = smart_cast<GROUP_MEMBER*>(killed);
	if (!member)
		return;

	if (member->m_group_id == 0xffff)
		return;

	groups().object(member->m_group_id).notify_on_member_death	(member);
}
OLR-xray/OLR-3.0
src/xray/xr_3da/xrGame/alife_simulator_base2.cpp
C++
apache-2.0
3,113
package org.insightech.er.db.impl.db2;

import org.insightech.er.db.impl.db2.tablespace.DB2TablespaceProperties;
import org.insightech.er.editor.model.ERDiagram;
import org.insightech.er.editor.model.dbexport.ddl.DDLCreator;
import org.insightech.er.editor.model.diagram_contents.element.node.category.Category;
import org.insightech.er.editor.model.diagram_contents.element.node.table.column.NormalColumn;
import org.insightech.er.editor.model.diagram_contents.not_element.sequence.Sequence;
import org.insightech.er.editor.model.diagram_contents.not_element.tablespace.Tablespace;
import org.insightech.er.util.Check;
import org.insightech.er.util.Format;

/**
 * DDL generator for IBM DB2: extends the generic {@link DDLCreator} with
 * DB2-specific column identity clauses, tablespace DDL, and sequence DDL.
 */
public class DB2DDLCreator extends DDLCreator {

    public DB2DDLCreator(final ERDiagram diagram, final Category targetCategory, final boolean semicolon) {
        super(diagram, targetCategory, semicolon);
    }

    /**
     * {@inheritDoc}
     *
     * <p>Appends DB2's {@code GENERATED ALWAYS AS IDENTITY} clause (with
     * optional {@code START WITH}/{@code INCREMENT BY}) for auto-increment
     * columns. The method name's spelling is inherited from the superclass
     * and cannot be changed here.
     */
    @Override
    protected String getColulmnDDL(final NormalColumn normalColumn) {
        final StringBuilder ddl = new StringBuilder();

        ddl.append(super.getColulmnDDL(normalColumn));

        if (normalColumn.isAutoIncrement()) {
            ddl.append(" GENERATED ALWAYS AS IDENTITY ");

            final Sequence sequence = normalColumn.getAutoIncrementSetting();

            if (sequence.getIncrement() != null || sequence.getStart() != null) {
                ddl.append("(START WITH ");

                // DB2 requires START WITH inside the parenthesized options;
                // default to 1 when only the increment was configured.
                if (sequence.getStart() != null) {
                    ddl.append(sequence.getStart());
                } else {
                    ddl.append("1");
                }

                if (sequence.getIncrement() != null) {
                    ddl.append(", INCREMENT BY ");
                    ddl.append(sequence.getIncrement());
                }

                ddl.append(")");
            }
        }

        return ddl.toString();
    }

    /**
     * Builds {@code CREATE [type] TABLESPACE ...} DDL from the DB2-specific
     * tablespace properties; optional clauses are emitted only when the
     * corresponding property is non-empty.
     */
    @Override
    protected String getDDL(final Tablespace tablespace) {
        final DB2TablespaceProperties tablespaceProperties = (DB2TablespaceProperties) tablespace.getProperties(environment, getDiagram());

        final StringBuilder ddl = new StringBuilder();

        ddl.append("CREATE ");

        if (!Check.isEmpty(tablespaceProperties.getType())) {
            ddl.append(tablespaceProperties.getType());
            ddl.append(" ");
        }

        ddl.append("TABLESPACE ");
        ddl.append(filterName(tablespace.getName()));
        ddl.append(LF());

        if (!Check.isEmpty(tablespaceProperties.getPageSize())) {
            ddl.append(" PAGESIZE ");
            ddl.append(tablespaceProperties.getPageSize());
            ddl.append(LF());
        }

        ddl.append(" MANAGED BY ");
        ddl.append(tablespaceProperties.getManagedBy());
        ddl.append(" USING(");
        ddl.append(tablespaceProperties.getContainer());
        ddl.append(")" + LF());

        if (!Check.isEmpty(tablespaceProperties.getExtentSize())) {
            ddl.append(" EXTENTSIZE ");
            ddl.append(tablespaceProperties.getExtentSize());
            ddl.append(LF());
        }

        if (!Check.isEmpty(tablespaceProperties.getPrefetchSize())) {
            ddl.append(" PREFETCHSIZE ");
            ddl.append(tablespaceProperties.getPrefetchSize());
            ddl.append(LF());
        }

        if (!Check.isEmpty(tablespaceProperties.getBufferPoolName())) {
            ddl.append(" BUFFERPOOL ");
            ddl.append(tablespaceProperties.getBufferPoolName());
            ddl.append(LF());
        }

        if (semicolon) {
            ddl.append(";");
        }

        return ddl.toString();
    }

    /**
     * Builds {@code CREATE SEQUENCE ...} DDL, optionally preceded by the
     * sequence description as an inline SQL comment.
     */
    @Override
    public String getDDL(final Sequence sequence) {
        final StringBuilder ddl = new StringBuilder();

        final String description = sequence.getDescription();
        if (semicolon && !Check.isEmpty(description) && ddlTarget.inlineTableComment) {
            ddl.append("-- ");
            ddl.append(replaceLF(description, LF() + "-- "));
            ddl.append(LF());
        }

        ddl.append("CREATE ");
        ddl.append("SEQUENCE ");
        ddl.append(filterName(getNameWithSchema(sequence.getSchema(), sequence.getName())));

        if (!Check.isEmpty(sequence.getDataType())) {
            ddl.append(" AS ");
            String dataType = sequence.getDataType();
            // Substitute the "(p)" precision placeholder with the configured
            // decimal size.
            // NOTE(review): the closing ")" is concatenated inside the
            // Format.toString argument rather than outside it — the output
            // appears equivalent for non-null sizes, but confirm against
            // Format.toString's null handling before restructuring.
            dataType = dataType.replaceAll("\\(p\\)", "(" + Format.toString(sequence.getDecimalSize() + ")"));
            ddl.append(dataType);
        }

        if (sequence.getIncrement() != null) {
            ddl.append(" INCREMENT BY ");
            ddl.append(sequence.getIncrement());
        }

        if (sequence.getMinValue() != null) {
            ddl.append(" MINVALUE ");
            ddl.append(sequence.getMinValue());
        }

        if (sequence.getMaxValue() != null) {
            ddl.append(" MAXVALUE ");
            ddl.append(sequence.getMaxValue());
        }

        if (sequence.getStart() != null) {
            ddl.append(" START WITH ");
            ddl.append(sequence.getStart());
        }

        // CACHE and NOCACHE are mutually exclusive; NOCACHE wins when set.
        if (!sequence.isNocache() && sequence.getCache() != null) {
            ddl.append(" CACHE ");
            ddl.append(sequence.getCache());
        }

        if (sequence.isCycle()) {
            ddl.append(" CYCLE");
        }

        if (sequence.isNocache()) {
            ddl.append(" NOCACHE");
        }

        if (sequence.isOrder()) {
            ddl.append(" ORDER");
        }

        if (semicolon) {
            ddl.append(";");
        }

        return ddl.toString();
    }
}
roundrop/ermasterr
src/org/insightech/er/db/impl/db2/DB2DDLCreator.java
Java
apache-2.0
5,719
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package com.amazonaws.services.iotdata.model;

import com.amazonaws.AmazonServiceException;

/**
 * <p>
 * The rate exceeds the limit.
 * </p>
 * Service exception thrown by the AWS IoT Data service when a request is
 * throttled; unchecked, as it extends {@link AmazonServiceException}.
 */
public class ThrottlingException extends AmazonServiceException {
    private static final long serialVersionUID = 1L;

    /**
     * Constructs a new ThrottlingException with the specified error message.
     *
     * @param message
     *        Describes the error encountered.
     */
    public ThrottlingException(String message) {
        super(message);
    }
}
sdole/aws-sdk-java
aws-java-sdk-iot/src/main/java/com/amazonaws/services/iotdata/model/ThrottlingException.java
Java
apache-2.0
1,104
/*
 * Copyright 2015 Systemic Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using Sif.Framework.Model.Persistence;
using System;
using System.Collections.Generic;

namespace Sif.Framework.Model.Infrastructure
{

    /// <summary>
    /// Model of a SIF Environment (persistable via its Guid key).
    ///
    /// The following Environment elements/properties are mandatory according to the SIF specification:
    /// /environment[@type]
    /// /environment/authenticationMethod
    /// /environment/consumerName
    /// /environment/applicationInfo/applicationKey
    /// /environment/applicationInfo/supportedInfrastructureVersion
    /// /environment/applicationInfo/supportedDataModel
    /// /environment/applicationInfo/supportedDataModelVersion
    /// </summary>
    public class Environment : IPersistable<Guid>
    {

        /// <summary>
        /// The ID of the Environment as managed by the Environment Provider.
        /// </summary>
        public virtual Guid Id { get; set; }

        /// <summary>
        /// Application information, including the mandatory applicationKey.
        /// </summary>
        public virtual ApplicationInfo ApplicationInfo { get; set; }

        /// <summary>
        /// Defines the way in which the applicationKey can be used to enforce security.
        /// </summary>
        public virtual string AuthenticationMethod { get; set; }

        /// <summary>
        /// A descriptive name for the application that will be readily identifiable to Zone Administrators if it
        /// becomes a Registered Consumer.
        /// </summary>
        public virtual string ConsumerName { get; set; }

        /// <summary>
        /// The default zone used by Consumer (and Provider?) service requests when no Zone is provided with the
        /// request.
        /// </summary>
        public virtual Zone DefaultZone { get; set; }

        /// <summary>
        /// There must be an InfrastructureService element present for each defined Infrastructure Service. The value
        /// of each InfrastructureService Property value subelement defines the URL location of that Infrastructure
        /// Service.
        /// </summary>
        public virtual IDictionary<InfrastructureServiceNames, InfrastructureService> InfrastructureServices { get; set; }

        /// <summary>
        /// Optional identifier for a particular instance of the application.
        /// </summary>
        public virtual string InstanceId { get; set; }

        /// <summary>
        /// Zones provisioned for this Environment, keyed by zone identifier.
        /// </summary>
        public virtual IDictionary<string, ProvisionedZone> ProvisionedZones { get; set; }

        /// <summary>
        /// The ID associated with an instance of the Environment.
        /// </summary>
        public virtual string SessionToken { get; set; }

        /// <summary>
        /// The solution the Application would like to participate in. This is optional only, is advisory, and may be
        /// ignored by the Administrator. If processed it may be reflected in the URLs of the infrastructure services
        /// which are provided in the consumerEnvironment.
        /// </summary>
        public virtual string SolutionId { get; set; }

        /// <summary>
        /// Defines whether the connection to the Environment is DIRECT or BROKERED.
        /// </summary>
        public virtual EnvironmentType Type { get; set; }

        /// <summary>
        /// Optional token identifying the user on whose behalf the application acts.
        /// </summary>
        public virtual string UserToken { get; set; }

        /// <summary>
        /// Creates an empty Environment; all properties must be assigned separately.
        /// </summary>
        public Environment()
        {
        }

        /// <summary>
        /// Creates an Environment, assigning only the supplied non-blank values
        /// (null, empty or whitespace-only arguments are ignored).
        /// </summary>
        /// <param name="applicationKey">Application key; when provided, an ApplicationInfo is created to hold it.</param>
        /// <param name="instanceId">Optional instance identifier.</param>
        /// <param name="userToken">Optional user token.</param>
        /// <param name="solutionId">Optional solution identifier.</param>
        public Environment(string applicationKey, string instanceId = null, string userToken = null, string solutionId = null)
        {

            if (!String.IsNullOrWhiteSpace(applicationKey))
            {
                ApplicationInfo = new ApplicationInfo();
                ApplicationInfo.ApplicationKey = applicationKey;
            }

            if (!String.IsNullOrWhiteSpace(instanceId))
            {
                InstanceId = instanceId;
            }

            if (!String.IsNullOrWhiteSpace(userToken))
            {
                UserToken = userToken;
            }

            if (!String.IsNullOrWhiteSpace(solutionId))
            {
                SolutionId = solutionId;
            }

        }

    }

}
nsip/Sif3Framework-dotNet
Code/Sif3Framework/Sif.Framework/Model/Infrastructure/Environment.cs
C#
apache-2.0
4,699
/*
 * #%L
 * Protempa Framework
 * %%
 * Copyright (C) 2012 - 2013 Emory University
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.protempa;

import org.protempa.valueset.ValueSet;
import java.util.List;
import java.util.Set;
import org.protempa.backend.KnowledgeSourceBackendUpdatedEvent;
import org.protempa.backend.ksb.KnowledgeSourceBackend;

/**
 * Read-only access to Protempa's knowledge base of proposition, abstraction
 * and context definitions. Every read method may throw
 * {@link KnowledgeSourceReadException} if the backing knowledge base cannot
 * be read. Most accessors come in pairs: one overload takes a definition
 * object, the other the definition's string id; lookups by id that match
 * nothing return {@code null} (single result) or presumably an empty
 * collection (list/set results) — NOTE(review): empty-vs-null for the
 * collection overloads is not visible here, confirm against implementations.
 *
 * @author Andrew Post
 */
public interface KnowledgeSource extends
        Source<KnowledgeSourceUpdatedEvent, KnowledgeSourceBackend, KnowledgeSourceBackendUpdatedEvent> {

    /*
     * Existence checks: whether the knowledge base contains a definition of
     * the given kind with the given id.
     */

    boolean hasAbstractionDefinition(String id) throws KnowledgeSourceReadException;

    boolean hasPropositionDefinition(String id) throws KnowledgeSourceReadException;

    boolean hasTemporalPropositionDefinition(String id) throws KnowledgeSourceReadException;

    boolean hasContextDefinition(String id) throws KnowledgeSourceReadException;

    boolean hasValueSet(String id) throws KnowledgeSourceReadException;

    /*
     * Transitive-closure collectors. "AllNarrower" walks every narrowing
     * relationship; "InverseIsA" walks only the inverse-isA hierarchy. The
     * PropDef variants return the definitions themselves, the PropId
     * variants only their ids.
     */

    Set<PropositionDefinition> collectPropDefDescendantsUsingAllNarrower(boolean inDataSourceOnly, String... propIds) throws KnowledgeSourceReadException;

    Set<String> collectPropIdDescendantsUsingAllNarrower(boolean inDataSourceOnly, String... propIds) throws KnowledgeSourceReadException;

    Set<PropositionDefinition> collectPropDefDescendantsUsingInverseIsA(String... propIds) throws KnowledgeSourceReadException;

    Set<String> collectPropIdDescendantsUsingInverseIsA(String... propIds) throws KnowledgeSourceReadException;

    /*
     * abstractedFrom/abstractedInto: navigate the abstraction relationship
     * in both directions.
     */

    List<PropositionDefinition> readAbstractedFrom(AbstractionDefinition propDef) throws KnowledgeSourceReadException;

    List<PropositionDefinition> readAbstractedFrom(String id) throws KnowledgeSourceReadException;

    List<AbstractionDefinition> readAbstractedInto(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<AbstractionDefinition> readAbstractedInto(String propId) throws KnowledgeSourceReadException;

    List<String> readAbstractedIntoPropIds(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<String> readAbstractedIntoPropIds(String id) throws KnowledgeSourceReadException;

    AbstractionDefinition readAbstractionDefinition(String id) throws KnowledgeSourceReadException;

    ContextDefinition readContextDefinition(String id) throws KnowledgeSourceReadException;

    /*
     * isA hierarchy navigation (both directions, definitions or ids).
     */

    List<PropositionDefinition> readInverseIsA(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<PropositionDefinition> readInverseIsA(String id) throws KnowledgeSourceReadException;

    List<PropositionDefinition> readIsA(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<PropositionDefinition> readIsA(String id) throws KnowledgeSourceReadException;

    List<String> readIsAPropIds(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<String> readIsAPropIds(String id) throws KnowledgeSourceReadException;

    /*
     * Context containment: subContexts are contained by a context;
     * subContextOfs are the contexts a context is contained in.
     */

    List<ContextDefinition> readSubContexts(String id) throws KnowledgeSourceReadException;

    List<ContextDefinition> readSubContexts(ContextDefinition contextDef) throws KnowledgeSourceReadException;

    List<ContextDefinition> readSubContextOfs(String id) throws KnowledgeSourceReadException;

    List<ContextDefinition> readSubContextOfs(ContextDefinition contextDef) throws KnowledgeSourceReadException;

    List<String> readSubContextOfPropIds(String id) throws KnowledgeSourceReadException;

    List<String> readSubContextOfPropIds(ContextDefinition contextDef) throws KnowledgeSourceReadException;

    /*
     * Induction: the contexts a temporal proposition induces, and the
     * temporal propositions that induce a context.
     */

    // NOTE(review): the parameter of the String overload is an id; the name
    // tempPropDef is misleading but left untouched here.
    List<ContextDefinition> readInduces(String tempPropDef) throws KnowledgeSourceReadException;

    List<ContextDefinition> readInduces(TemporalPropositionDefinition tempPropDef) throws KnowledgeSourceReadException;

    List<String> readInducesPropIds(String id) throws KnowledgeSourceReadException;

    List<String> readInducesPropIds(TemporalPropositionDefinition tempPropDef) throws KnowledgeSourceReadException;

    List<TemporalPropositionDefinition> readInducedBy(String contextId) throws KnowledgeSourceReadException;

    List<TemporalPropositionDefinition> readInducedBy(ContextDefinition contextDef) throws KnowledgeSourceReadException;

    /**
     * Returns the specified proposition definition.
     *
     * @param id a proposition id {@link String}. Cannot be <code>null</code>.
     * @return a {@link PropositionDefinition}, or <code>null</code> if none was
     * found with the given <code>id</code>.
     * @throws KnowledgeSourceReadException if an error occurred reading from
     * the knowledge base.
     */
    PropositionDefinition readPropositionDefinition(String id) throws KnowledgeSourceReadException;

    TemporalPropositionDefinition readTemporalPropositionDefinition(String id) throws KnowledgeSourceReadException;

    ValueSet readValueSet(String id) throws KnowledgeSourceReadException;

    /*
     * Parent navigation across whatever hierarchies apply to a definition.
     */

    List<PropositionDefinition> readParents(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<PropositionDefinition> readParents(String propId) throws KnowledgeSourceReadException;

    List<String> readParentPropIds(PropositionDefinition propDef) throws KnowledgeSourceReadException;

    List<String> readParentPropIds(String propId) throws KnowledgeSourceReadException;

    /** Returns ids of propositions matching the given search key. */
    List<String> getMatchingPropIds(String searchKey) throws KnowledgeSourceReadException;

    /*
     * Bulk lookups by id.
     */

    List<PropositionDefinition> readPropositionDefinitions(String... propIds) throws KnowledgeSourceReadException;

    List<AbstractionDefinition> readAbstractionDefinitions(String... propIds) throws KnowledgeSourceReadException;

    List<TemporalPropositionDefinition> readTemporalPropositionDefinitions(String... propIds) throws KnowledgeSourceReadException;

    List<ContextDefinition> readContextDefinitions(String... propIds) throws KnowledgeSourceReadException;
}
eurekaclinical/protempa
protempa-framework/src/main/java/org/protempa/KnowledgeSource.java
Java
apache-2.0
6,587
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.segment;

import io.druid.collections.bitmap.BitmapFactory;
import io.druid.collections.bitmap.ImmutableBitmap;
import io.druid.collections.bitmap.MutableBitmap;
import io.druid.collections.bitmap.WrappedImmutableRoaringBitmap;
import io.druid.collections.bitmap.WrappedRoaringBitmap;
import io.druid.extendedset.intset.EmptyIntIterator;
import io.druid.java.util.common.RE;
import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
import io.druid.segment.data.Offset;
import io.druid.segment.data.RoaringBitmapSerdeFactory;
import org.roaringbitmap.IntIterator;

import java.util.Arrays;
import java.util.HashSet;

/**
 * An {@link Offset} that walks the set bits of a bitmap index, ascending or
 * descending. Positions are exposed via {@link #getOffset()}; exhaustion is
 * signaled by {@link #withinBounds()} turning false.
 */
public class BitmapOffset extends Offset
{
  private static final int INVALID_VALUE = -1;
  private static final BitmapFactory ROARING_BITMAP_FACTORY = new RoaringBitmapSerdeFactory(false).getBitmapFactory();

  /**
   * Currently the default stops are not consciously optimized for the goals described in {@link #factorizeFullness}.
   * They are chosen intuitively. There was no experimentation with different bitmapFullnessFactorizationStops.
   * Experimentation and performance feedback with a different set of stops is welcome.
   */
  private static final String DEFAULT_FULLNESS_FACTORIZATION_STOPS = "0.01,0.1,0.3,0.5,0.7,0.9,0.99";
  private static final double[] BITMAP_FULLNESS_FACTORIZATION_STOPS;
  private static final String[] FACTORIZED_FULLNESS;

  static {
    String stopString = System.getProperty("bitmapFullnessFactorizationStops", DEFAULT_FULLNESS_FACTORIZATION_STOPS);
    String[] stopsArray = stopString.split(",");
    if (stopsArray.length == 0) {
      throw new RE("Empty bitmapFullnessFactorizationStops: " + stopString);
    }
    if (new HashSet<>(Arrays.asList(stopsArray)).size() != stopsArray.length) {
      throw new RE("Non unique bitmapFullnessFactorizationStops: " + stopString);
    }

    BITMAP_FULLNESS_FACTORIZATION_STOPS = new double[stopsArray.length];

    for (int i = 0; i < stopsArray.length; i++) {
      String stop = stopsArray[i];
      BITMAP_FULLNESS_FACTORIZATION_STOPS[i] = Double.parseDouble(stop);
    }
    Arrays.sort(BITMAP_FULLNESS_FACTORIZATION_STOPS);

    double firstStop = BITMAP_FULLNESS_FACTORIZATION_STOPS[0];
    if (Double.isNaN(firstStop) || firstStop <= 0.0) {
      // FIX: the stop is a double, so the conversion must be %s, not %d
      // (%d is an invalid conversion for floating-point arguments and the
      // original message could never render the offending value).
      throw new RE("First bitmapFullnessFactorizationStop[%s] should be > 0", firstStop);
    }
    double lastStop = BITMAP_FULLNESS_FACTORIZATION_STOPS[stopsArray.length - 1];
    if (Double.isNaN(lastStop) || lastStop >= 1) {
      // FIX: %s instead of %d, same reason as above.
      throw new RE("Last bitmapFullnessFactorizationStop[%s] should be < 1", lastStop);
    }

    String prevStop = "0";
    FACTORIZED_FULLNESS = new String[stopsArray.length + 1];
    for (int i = 0; i < stopsArray.length; i++) {
      String stop = String.valueOf(BITMAP_FULLNESS_FACTORIZATION_STOPS[i]);
      FACTORIZED_FULLNESS[i] = "(" + prevStop + ", " + stop + "]";
      prevStop = stop;
    }
    FACTORIZED_FULLNESS[stopsArray.length] = "(" + prevStop + ", 1)";
  }

  /**
   * Processing of queries with BitmapOffsets, whose Bitmaps has different factorized fullness (bucket), reported from
   * this method, uses different copies of the same code, so JIT compiler analyzes and compiles the code for different
   * factorized fullness separately. The goal is to capture frequency of abstraction usage in compressed bitmap
   * algorithms, i. e.
   *  - "Zero sequence" vs. "Literal" vs. "One sequence" in {@link io.druid.extendedset.intset.ImmutableConciseSet}
   *  - {@link org.roaringbitmap.ArrayContainer} vs {@link org.roaringbitmap.BitmapContainer} in Roaring
   * and then https://shipilev.net/blog/2015/black-magic-method-dispatch/ comes into play. The secondary goal is to
   * capture HotSpot's thresholds, which it uses to compile conditional blocks differently inside bitmap impls. See
   * https://bugs.openjdk.java.net/browse/JDK-6743900. The default BlockLayoutMinDiamondPercentage=20, i. e. if
   * probability of taking some branch is less than 20%, it is moved out of the hot path (to save some icache?).
   *
   * On the other hand, we don't want to factor fullness into too small pieces, because
   *  - too little queries may fall into those small buckets, and they are not compiled with Hotspot's C2 compiler
   *  - if there are a lot of queries for each small factorized fullness and their copies of the code is compiled by
   *  C2, this pollutes code cache and takes time to perform too many compilations, while some of them likely produce
   *  identical code.
   *
   * Ideally there should be as much buckets as possible as long as Hotspot's C2 output for each bucket is different.
   */
  private static String factorizeFullness(long bitmapCardinality, long numRows)
  {
    if (bitmapCardinality == 0) {
      return "0";
    } else if (bitmapCardinality == numRows) {
      return "1";
    } else {
      double fullness = bitmapCardinality / (double) numRows;
      int index = Arrays.binarySearch(BITMAP_FULLNESS_FACTORIZATION_STOPS, fullness);
      if (index < 0) {
        index = ~index;
      }
      return FACTORIZED_FULLNESS[index];
    }
  }

  // Iterator over the set bits; `val` holds the current position or
  // INVALID_VALUE once exhausted.
  final IntIterator itr;
  final String fullness;
  int val;

  /**
   * Returns a reverse iterator over the bitmap's set bits, converting to a
   * Roaring bitmap first if the index is not already Roaring-backed.
   */
  public static IntIterator getReverseBitmapOffsetIterator(ImmutableBitmap bitmapIndex)
  {
    ImmutableBitmap roaringBitmap = bitmapIndex;
    if (!(bitmapIndex instanceof WrappedImmutableRoaringBitmap)) {
      final MutableBitmap bitmap = ROARING_BITMAP_FACTORY.makeEmptyMutableBitmap();
      final IntIterator iterator = bitmapIndex.iterator();
      while (iterator.hasNext()) {
        bitmap.add(iterator.next());
      }
      roaringBitmap = ROARING_BITMAP_FACTORY.makeImmutableBitmap(bitmap);
    }
    return ((WrappedImmutableRoaringBitmap) roaringBitmap).getBitmap().getReverseIntIterator();
  }

  /**
   * Factory: picks the {@link RoaringBitmapOffset} subclass for Roaring-backed
   * indexes and for all descending iteration (which is Roaring-based).
   */
  public static BitmapOffset of(ImmutableBitmap bitmapIndex, boolean descending, long numRows)
  {
    if (bitmapIndex instanceof WrappedImmutableRoaringBitmap ||
        bitmapIndex instanceof WrappedRoaringBitmap ||
        descending) {
      return new RoaringBitmapOffset(bitmapIndex, descending, numRows);
    } else {
      return new BitmapOffset(bitmapIndex, descending, numRows);
    }
  }

  private BitmapOffset(ImmutableBitmap bitmapIndex, boolean descending, long numRows)
  {
    this.itr = newIterator(bitmapIndex, descending);
    this.fullness = factorizeFullness(bitmapIndex.size(), numRows);
    // Advance to the first position (or INVALID_VALUE if the bitmap is empty).
    increment();
  }

  private IntIterator newIterator(ImmutableBitmap bitmapIndex, boolean descending)
  {
    if (!descending) {
      return bitmapIndex.iterator();
    } else {
      return getReverseBitmapOffsetIterator(bitmapIndex);
    }
  }

  /** Copy constructor used by {@link #clone()}. */
  private BitmapOffset(String fullness, IntIterator itr, int val)
  {
    this.fullness = fullness;
    this.itr = itr;
    this.val = val;
  }

  @Override
  public void increment()
  {
    if (itr.hasNext()) {
      val = itr.next();
    } else {
      val = INVALID_VALUE;
    }
  }

  @Override
  public boolean withinBounds()
  {
    return val > INVALID_VALUE;
  }

  @Override
  public Offset clone()
  {
    return new BitmapOffset(fullness, itr.clone(), val);
  }

  @Override
  public int getOffset()
  {
    return val;
  }

  @Override
  public void inspectRuntimeShape(RuntimeShapeInspector inspector)
  {
    inspector.visit("itr", itr);
    inspector.visit("fullness", fullness);
  }

  /** Roaring-backed variant; clones need an empty-iterator guard. */
  public static class RoaringBitmapOffset extends BitmapOffset
  {
    public RoaringBitmapOffset(ImmutableBitmap bitmapIndex, boolean descending, long numRows)
    {
      super(bitmapIndex, descending, numRows);
    }

    RoaringBitmapOffset(String fullness, IntIterator itr, int val)
    {
      super(fullness, itr, val);
    }

    @Override
    public Offset clone()
    {
      // Roaring's exhausted iterators can't be cloned; substitute an empty one.
      return new RoaringBitmapOffset(fullness, itr.hasNext() ? itr.clone() : EmptyIntIterator.instance(), val);
    }
  }
}
lizhanhui/data_druid
processing/src/main/java/io/druid/segment/BitmapOffset.java
Java
apache-2.0
8,675
/*
 * Copyright 2013-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.httpserver;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.io.CharStreams;
import com.google.common.net.MediaType;

import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.handler.AbstractHandler;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Writer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * HTTP handler for requests to the {@code /tracedata} path. Streams the raw
 * trace identified by the path segment back to the client as JavaScript,
 * optionally wrapped in a JSONP callback when a valid {@code callback}
 * query parameter is supplied.
 */
class TraceDataHandler extends AbstractHandler {

  /** Matches a path of the form "/<trace-id>" and captures the id. */
  static final Pattern ID_PATTERN = Pattern.compile("/([0-9a-zA-Z-]+)");

  /** Restricts JSONP callback names to word characters and dots. */
  @VisibleForTesting
  static final Pattern CALLBACK_PATTERN = Pattern.compile("[\\w\\.]+");

  private final TracesHelper tracesHelper;

  TraceDataHandler(TracesHelper tracesHelper) {
    this.tracesHelper = Preconditions.checkNotNull(tracesHelper);
  }

  @Override
  public void handle(String target,
      Request baseRequest,
      HttpServletRequest request,
      HttpServletResponse response) throws IOException, ServletException {
    // Only GET is supported; everything else is rejected up front.
    if (!"GET".equals(baseRequest.getMethod())) {
      Responses.writeFailedResponse(baseRequest, response);
      return;
    }
    doGet(baseRequest, response);
  }

  private void doGet(Request baseRequest, HttpServletResponse response)
      throws ServletException, IOException {
    Matcher idMatcher = ID_PATTERN.matcher(baseRequest.getPathInfo());
    if (!idMatcher.matches()) {
      Responses.writeFailedResponse(baseRequest, response);
      return;
    }
    String traceId = idMatcher.group(1);

    response.setContentType(MediaType.JAVASCRIPT_UTF_8.toString());
    response.setStatus(HttpServletResponse.SC_OK);

    Writer out = response.getWriter();

    // When a syntactically valid callback is supplied, emit JSONP:
    // callback(<trace data>);
    String callback = baseRequest.getParameter("callback");
    boolean wrapInCallback =
        callback != null && CALLBACK_PATTERN.matcher(callback).matches();
    if (wrapInCallback) {
      out.write(callback);
      out.write("(");
    }

    try (InputStream traceStream = tracesHelper.getInputForTrace(traceId);
        InputStreamReader traceReader = new InputStreamReader(traceStream)) {
      CharStreams.copy(traceReader, out);
    }

    if (wrapInCallback) {
      out.write(");\n");
    }

    response.flushBuffer();
    baseRequest.setHandled(true);
  }
}
saleeh93/buck-cutom
src/com/facebook/buck/httpserver/TraceDataHandler.java
Java
apache-2.0
3,302
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.blur.store.blockcache_v2;

/**
 * Strategy for computing a size for a file in a {@link CacheDirectory}.
 */
public interface Size {

  /**
   * Computes the size for the given file in the given directory.
   * Presumably the value is in bytes — TODO(review): confirm the unit
   * against implementations.
   *
   * @param directory the cache directory containing the file
   * @param fileName the name of the file within the directory
   * @return the computed size
   */
  int getSize(CacheDirectory directory, String fileName);
}
roshanp/lucene-hdfs-directory
src/main/java/org/apache/blur/store/blockcache_v2/Size.java
Java
apache-2.0
937
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using Nest.Litterateur.Documentation.Files;

namespace Nest.Litterateur
{
	/// <summary>
	/// Entry point for the documentation generator: collects documentation
	/// source files from the input directory and writes each of them into
	/// the documentation output folder.
	/// </summary>
	public static class LitUp
	{
		// Folders whose direct children are never treated as documentation input.
		private static readonly string[] SkipFolders = { "Nest.Tests.Literate", "Debug", "Release" };

		/// <summary>
		/// Enumerates all files under <c>Program.InputDirPath</c> matching the
		/// given search pattern, excluding files whose parent folder is in
		/// <see cref="SkipFolders"/>, loaded as <see cref="DocumentationFile"/>s.
		/// </summary>
		public static IEnumerable<DocumentationFile> InputFiles(string path) =>
			from f in Directory.GetFiles(Program.InputDirPath, $"{path}", SearchOption.AllDirectories)
			let dir = new DirectoryInfo(f)
			where dir?.Parent != null && !SkipFolders.Contains(dir.Parent.Name)
			select DocumentationFile.Load(new FileInfo(f));

		/// <summary>
		/// The input batches, in processing order: doc sources, usage tests,
		/// images, and asciidocs last (see comment below).
		/// </summary>
		public static IEnumerable<IEnumerable<DocumentationFile>> Input
		{
			get
			{
				yield return InputFiles("*.doc.cs");
				yield return InputFiles("*UsageTests.cs");
				yield return InputFiles("*.png");
				yield return InputFiles("*.gif");
				yield return InputFiles("*.jpg");

				// process asciidocs last as they may have generated
				// includes to other output asciidocs
				yield return InputFiles("*.asciidoc");
			}
		}

		/// <summary>
		/// Runs the generator: saves every collected file into the
		/// documentation folder, then (non-.NET Core builds only) waits for a
		/// key press.
		/// </summary>
		public static void Go(string[] args)
		{
			foreach (var file in Input.SelectMany(s => s))
			{
				file.SaveToDocumentationFolder();
			}
#if !DOTNETCORE
			// NOTE(review): the prompt is only printed when a debugger is
			// attached, but ReadKey() blocks unconditionally — so a plain run
			// waits silently for input. Possibly both statements were meant to
			// be inside the if; confirm the intent before changing.
			if (Debugger.IsAttached) Console.WriteLine("Press any key to continue...");
			Console.ReadKey();
#endif
		}
	}
}
UdiBen/elasticsearch-net
src/CodeGeneration/Nest.Litterateur/LitUp.cs
C#
apache-2.0
1,352
/**
 * Copyright © 2015 Christian Wulf, Nelson Tavares de Sousa (http://teetime-framework.github.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package teetime.stage.taskfarm;

import java.util.ArrayList;
import java.util.List;

import teetime.framework.CompositeStage;
import teetime.framework.InputPort;
import teetime.framework.OutputPort;
import teetime.stage.basic.ITransformation;
import teetime.stage.basic.distributor.Distributor;
import teetime.stage.basic.merger.Merger;

/**
 * Represents the task farm parallelization pattern in TeeTime: a
 * {@link Distributor} fans incoming elements out to a fixed number of worker
 * stage copies, and a {@link Merger} fans their outputs back in. The worker
 * count is fixed at construction time ("static" farm).
 *
 * @author Christian Claus Wiechmann, Christian Wulf
 *
 * @param <I>
 *            Input type of Task Farm
 * @param <O>
 *            Output type of Task Farm
 * @param <T>
 *            Type of the parallelized stage
 */
public class StaticTaskFarmStage<I, O, T extends ITaskFarmDuplicable<I, O>> extends CompositeStage implements ITransformation<I, O> {

	// Default worker count: one worker per available processor.
	private static final int MAX_NUMBER_OF_STAGES = Runtime.getRuntime().availableProcessors();

	private final Distributor<I> distributor;
	private final Merger<O> merger;
	/** List of all currently existing worker stages */
	private final List<ITaskFarmDuplicable<I, O>> workerStages;

	// Outer ports of the composite stage; mapped in init() to the
	// distributor's input and the merger's output.
	private InputPort<I> inputPort;
	private OutputPort<O> outputPort;

	/**
	 * Creates a task farm stage with <i>n</i> worker stages and a pipe capacity of {@value #DEFAULT_PIPE_CAPACITY}, where <i>n</i>
	 * is
	 *
	 * <pre>
	 * Runtime.getRuntime().availableProcessors()
	 * </pre>
	 *
	 * @param workerStage
	 */
	public StaticTaskFarmStage(final T workerStage) {
		this(workerStage, MAX_NUMBER_OF_STAGES, DEFAULT_PIPE_CAPACITY);
	}

	/**
	 * Creates a task farm with the given number of worker stages and the
	 * default pipe capacity.
	 */
	public StaticTaskFarmStage(final T workerStage, final int numberStages) {
		this(workerStage, numberStages, DEFAULT_PIPE_CAPACITY);
	}

	/**
	 * Creates a task farm with the given number of worker stages and pipe
	 * capacity, using a fresh distributor and merger.
	 */
	public StaticTaskFarmStage(final T workerStage, final int numberStages, final int pipeCapacity) {
		this(workerStage, numberStages, pipeCapacity, new Distributor<I>(), new Merger<O>());
	}

	/**
	 * Base constructor: validates the arguments and wires the farm.
	 *
	 * @param workerStage
	 *            prototype worker stage; copies are created via {@link ITaskFarmDuplicable#duplicate()}. Must not be {@code null}.
	 * @param numberStages
	 *            number of worker copies, at least 1
	 * @param pipeCapacity
	 *            capacity of each connecting pipe, at least 1
	 * @param distributor
	 *            distributor instance to use
	 * @param merger
	 *            merger instance to use
	 */
	protected StaticTaskFarmStage(final T workerStage, final int numberStages, final int pipeCapacity, final Distributor<I> distributor,
			final Merger<O> merger) {
		super();
		if (null == workerStage) {
			throw new IllegalArgumentException("The constructor of a Task Farm may not be called with null as the worker stage.");
		}
		if (numberStages < 1) {
			throw new IllegalArgumentException("The number of worker stages must be at least 1.");
		}
		if (pipeCapacity < 1) {
			throw new IllegalArgumentException("The capacity of the pipe(s) must be at least 1.");
		}
		this.distributor = distributor;
		this.merger = merger;
		this.workerStages = new ArrayList<ITaskFarmDuplicable<I, O>>();

		this.init(workerStage, numberStages, pipeCapacity);
	}

	/**
	 * Connects the prototype worker plus (numberStages - 1) duplicates between
	 * distributor and merger, declares each worker (and, for more than one
	 * worker, the merger) active, and maps the outer ports.
	 */
	private void init(final T workerStage, final int numberStages, final int pipeCapacity) {
		connectWorkerStage(workerStage, pipeCapacity);
		workerStage.getInputPort().getOwningStage().declareActive();

		for (int i = 1; i < numberStages; i++) {
			ITaskFarmDuplicable<I, O> duplicatedWorkerStage = workerStage.duplicate();
			connectWorkerStage(duplicatedWorkerStage, pipeCapacity);
			duplicatedWorkerStage.getInputPort().getOwningStage().declareActive();
		}

		if (numberStages > 1) {
			this.merger.declareActive();
		}

		// map outer ports to inner ports
		inputPort = createInputPort(this.distributor.getInputPort());
		outputPort = createOutputPort(this.merger.getOutputPort());
	}

	/**
	 * Connects one worker between a new distributor output port and a new
	 * merger input port, and registers it in {@link #workerStages}.
	 */
	private void connectWorkerStage(final ITaskFarmDuplicable<I, O> workerStage, final int pipeCapacity) {
		final InputPort<I> stageInputPort = workerStage.getInputPort();
		connectPorts(this.distributor.getNewOutputPort(), stageInputPort, pipeCapacity);

		final OutputPort<O> stageOutputPort = workerStage.getOutputPort();
		connectPorts(stageOutputPort, this.merger.getNewInputPort(), pipeCapacity);

		this.workerStages.add(workerStage);
	}

	/**
	 * Returns the input port of the task farm/distributor of the task farm.
	 *
	 * @return input port of the task farm
	 */
	@Override
	public InputPort<I> getInputPort() {
		return inputPort;
	}

	/**
	 * Returns the output port of the task farm/merger of the task farm.
	 *
	 * @return output port of the task farm
	 */
	@Override
	public OutputPort<O> getOutputPort() {
		return outputPort;
	}

	/* default */ Distributor<I> getDistributor() {
		return distributor;
	}

	/* default */ Merger<O> getMerger() {
		return merger;
	}

	/** Returns the capacity of the first distributor output pipe (all pipes share it). */
	protected int getPipeCapacity() {
		return distributor.getOutputPorts().get(0).getPipe().capacity();
	}

	/**
	 * @return a list of all currently existing worker stages
	 */
	public List<ITaskFarmDuplicable<I, O>> getWorkerStages() {
		return workerStages;
	}

}
teetime-framework/teetime
src/main/java/teetime/stage/taskfarm/StaticTaskFarmStage.java
Java
apache-2.0
5,451
/*
 * MeetMeTalkingRequestEvent.cpp
 *
 *  Created on: Mar 14, 2012
 *      Author: augcampos
 */

#include "asteriskcpp/manager/events/MeetMeTalkingRequestEvent.h"

namespace asteriskcpp {

    // Construct the event from the raw key/value payload; parsing is done
    // by the AbstractMeetMeEvent base class.
    MeetMeTalkingRequestEvent::MeetMeTalkingRequestEvent(const std::string& values)
            : AbstractMeetMeEvent(values) {
    }

    MeetMeTalkingRequestEvent::~MeetMeTalkingRequestEvent() {
    }

    // Reads the "Status" property of the event as a boolean.
    bool MeetMeTalkingRequestEvent::getStatus() const {
        return getProperty<bool>("Status");
    }

} // namespace asteriskcpp
tiijima/asterisk-cpp
asterisk-cpp/src/manager/events/MeetMeTalkingRequestEvent.cpp
C++
apache-2.0
526
"use strict";

// NOTE: this is transpiled output (lib/). The expiry fix below should be
// mirrored in the corresponding src/ module.

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.DNSCache = exports.DNS_DEFAULT_EXPIRE = void 0;

var _dns = _interopRequireDefault(require("dns"));

var _net = _interopRequireDefault(require("net"));

var _logger = require("./logger");

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }

// Promisified dns.lookup: resolves to the first address for `hostname`.
async function lookup(hostname) {
  return new Promise((resolve, reject) => {
    _dns.default.lookup(hostname, function (err, address) {
      if (err) {
        reject(err);
      } else {
        resolve(address);
      }
    });
  });
}

function now() {
  return Date.now();
}

// Default cache lifetime: one hour, in milliseconds.
const DNS_DEFAULT_EXPIRE = 3600000;
exports.DNS_DEFAULT_EXPIRE = DNS_DEFAULT_EXPIRE;

/**
 * Simple in-process DNS cache keyed by hostname. Entries are
 * [address, expireTimestampMs] pairs; an expire time of 0 disables caching.
 */
class DNSCache {
  /**
   * (Re)initializes the cache, optionally overriding the entry lifetime in
   * milliseconds (non-negative numbers only).
   */
  static init(expire) {
    if (typeof expire === 'number' && expire >= 0) {
      DNSCache.expire = expire;
    }

    DNSCache.pool = {};
  }

  /**
   * Resolves `hostname` to an address, serving from the cache when a fresh
   * entry exists. IP literals are returned unchanged without a DNS query.
   *
   * FIX: the previous version deleted an expired entry but still returned
   * the stale address; an expired entry now triggers a fresh lookup.
   */
  static async get(hostname) {
    if (_net.default.isIP(hostname)) {
      return hostname;
    }

    const cached = DNSCache.pool[hostname];

    if (cached) {
      const [addr, expire] = cached;

      const _now = now();

      if (_now < expire) {
        _logger.logger.verbose(`[dns-cache] hit: hostname=${hostname} resolved=${addr} ttl=${expire - _now}ms`);

        return addr;
      }

      // Entry expired: drop it and fall through to a fresh lookup.
      delete DNSCache.pool[hostname];
    }

    const address = await lookup(hostname);

    DNSCache._put(hostname, address);

    return address;
  }

  /** Drops all cached entries. */
  static clear() {
    DNSCache.pool = {};
  }

  /** Stores a resolved address unless caching is disabled (expire <= 0). */
  static _put(hostname, address) {
    if (DNSCache.expire > 0) {
      const expire = now() + DNSCache.expire;
      DNSCache.pool[hostname] = [address, expire];
    }
  }

}

exports.DNSCache = DNSCache;

_defineProperty(DNSCache, "pool", {});

_defineProperty(DNSCache, "expire", DNS_DEFAULT_EXPIRE);
blinksocks/blinksocks
lib/utils/dns-cache.js
JavaScript
apache-2.0
2,047
# Copyright (c) 2018 by contributors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# coding: utf-8
"""ctypes glue for the xlearn native library: loads the shared library,
checks C API return codes, and provides string/handle conversion helpers."""
import sys
import os
import ctypes

from .libpath import find_lib_path


class XLearnError(Exception):
    """Error thrown by xlearn trainer"""
    pass


def _load_lib():
    """Load xlearn shared library.

    Returns the loaded ``ctypes.CDLL``, or ``None`` when no library file
    could be located by ``find_lib_path()``.
    """
    lib_path = find_lib_path()
    if len(lib_path) == 0:
        return None
    lib = ctypes.cdll.LoadLibrary(lib_path[0])
    return lib

# load the xlearn library globally
_LIB = _load_lib()

def _check_call(ret: int) -> None:
    """Check the return value of C API call

    This function will raise exception when error occurs.
    Wrap every API call with this function

    Parameters
    ----------
    ret : int
        return value from API calls
    """
    if ret != 0:
        msg = ""
        # Read the NUL-terminated error string byte by byte from the
        # native side and surface it as an XLearnError.
        _LIB.XLearnGetLastError.restype = ctypes.POINTER(ctypes.c_ubyte)
        ptr = _LIB.XLearnGetLastError()
        idx = 0
        while(ptr[idx] != 0):
            msg += chr(ptr[idx])
            idx += 1
        raise XLearnError(msg)

# type definitions
# Opaque handle passed to/from the xlearn C API.
XLearnHandle = ctypes.c_void_p

# c_str differs between Python 2 and 3: only Python 3 strings need an
# explicit UTF-8 encode before being handed to ctypes.
if sys.version_info[0] < 3:
    def c_str(string):
        """Create ctypes char * from a Python string.

        Parameters
        ----------
        string : string type
            Pyrhon string.

        Returns
        -------
        str : c_char_p
            A char pointer that can be passed to C API.

        Examples
        --------
        >>> x = c_str("Hello, world!")
        >>> print x.value
        Hello, world!
        """
        return ctypes.c_char_p(string)
else:
    def c_str(string):
        """Create ctypes char * from a Python string.

        Parameters
        ----------
        string : string type
            Pyrhon string.

        Returns
        -------
        str : c_char_p
            A char pointer that can be passed to C API.

        Examples
        --------
        >>> x = c_str("Hello, world!")
        >>> print(x.value)
        Hello, world!
        """
        return ctypes.c_char_p(string.encode('utf-8'))

"""pandas"""
try:
    from pandas import Series, DataFrame
except ImportError:
    class Series(object):
        """Dummy class for pandas.Series."""
        pass

    class DataFrame(object):
        """Dummy class for pandas.DataFrame."""
        pass
PKU-Cloud-Lab/xLearn
python-package/xlearn/base.py
Python
apache-2.0
2,861
/** #if (${NAMESPACE}) * @package ${NAMESPACE} #end */
wumouse/phpstorm-setting
fileTemplates/includes/PHP Interface Doc Comment.php
PHP
apache-2.0
56
/**
 * Copyright 2014 SAP AG
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.spotter.core.instrumentation;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import org.aim.api.exceptions.InstrumentationException;
import org.aim.description.InstrumentationDescription;
import org.lpe.common.extension.IExtension;
import org.lpe.common.util.system.LpeSystemUtils;

/**
 * The instrumentation broker manages the distribution of instrumentation
 * commands: initialize/instrument/uninstrument calls are fanned out in
 * parallel (one task per registered adapter) and the broker waits for all
 * of them to finish before returning.
 *
 * @author Alexander Wert
 *
 */
public final class InstrumentationBroker implements IInstrumentationAdapter {

	private static InstrumentationBroker instance;

	/**
	 *
	 * @return singleton instance
	 */
	public static synchronized InstrumentationBroker getInstance() {
		if (instance == null) {
			instance = new InstrumentationBroker();
		}
		return instance;
	}

	// The adapters this broker fans commands out to; replaced wholesale by
	// setControllers().
	private final List<IInstrumentationAdapter> instrumentationControllers;

	/**
	 * Constructor.
	 *
	 * @param instrumentationControllers
	 *            instrumentation controllers to manage
	 */
	private InstrumentationBroker() {
		this.instrumentationControllers = new ArrayList<IInstrumentationAdapter>();
	}

	/**
	 * sets a collection of controllers.
	 *
	 * @param instrumentationControllers
	 *            controllers
	 */
	public void setControllers(Collection<IInstrumentationAdapter> instrumentationControllers) {
		this.instrumentationControllers.clear();
		this.instrumentationControllers.addAll(instrumentationControllers);
	}

	/**
	 * Initializes all registered adapters in parallel and waits for all of
	 * them to finish. Any failure is rethrown as InstrumentationException.
	 */
	@Override
	public void initialize() throws InstrumentationException {
		try {
			List<Future<?>> tasks = new ArrayList<>();
			for (IInstrumentationAdapter instController : instrumentationControllers) {
				tasks.add(LpeSystemUtils.submitTask(new InitializeTask(instController)));
			}

			// wait for termination of all initialization tasks
			for (Future<?> task : tasks) {
				task.get();
			}
		} catch (InterruptedException | ExecutionException e) {
			throw new InstrumentationException(e);
		}
	}

	/**
	 * Applies the given instrumentation description on all registered
	 * adapters in parallel and waits for completion.
	 */
	@Override
	public void instrument(InstrumentationDescription description) throws InstrumentationException {
		try {
			if (description == null) {
				throw new InstrumentationException("Instrumentation description must not be null!");
			}
			List<Future<?>> tasks = new ArrayList<>();
			for (IInstrumentationAdapter instController : instrumentationControllers) {
				tasks.add(LpeSystemUtils.submitTask(new InstrumentTask(instController, description)));
			}

			// wait for termination of all instrumentation tasks
			for (Future<?> task : tasks) {
				task.get();
			}
		} catch (InterruptedException | ExecutionException e) {
			throw new InstrumentationException(e);
		}
	}

	/**
	 * Reverts instrumentation on all registered adapters in parallel and
	 * waits for completion.
	 */
	@Override
	public void uninstrument() throws InstrumentationException {
		try {
			List<Future<?>> tasks = new ArrayList<>();
			for (IInstrumentationAdapter instController : instrumentationControllers) {
				tasks.add(LpeSystemUtils.submitTask(new UninstrumentTask(instController)));
			}

			// wait for termination of all uninstrumentation tasks
			for (Future<?> task : tasks) {
				task.get();
			}
		} catch (InterruptedException | ExecutionException e) {
			throw new InstrumentationException(e);
		}
	}

	/**
	 * Returns the union of all adapters' properties. Later adapters
	 * overwrite identical keys of earlier ones.
	 */
	@Override
	public Properties getProperties() {
		Properties props = new Properties();
		for (IInstrumentationAdapter instController : instrumentationControllers) {
			props.putAll(instController.getProperties());
		}
		return props;
	}

	@Override
	public IExtension<?> getProvider() {
		return null;
	}

	/**
	 * Base class for the fan-out tasks: runs executeTask() and wraps any
	 * checked exception in a RuntimeException, which Future.get() then
	 * surfaces as an ExecutionException to the calling broker method.
	 */
	private abstract class Task implements Runnable {

		@Override
		public void run() {
			try {
				executeTask();
			} catch (Exception e) {
				throw new RuntimeException(e);
			}
		}

		protected abstract void executeTask() throws InstrumentationException;
	}

	/**
	 * Applies an instrumentation description on a single adapter, after
	 * merging the adapter's configured include/exclude package lists into
	 * the description's global restriction.
	 */
	private class InstrumentTask extends Task {
		IInstrumentationAdapter instController;
		InstrumentationDescription description;

		public InstrumentTask(IInstrumentationAdapter instController, InstrumentationDescription description)
				throws InterruptedException {
			this.instController = instController;
			this.description = description;
		}

		@Override
		protected void executeTask() throws InstrumentationException {
			// Includes/excludes come as comma-separated lists from the
			// adapter's properties; empty values are treated as absent.
			String csListIncludes = instController.getProperties().getProperty(
					IInstrumentationAdapter.INSTRUMENTATION_INCLUDES);
			csListIncludes = (csListIncludes == null || csListIncludes.isEmpty()) ? null : csListIncludes;
			if (csListIncludes != null) {
				String[] includesArr = csListIncludes.split(",");
				for (String inc : includesArr) {
					description.getGlobalRestriction().getPackageIncludes().add(inc);
				}
			}

			String csListExcludes = instController.getProperties().getProperty(
					IInstrumentationAdapter.INSTRUMENTATION_EXCLUDES);
			csListExcludes = (csListExcludes == null || csListExcludes.isEmpty()) ? null : csListExcludes;
			if (csListExcludes != null) {
				String[] excludesArr = csListExcludes.split(",");
				for (String exc : excludesArr) {
					description.getGlobalRestriction().getPackageExcludes().add(exc);
				}
			}
			instController.instrument(description);
		}

	}

	/** Reverts instrumentation on a single adapter. */
	private class UninstrumentTask extends Task {
		IInstrumentationAdapter instController;

		public UninstrumentTask(IInstrumentationAdapter instController) throws InterruptedException {
			this.instController = instController;
		}

		@Override
		protected void executeTask() throws InstrumentationException {
			instController.uninstrument();
		}

	}

	/** Initializes a single adapter. */
	private class InitializeTask extends Task {
		private IInstrumentationAdapter instController;

		public InitializeTask(IInstrumentationAdapter instController) throws InterruptedException {
			this.instController = instController;
		}

		@Override
		protected void executeTask() throws InstrumentationException {
			instController.initialize();
		}

	}

	@Override
	public String getName() {
		return "Instrumentation Broker";
	}

	@Override
	public String getPort() {
		return "NA";
	}

	@Override
	public String getHost() {
		return "localhost";
	}

	@Override
	public void setProperties(Properties properties) {
		// nothing to do
	}

	/**
	 * Returns a list of instrumentation controllers of the given type.
	 *
	 * @param type
	 *            type of interest
	 * @return list of instrumentation controllers of the given type
	 * @param <T>
	 *            Class type of the controllers
	 */
	@SuppressWarnings("unchecked")
	public <T extends IInstrumentationAdapter> List<T> getInstrumentationControllers(Class<T> type) {
		List<T> result = new ArrayList<>();
		for (IInstrumentationAdapter controller : instrumentationControllers) {
			if (type.isAssignableFrom(controller.getClass())) {
				result.add((T) controller);
			}
		}
		return result;
	}
}
CloudScale-Project/DynamicSpotter
org.spotter.core/src/org/spotter/core/instrumentation/InstrumentationBroker.java
Java
apache-2.0
7,375
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: Filter.proto package org.apache.hadoop.hbase.protobuf.generated; public final class FilterProtos { private FilterProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface FilterOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string name = 1; /** * <code>required string name = 1;</code> */ boolean hasName(); /** * <code>required string name = 1;</code> */ java.lang.String getName(); /** * <code>required string name = 1;</code> */ com.google.protobuf.ByteString getNameBytes(); // optional bytes serialized_filter = 2; /** * <code>optional bytes serialized_filter = 2;</code> */ boolean hasSerializedFilter(); /** * <code>optional bytes serialized_filter = 2;</code> */ com.google.protobuf.ByteString getSerializedFilter(); } /** * Protobuf type {@code hbase.pb.Filter} */ public static final class Filter extends com.google.protobuf.GeneratedMessage implements FilterOrBuilder { // Use Filter.newBuilder() to construct. 
private Filter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private Filter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final Filter defaultInstance; public static Filter getDefaultInstance() { return defaultInstance; } public Filter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Filter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; name_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; serializedFilter_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder.class); } public static com.google.protobuf.Parser<Filter> PARSER = new com.google.protobuf.AbstractParser<Filter>() { public Filter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new Filter(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<Filter> getParserForType() { return PARSER; } private int bitField0_; // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>required string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional bytes serialized_filter = 2; public static final int SERIALIZED_FILTER_FIELD_NUMBER = 2; private com.google.protobuf.ByteString serializedFilter_; /** * <code>optional bytes serialized_filter = 2;</code> */ public boolean hasSerializedFilter() { return ((bitField0_ 
& 0x00000002) == 0x00000002); } /** * <code>optional bytes serialized_filter = 2;</code> */ public com.google.protobuf.ByteString getSerializedFilter() { return serializedFilter_; } private void initFields() { name_ = ""; serializedFilter_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, serializedFilter_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, serializedFilter_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) obj; boolean result = true; result = result && 
(hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasSerializedFilter() == other.hasSerializedFilter()); if (hasSerializedFilter()) { result = result && getSerializedFilter() .equals(other.getSerializedFilter()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasSerializedFilter()) { hash = (37 * hash) + SERIALIZED_FILTER_FIELD_NUMBER; hash = (53 * hash) + getSerializedFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter 
parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.Filter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); serializedFilter_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.serializedFilter_ = serializedFilter_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasSerializedFilter()) { setSerializedFilter(other.getSerializedFilter()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) e.getUnfinishedMessage(); throw e; } finally { if 
(parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string name = 1; private java.lang.Object name_ = ""; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } // optional bytes serialized_filter = 2; private com.google.protobuf.ByteString serializedFilter_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes serialized_filter = 2;</code> */ public boolean hasSerializedFilter() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes serialized_filter = 2;</code> */ public com.google.protobuf.ByteString getSerializedFilter() { 
return serializedFilter_; } /** * <code>optional bytes serialized_filter = 2;</code> */ public Builder setSerializedFilter(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; serializedFilter_ = value; onChanged(); return this; } /** * <code>optional bytes serialized_filter = 2;</code> */ public Builder clearSerializedFilter() { bitField0_ = (bitField0_ & ~0x00000002); serializedFilter_ = getDefaultInstance().getSerializedFilter(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.Filter) } static { defaultInstance = new Filter(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.Filter) } public interface ColumnCountGetFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { // required int32 limit = 1; /** * <code>required int32 limit = 1;</code> */ boolean hasLimit(); /** * <code>required int32 limit = 1;</code> */ int getLimit(); } /** * Protobuf type {@code hbase.pb.ColumnCountGetFilter} */ public static final class ColumnCountGetFilter extends com.google.protobuf.GeneratedMessage implements ColumnCountGetFilterOrBuilder { // Use ColumnCountGetFilter.newBuilder() to construct. 
private ColumnCountGetFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ColumnCountGetFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ColumnCountGetFilter defaultInstance; public static ColumnCountGetFilter getDefaultInstance() { return defaultInstance; } public ColumnCountGetFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ColumnCountGetFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; limit_ = input.readInt32(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class); } public static com.google.protobuf.Parser<ColumnCountGetFilter> PARSER = new com.google.protobuf.AbstractParser<ColumnCountGetFilter>() { public ColumnCountGetFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ColumnCountGetFilter(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ColumnCountGetFilter> getParserForType() { return PARSER; } private int bitField0_; // required int32 limit = 1; public static final int LIMIT_FIELD_NUMBER = 1; private int limit_; /** * <code>required int32 limit = 1;</code> */ public boolean hasLimit() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required int32 limit = 1;</code> */ public int getLimit() { return limit_; } private void initFields() { limit_ = 0; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasLimit()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, limit_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream 
.computeInt32Size(1, limit_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) obj; boolean result = true; result = result && (hasLimit() == other.hasLimit()); if (hasLimit()) { result = result && (getLimit() == other.getLimit()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLimit()) { hash = (37 * hash) + LIMIT_FIELD_NUMBER; hash = (53 * hash) + getLimit(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(byte[] data) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ColumnCountGetFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); limit_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.limit_ = limit_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance()) return this; if (other.hasLimit()) { setLimit(other.getLimit()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasLimit()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required int32 limit = 1; private int limit_ ; /** * <code>required int32 limit = 1;</code> */ public boolean hasLimit() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required int32 limit = 1;</code> */ public int getLimit() { return limit_; } /** * <code>required int32 limit = 1;</code> */ public Builder setLimit(int value) { bitField0_ |= 0x00000001; limit_ = value; onChanged(); return this; } /** * <code>required int32 limit = 1;</code> */ public Builder clearLimit() { bitField0_ = (bitField0_ & ~0x00000001); limit_ = 0; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnCountGetFilter) } static { defaultInstance = new ColumnCountGetFilter(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.ColumnCountGetFilter) } public interface ColumnPaginationFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { // required int32 limit = 1; /** * <code>required int32 limit = 1;</code> */ boolean hasLimit(); /** * <code>required int32 limit = 1;</code> */ int getLimit(); // optional int32 offset = 2; /** * <code>optional int32 offset = 2;</code> */ boolean hasOffset(); /** * <code>optional int32 offset = 2;</code> */ int getOffset(); // optional bytes column_offset = 3; /** * <code>optional bytes column_offset = 3;</code> */ boolean hasColumnOffset(); /** * <code>optional bytes column_offset = 3;</code> */ 
com.google.protobuf.ByteString getColumnOffset();
}

// NOTE(review): protoc-generated code (protobuf 2.x; see the
// @@protoc_insertion_point markers below). Do not hand-edit — regenerate
// from the corresponding .proto definition instead. Comments below were
// added for review only; all code tokens are unchanged.
/**
 * Protobuf type {@code hbase.pb.ColumnPaginationFilter}
 */
public static final class ColumnPaginationFilter extends
    com.google.protobuf.GeneratedMessage
    implements ColumnPaginationFilterOrBuilder {
  // Use ColumnPaginationFilter.newBuilder() to construct.
  private ColumnPaginationFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  private ColumnPaginationFilter(boolean noInit) {
    this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }

  private static final ColumnPaginationFilter defaultInstance;
  public static ColumnPaginationFilter getDefaultInstance() {
    return defaultInstance;
  }

  public ColumnPaginationFilter getDefaultInstanceForType() {
    return defaultInstance;
  }

  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: reads tags until end of stream (tag 0),
  // decoding limit (tag 8), offset (tag 16) and column_offset (tag 26);
  // unrecognized fields are preserved in unknownFields.
  private ColumnPaginationFilter(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 8: {
            bitField0_ |= 0x00000001;
            limit_ = input.readInt32();
            break;
          }
          case 16: {
            bitField0_ |= 0x00000002;
            offset_ = input.readInt32();
            break;
          }
          case 26: {
            bitField0_ |= 0x00000004;
            columnOffset_ = input.readBytes();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
  }

  public static com.google.protobuf.Parser<ColumnPaginationFilter> PARSER =
      new com.google.protobuf.AbstractParser<ColumnPaginationFilter>() {
    public ColumnPaginationFilter parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new ColumnPaginationFilter(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<ColumnPaginationFilter> getParserForType() {
    return PARSER;
  }

  // bitField0_ records which fields were explicitly set (bit 1 = limit,
  // bit 2 = offset, bit 4 = column_offset), as checked by the has*() methods.
  private int bitField0_;
  // required int32 limit = 1;
  public static final int LIMIT_FIELD_NUMBER = 1;
  private int limit_;
  /**
   * <code>required int32 limit = 1;</code>
   */
  public boolean hasLimit() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required int32 limit = 1;</code>
   */
  public int getLimit() {
    return limit_;
  }

  // optional int32 offset = 2;
  public static final int OFFSET_FIELD_NUMBER = 2;
  private int offset_;
  /**
   * <code>optional int32 offset = 2;</code>
   */
  public boolean hasOffset() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  /**
   * <code>optional int32 offset = 2;</code>
   */
  public int getOffset() {
    return offset_;
  }

  // optional bytes column_offset = 3;
  public static final int COLUMN_OFFSET_FIELD_NUMBER = 3;
  private com.google.protobuf.ByteString columnOffset_;
  /**
   * <code>optional bytes column_offset = 3;</code>
   */
  public boolean hasColumnOffset() {
    return ((bitField0_ & 0x00000004) == 0x00000004);
  }
  /**
   * <code>optional bytes column_offset = 3;</code>
   */
  public com.google.protobuf.ByteString getColumnOffset() {
    return columnOffset_;
  }

  private void initFields() {
    limit_ = 0;
    offset_ = 0;
    columnOffset_ = com.google.protobuf.ByteString.EMPTY;
  }
  // Caches the isInitialized() result: -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    // limit is the message's only required field.
    if (!hasLimit()) {
      memoizedIsInitialized = 0;
      return false;
    }
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeInt32(1, limit_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      output.writeInt32(2, offset_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      output.writeBytes(3, columnOffset_);
    }
    getUnknownFields().writeTo(output);
  }

  // Caches the computed wire size; -1 means "not computed yet".
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt32Size(1, limit_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt32Size(2, offset_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(3, columnOffset_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  // Field-by-field equality over limit/offset/column_offset plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) obj;

    boolean result = true;
    result = result && (hasLimit() == other.hasLimit());
    if (hasLimit()) {
      result = result && (getLimit()
          == other.getLimit());
    }
    result = result && (hasOffset() == other.hasOffset());
    if (hasOffset()) {
      result = result && (getOffset()
          == other.getOffset());
    }
    result = result && (hasColumnOffset() == other.hasColumnOffset());
    if (hasColumnOffset()) {
      result = result && getColumnOffset()
          .equals(other.getColumnOffset());
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  // Memoized hash consistent with equals(); 0 means "not computed yet".
  private int memoizedHashCode = 0;
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasLimit()) {
      hash = (37 * hash) + LIMIT_FIELD_NUMBER;
      hash = (53 * hash) + getLimit();
    }
    if (hasOffset()) {
      hash = (37 * hash) + OFFSET_FIELD_NUMBER;
      hash = (53 * hash) + getOffset();
    }
    if (hasColumnOffset()) {
      hash = (37 * hash) + COLUMN_OFFSET_FIELD_NUMBER;
      hash = (53 * hash) + getColumnOffset().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.ColumnPaginationFilter}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilterOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    private static Builder create() {
      return new Builder();
    }

    public Builder clear() {
      super.clear();
      limit_ = 0;
      bitField0_ = (bitField0_ & ~0x00000001);
      offset_ = 0;
      bitField0_ = (bitField0_ & ~0x00000002);
      columnOffset_ = com.google.protobuf.ByteString.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000004);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_descriptor;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter build() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Copies the builder state (fields + presence bits) into a new message
    // without enforcing that required fields are set.
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.limit_ = limit_;
      if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
        to_bitField0_ |= 0x00000002;
      }
      result.offset_ = offset_;
      if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
        to_bitField0_ |= 0x00000004;
      }
      result.columnOffset_ = columnOffset_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance()) return this;
      if (other.hasLimit()) {
        setLimit(other.getLimit());
      }
      if (other.hasOffset()) {
        setOffset(other.getOffset());
      }
      if (other.hasColumnOffset()) {
        setColumnOffset(other.getColumnOffset());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      if (!hasLimit()) {
        return false;
      }
      return true;
    }

    // Parses from a stream; on failure, state parsed so far is still merged
    // into this builder before the exception propagates.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // required int32 limit = 1;
    private int limit_ ;
    /**
     * <code>required int32 limit = 1;</code>
     */
    public boolean hasLimit() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required int32 limit = 1;</code>
     */
    public int getLimit() {
      return limit_;
    }
    /**
     * <code>required int32 limit = 1;</code>
     */
    public Builder setLimit(int value) {
      bitField0_ |= 0x00000001;
      limit_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>required int32 limit = 1;</code>
     */
    public Builder clearLimit() {
      bitField0_ = (bitField0_ & ~0x00000001);
      limit_ = 0;
      onChanged();
      return this;
    }

    // optional int32 offset = 2;
    private int offset_ ;
    /**
     * <code>optional int32 offset = 2;</code>
     */
    public boolean hasOffset() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional int32 offset = 2;</code>
     */
    public int getOffset() {
      return offset_;
    }
    /**
     * <code>optional int32 offset = 2;</code>
     */
    public Builder setOffset(int value) {
      bitField0_ |= 0x00000002;
      offset_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional int32 offset = 2;</code>
     */
    public Builder clearOffset() {
      bitField0_ = (bitField0_ & ~0x00000002);
      offset_ = 0;
      onChanged();
      return this;
    }

    // optional bytes column_offset = 3;
    private com.google.protobuf.ByteString columnOffset_ = com.google.protobuf.ByteString.EMPTY;
    /**
     * <code>optional bytes column_offset = 3;</code>
     */
    public boolean hasColumnOffset() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes column_offset = 3;</code>
     */
    public com.google.protobuf.ByteString getColumnOffset() {
      return columnOffset_;
    }
    /**
     * <code>optional bytes column_offset = 3;</code>
     */
    public Builder setColumnOffset(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000004;
      columnOffset_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional bytes column_offset = 3;</code>
     */
    public Builder clearColumnOffset() {
      bitField0_ = (bitField0_ & ~0x00000004);
      columnOffset_ = getDefaultInstance().getColumnOffset();
      onChanged();
      return this;
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnPaginationFilter)
  }

  static {
    defaultInstance = new ColumnPaginationFilter(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.ColumnPaginationFilter)
}

public interface ColumnPrefixFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required bytes prefix = 1;
  /**
   * <code>required bytes prefix = 
1;</code>
   */
  com.google.protobuf.ByteString getPrefix();
}

// NOTE(review): protoc-generated code — do not hand-edit; regenerate from
// the corresponding .proto definition instead. Comments added for review only.
/**
 * Protobuf type {@code hbase.pb.ColumnPrefixFilter}
 */
public static final class ColumnPrefixFilter extends
    com.google.protobuf.GeneratedMessage
    implements ColumnPrefixFilterOrBuilder {
  // Use ColumnPrefixFilter.newBuilder() to construct.
  private ColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  private ColumnPrefixFilter(boolean noInit) {
    this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }

  private static final ColumnPrefixFilter defaultInstance;
  public static ColumnPrefixFilter getDefaultInstance() {
    return defaultInstance;
  }

  public ColumnPrefixFilter getDefaultInstanceForType() {
    return defaultInstance;
  }

  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: reads tags until end of stream (tag 0),
  // decoding prefix (tag 10); unrecognized fields go to unknownFields.
  private ColumnPrefixFilter(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            bitField0_ |= 0x00000001;
            prefix_ = input.readBytes();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
  }

  public static com.google.protobuf.Parser<ColumnPrefixFilter> PARSER =
      new com.google.protobuf.AbstractParser<ColumnPrefixFilter>() {
    public ColumnPrefixFilter parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new ColumnPrefixFilter(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<ColumnPrefixFilter> getParserForType() {
    return PARSER;
  }

  // bitField0_ bit 1 records whether the required prefix field has been set.
  private int bitField0_;
  // required bytes prefix = 1;
  public static final int PREFIX_FIELD_NUMBER = 1;
  private com.google.protobuf.ByteString prefix_;
  /**
   * <code>required bytes prefix = 1;</code>
   */
  public boolean hasPrefix() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required bytes prefix = 1;</code>
   */
  public com.google.protobuf.ByteString getPrefix() {
    return prefix_;
  }

  private void initFields() {
    prefix_ = com.google.protobuf.ByteString.EMPTY;
  }
  // Caches the isInitialized() result: -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    // prefix is the message's only required field.
    if (!hasPrefix()) {
      memoizedIsInitialized = 0;
      return false;
    }
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeBytes(1, prefix_);
    }
    getUnknownFields().writeTo(output);
  }

  // Caches the computed wire size; -1 means "not computed yet".
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(1, prefix_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) obj;

    boolean result = true;
    result = result && (hasPrefix() == other.hasPrefix());
    if (hasPrefix()) {
      result = result && getPrefix()
          .equals(other.getPrefix());
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  // Memoized hash consistent with equals(); 0 means "not computed yet".
  private int memoizedHashCode = 0;
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasPrefix()) {
      hash = (37 * hash) + PREFIX_FIELD_NUMBER;
      hash = (53 * hash) + getPrefix().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.ColumnPrefixFilter}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilterOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    private static Builder create() {
      return new Builder();
    }

    public Builder clear() {
      super.clear();
      prefix_ = com.google.protobuf.ByteString.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_descriptor;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter build() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Copies builder state (prefix + presence bit) into a new message without
    // enforcing that required fields are set.
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.prefix_ = prefix_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance()) return this;
      if (other.hasPrefix()) {
        setPrefix(other.getPrefix());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      if (!hasPrefix()) {
        return false;
      }
      return true;
    }

    // Parses from a stream; on failure, state parsed so far is still merged
    // into this builder before the exception propagates.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // required bytes prefix = 1;
    private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY;
    /**
     * <code>required bytes prefix = 1;</code>
     */
    public boolean hasPrefix() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes prefix = 1;</code>
     */
    public com.google.protobuf.ByteString getPrefix() {
      return prefix_;
    }
    /**
     * <code>required bytes prefix = 1;</code>
     */
    public Builder setPrefix(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      prefix_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>required bytes prefix = 1;</code>
     */
    public Builder clearPrefix() {
      bitField0_ = (bitField0_ & ~0x00000001);
      prefix_ = getDefaultInstance().getPrefix();
      onChanged();
      return this;
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnPrefixFilter)
  }

  static {
    defaultInstance = new ColumnPrefixFilter(true);
    defaultInstance.initFields();
  }
// @@protoc_insertion_point(class_scope:hbase.pb.ColumnPrefixFilter)
}

// NOTE(review): protoc-generated code — do not hand-edit; regenerate from
// the corresponding .proto definition instead.
public interface ColumnRangeFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // optional bytes min_column = 1;
  /**
   * <code>optional bytes min_column = 1;</code>
   */
  boolean hasMinColumn();
  /**
   * <code>optional bytes min_column = 1;</code>
   */
  com.google.protobuf.ByteString getMinColumn();

  // optional bool min_column_inclusive = 2;
  /**
   * <code>optional bool min_column_inclusive = 2;</code>
   */
  boolean hasMinColumnInclusive();
  /**
   * <code>optional bool min_column_inclusive = 2;</code>
   */
  boolean getMinColumnInclusive();

  // optional bytes max_column = 3;
  /**
   * <code>optional bytes max_column = 3;</code>
   */
  boolean hasMaxColumn();
  /**
   * <code>optional bytes max_column = 3;</code>
   */
  com.google.protobuf.ByteString getMaxColumn();

  // optional bool max_column_inclusive = 4;
  /**
   * <code>optional bool max_column_inclusive = 4;</code>
   */
  boolean hasMaxColumnInclusive();
  /**
   * <code>optional bool max_column_inclusive = 4;</code>
   */
  boolean getMaxColumnInclusive();
}
/**
 * Protobuf type {@code hbase.pb.ColumnRangeFilter}
 */
public static final class ColumnRangeFilter extends
    com.google.protobuf.GeneratedMessage
    implements ColumnRangeFilterOrBuilder {
  // Use ColumnRangeFilter.newBuilder() to construct.
private ColumnRangeFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ColumnRangeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ColumnRangeFilter defaultInstance; public static ColumnRangeFilter getDefaultInstance() { return defaultInstance; } public ColumnRangeFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ColumnRangeFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; minColumn_ = input.readBytes(); break; } case 16: { bitField0_ |= 0x00000002; minColumnInclusive_ = input.readBool(); break; } case 26: { bitField0_ |= 0x00000004; maxColumn_ = input.readBytes(); break; } case 32: { bitField0_ |= 0x00000008; maxColumnInclusive_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class); } public static com.google.protobuf.Parser<ColumnRangeFilter> PARSER = new com.google.protobuf.AbstractParser<ColumnRangeFilter>() { public ColumnRangeFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ColumnRangeFilter(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ColumnRangeFilter> getParserForType() { return PARSER; } private int bitField0_; // optional bytes min_column = 1; public static final int MIN_COLUMN_FIELD_NUMBER = 1; private com.google.protobuf.ByteString minColumn_; /** * <code>optional bytes min_column = 1;</code> */ public boolean hasMinColumn() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes min_column = 1;</code> */ public com.google.protobuf.ByteString getMinColumn() { return minColumn_; } // optional bool min_column_inclusive = 2; public static final int MIN_COLUMN_INCLUSIVE_FIELD_NUMBER = 2; private boolean minColumnInclusive_; /** * <code>optional bool min_column_inclusive = 2;</code> */ public boolean hasMinColumnInclusive() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool min_column_inclusive = 2;</code> */ public boolean getMinColumnInclusive() { return minColumnInclusive_; } // optional bytes max_column = 3; public static final int MAX_COLUMN_FIELD_NUMBER = 
3; private com.google.protobuf.ByteString maxColumn_; /** * <code>optional bytes max_column = 3;</code> */ public boolean hasMaxColumn() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bytes max_column = 3;</code> */ public com.google.protobuf.ByteString getMaxColumn() { return maxColumn_; } // optional bool max_column_inclusive = 4; public static final int MAX_COLUMN_INCLUSIVE_FIELD_NUMBER = 4; private boolean maxColumnInclusive_; /** * <code>optional bool max_column_inclusive = 4;</code> */ public boolean hasMaxColumnInclusive() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bool max_column_inclusive = 4;</code> */ public boolean getMaxColumnInclusive() { return maxColumnInclusive_; } private void initFields() { minColumn_ = com.google.protobuf.ByteString.EMPTY; minColumnInclusive_ = false; maxColumn_ = com.google.protobuf.ByteString.EMPTY; maxColumnInclusive_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, minColumn_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, minColumnInclusive_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, maxColumn_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(4, maxColumnInclusive_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, minColumn_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { 
size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, minColumnInclusive_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, maxColumn_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(4, maxColumnInclusive_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) obj; boolean result = true; result = result && (hasMinColumn() == other.hasMinColumn()); if (hasMinColumn()) { result = result && getMinColumn() .equals(other.getMinColumn()); } result = result && (hasMinColumnInclusive() == other.hasMinColumnInclusive()); if (hasMinColumnInclusive()) { result = result && (getMinColumnInclusive() == other.getMinColumnInclusive()); } result = result && (hasMaxColumn() == other.hasMaxColumn()); if (hasMaxColumn()) { result = result && getMaxColumn() .equals(other.getMaxColumn()); } result = result && (hasMaxColumnInclusive() == other.hasMaxColumnInclusive()); if (hasMaxColumnInclusive()) { result = result && (getMaxColumnInclusive() == other.getMaxColumnInclusive()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptorForType().hashCode(); if (hasMinColumn()) { hash = (37 * hash) + MIN_COLUMN_FIELD_NUMBER; hash = (53 * hash) + getMinColumn().hashCode(); } if (hasMinColumnInclusive()) { hash = (37 * hash) + MIN_COLUMN_INCLUSIVE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getMinColumnInclusive()); } if (hasMaxColumn()) { hash = (37 * hash) + MAX_COLUMN_FIELD_NUMBER; hash = (53 * hash) + getMaxColumn().hashCode(); } if (hasMaxColumnInclusive()) { hash = (37 * hash) + MAX_COLUMN_INCLUSIVE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getMaxColumnInclusive()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ColumnRangeFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); minColumn_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); minColumnInclusive_ = false; bitField0_ = (bitField0_ & ~0x00000002); maxColumn_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); maxColumnInclusive_ = false; bitField0_ = (bitField0_ & ~0x00000008); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter build() { 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.minColumn_ = minColumn_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.minColumnInclusive_ = minColumnInclusive_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.maxColumn_ = maxColumn_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.maxColumnInclusive_ = maxColumnInclusive_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance()) return this; if (other.hasMinColumn()) { setMinColumn(other.getMinColumn()); } if (other.hasMinColumnInclusive()) { setMinColumnInclusive(other.getMinColumnInclusive()); } if (other.hasMaxColumn()) { setMaxColumn(other.getMaxColumn()); } if (other.hasMaxColumnInclusive()) { setMaxColumnInclusive(other.getMaxColumnInclusive()); } this.mergeUnknownFields(other.getUnknownFields()); 
return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional bytes min_column = 1; private com.google.protobuf.ByteString minColumn_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes min_column = 1;</code> */ public boolean hasMinColumn() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes min_column = 1;</code> */ public com.google.protobuf.ByteString getMinColumn() { return minColumn_; } /** * <code>optional bytes min_column = 1;</code> */ public Builder setMinColumn(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; minColumn_ = value; onChanged(); return this; } /** * <code>optional bytes min_column = 1;</code> */ public Builder clearMinColumn() { bitField0_ = (bitField0_ & ~0x00000001); minColumn_ = getDefaultInstance().getMinColumn(); onChanged(); return this; } // optional bool min_column_inclusive = 2; private boolean minColumnInclusive_ ; /** * <code>optional bool min_column_inclusive = 2;</code> */ public boolean hasMinColumnInclusive() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool min_column_inclusive = 2;</code> */ public boolean getMinColumnInclusive() { return minColumnInclusive_; } /** * <code>optional bool min_column_inclusive = 2;</code> */ public Builder 
setMinColumnInclusive(boolean value) { bitField0_ |= 0x00000002; minColumnInclusive_ = value; onChanged(); return this; } /** * <code>optional bool min_column_inclusive = 2;</code> */ public Builder clearMinColumnInclusive() { bitField0_ = (bitField0_ & ~0x00000002); minColumnInclusive_ = false; onChanged(); return this; } // optional bytes max_column = 3; private com.google.protobuf.ByteString maxColumn_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes max_column = 3;</code> */ public boolean hasMaxColumn() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bytes max_column = 3;</code> */ public com.google.protobuf.ByteString getMaxColumn() { return maxColumn_; } /** * <code>optional bytes max_column = 3;</code> */ public Builder setMaxColumn(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; maxColumn_ = value; onChanged(); return this; } /** * <code>optional bytes max_column = 3;</code> */ public Builder clearMaxColumn() { bitField0_ = (bitField0_ & ~0x00000004); maxColumn_ = getDefaultInstance().getMaxColumn(); onChanged(); return this; } // optional bool max_column_inclusive = 4; private boolean maxColumnInclusive_ ; /** * <code>optional bool max_column_inclusive = 4;</code> */ public boolean hasMaxColumnInclusive() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bool max_column_inclusive = 4;</code> */ public boolean getMaxColumnInclusive() { return maxColumnInclusive_; } /** * <code>optional bool max_column_inclusive = 4;</code> */ public Builder setMaxColumnInclusive(boolean value) { bitField0_ |= 0x00000008; maxColumnInclusive_ = value; onChanged(); return this; } /** * <code>optional bool max_column_inclusive = 4;</code> */ public Builder clearMaxColumnInclusive() { bitField0_ = (bitField0_ & ~0x00000008); maxColumnInclusive_ = false; onChanged(); return this; } // 
// @@protoc_insertion_point(builder_scope:hbase.pb.ColumnRangeFilter)
}

// Eagerly build the shared immutable default instance for ColumnRangeFilter.
static {
  defaultInstance = new ColumnRangeFilter(true);
  defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:hbase.pb.ColumnRangeFilter)
}

// NOTE(review): this class is protoc-generated (see the @@protoc_insertion_point
// markers). Do not hand-edit message logic here; regenerate from the .proto
// definition instead. Comments below are explanatory only.
public interface CompareFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required .hbase.pb.CompareType compare_op = 1;
  /**
   * <code>required .hbase.pb.CompareType compare_op = 1;</code>
   */
  boolean hasCompareOp();
  /**
   * <code>required .hbase.pb.CompareType compare_op = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp();

  // optional .hbase.pb.Comparator comparator = 2;
  /**
   * <code>optional .hbase.pb.Comparator comparator = 2;</code>
   */
  boolean hasComparator();
  /**
   * <code>optional .hbase.pb.Comparator comparator = 2;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator();
  /**
   * <code>optional .hbase.pb.Comparator comparator = 2;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder();
}
/**
 * Protobuf type {@code hbase.pb.CompareFilter}
 */
public static final class CompareFilter extends
    com.google.protobuf.GeneratedMessage
    implements CompareFilterOrBuilder {
  // Use CompareFilter.newBuilder() to construct.
  private CompareFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // noInit path: used only for the singleton default instance below.
  private CompareFilter(boolean noInit) {
    this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }

  private static final CompareFilter defaultInstance;
  public static CompareFilter getDefaultInstance() {
    return defaultInstance;
  }

  public CompareFilter getDefaultInstanceForType() {
    return defaultInstance;
  }

  // Fields present on the wire but not declared in the schema are preserved here.
  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0).
  // Unknown tags are retained in unknownFields rather than rejected.
  private CompareFilter(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 8: {
            // Field 1 (compare_op): an enum value not known to this binary is
            // kept as an unknown varint field instead of being dropped.
            int rawValue = input.readEnum();
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue);
            if (value == null) {
              unknownFields.mergeVarintField(1, rawValue);
            } else {
              bitField0_ |= 0x00000001;
              compareOp_ = value;
            }
            break;
          }
          case 18: {
            // Field 2 (comparator): if the field appears twice, the second
            // occurrence is merged into the first (standard proto2 semantics).
            org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null;
            if (((bitField0_ & 0x00000002) == 0x00000002)) {
              subBuilder = comparator_.toBuilder();
            }
            comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(comparator_);
              comparator_ = subBuilder.buildPartial();
            }
            bitField0_ |= 0x00000002;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class);
  }

  public static com.google.protobuf.Parser<CompareFilter> PARSER =
      new com.google.protobuf.AbstractParser<CompareFilter>() {
    public CompareFilter parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new CompareFilter(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<CompareFilter> getParserForType() {
    return PARSER;
  }

  // One bit per declared field, recording which fields were explicitly set.
  private int bitField0_;
  // required .hbase.pb.CompareType compare_op = 1;
  public static final int COMPARE_OP_FIELD_NUMBER = 1;
  private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_;
  /**
   * <code>required .hbase.pb.CompareType compare_op = 1;</code>
   */
  public boolean hasCompareOp() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required .hbase.pb.CompareType compare_op = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() {
    return compareOp_;
  }

  // optional .hbase.pb.Comparator comparator = 2;
  public static final int COMPARATOR_FIELD_NUMBER = 2;
  private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_;
  /**
   * <code>optional .hbase.pb.Comparator comparator = 2;</code>
   */
  public boolean hasComparator() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  /**
   * <code>optional .hbase.pb.Comparator comparator = 2;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
    return comparator_;
  }
  /**
   * <code>optional .hbase.pb.Comparator comparator = 2;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
    return comparator_;
  }

  private void initFields() {
    compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
    comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
  }
  // Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    // compare_op is a required field; comparator is only validated if present.
    if (!hasCompareOp()) {
      memoizedIsInitialized = 0;
      return false;
    }
    if (hasComparator()) {
      if (!getComparator().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // Ensures the serialized size is memoized before writing.
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeEnum(1, compareOp_.getNumber());
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      output.writeMessage(2, comparator_);
    }
    getUnknownFields().writeTo(output);
  }

  // Memoized serialized byte size; -1 means not yet computed.
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, compareOp_.getNumber());
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, comparator_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) obj;

    // Field-by-field comparison: presence bits must match, then values.
    boolean result = true;
    result = result && (hasCompareOp() == other.hasCompareOp());
    if (hasCompareOp()) {
      result = result &&
          (getCompareOp() == other.getCompareOp());
    }
    result = result && (hasComparator() == other.hasComparator());
    if (hasComparator()) {
      result = result && getComparator()
          .equals(other.getComparator());
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  // Memoized hash; 0 doubles as the "not yet computed" sentinel.
  private int memoizedHashCode = 0;
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasCompareOp()) {
      hash = (37 * hash) + COMPARE_OP_FIELD_NUMBER;
      hash = (53 * hash) + hashEnum(getCompareOp());
    }
    if (hasComparator()) {
      hash = (37 * hash) + COMPARATOR_FIELD_NUMBER;
      hash = (53 * hash) + getComparator().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Static parse entry points; all delegate to PARSER.
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.CompareFilter}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        getComparatorFieldBuilder();
      }
    }
    private static Builder create() {
      return new Builder();
    }

    // Resets all fields to their proto defaults and clears the presence bits.
    public Builder clear() {
      super.clear();
      compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
      bitField0_ = (bitField0_ & ~0x00000001);
      if (comparatorBuilder_ == null) {
        comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
      } else {
        comparatorBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_descriptor;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
    }

    // build() enforces required fields; buildPartial() does not.
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter build() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.compareOp_ = compareOp_;
      if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
        to_bitField0_ |= 0x00000002;
      }
      if (comparatorBuilder_ == null) {
        result.comparator_ = comparator_;
      } else {
        result.comparator_ = comparatorBuilder_.build();
      }
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Copies only the fields that are set on `other`; merging the default
    // instance is a no-op.
    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) return this;
      if (other.hasCompareOp()) {
        setCompareOp(other.getCompareOp());
      }
      if (other.hasComparator()) {
        mergeComparator(other.getComparator());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      if (!hasCompareOp()) {
        return false;
      }
      if (hasComparator()) {
        if (!getComparator().isInitialized()) {
          return false;
        }
      }
      return true;
    }

    // On parse failure, any fields decoded before the error are still merged
    // in (from the exception's unfinished message) before rethrowing.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // required .hbase.pb.CompareType compare_op = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
    /**
     * <code>required .hbase.pb.CompareType compare_op = 1;</code>
     */
    public boolean hasCompareOp() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.CompareType compare_op = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() {
      return compareOp_;
    }
    /**
     * <code>required .hbase.pb.CompareType compare_op = 1;</code>
     */
    public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      compareOp_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareType compare_op = 1;</code>
     */
    public Builder clearCompareOp() {
      bitField0_ = (bitField0_ & ~0x00000001);
      compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
      onChanged();
      return this;
    }

    // optional .hbase.pb.Comparator comparator = 2;
    // comparator_ holds the value only while comparatorBuilder_ is null; once
    // the lazily-created sub-builder exists, it becomes the source of truth.
    private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_;
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    public boolean hasComparator() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
      if (comparatorBuilder_ == null) {
        return comparator_;
      } else {
        return comparatorBuilder_.getMessage();
      }
    }
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
      if (comparatorBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        comparator_ = value;
        onChanged();
      } else {
        comparatorBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    public Builder setComparator(
        org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) {
      if (comparatorBuilder_ == null) {
        comparator_ = builderForValue.build();
        onChanged();
      } else {
        comparatorBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
      if (comparatorBuilder_ == null) {
        // Merge only if a non-default value is already present; otherwise the
        // incoming value simply replaces the default.
        if (((bitField0_ & 0x00000002) == 0x00000002) &&
            comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) {
          comparator_ =
            org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial();
        } else {
          comparator_ = value;
        }
        onChanged();
      } else {
        comparatorBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    public Builder clearComparator() {
      if (comparatorBuilder_ == null) {
        comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
        onChanged();
      } else {
        comparatorBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getComparatorFieldBuilder().getBuilder();
    }
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
      if (comparatorBuilder_ != null) {
        return comparatorBuilder_.getMessageOrBuilder();
      } else {
        return comparator_;
      }
    }
    /**
     * <code>optional .hbase.pb.Comparator comparator = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>
        getComparatorFieldBuilder() {
      if (comparatorBuilder_ == null) {
        comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>(
                comparator_,
                getParentForChildren(),
                isClean());
        // Ownership transfers to the sub-builder once it exists.
        comparator_ = null;
      }
      return comparatorBuilder_;
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.CompareFilter)
  }

  // Eagerly build the shared immutable default instance for CompareFilter.
  static {
    defaultInstance = new CompareFilter(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.CompareFilter)
}

public interface DependentColumnFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required .hbase.pb.CompareFilter compare_filter = 1;
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  boolean hasCompareFilter();
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();

  // optional bytes column_family = 2;
  /**
   * <code>optional bytes column_family = 2;</code>
   */
  boolean hasColumnFamily();
  /**
   * <code>optional bytes column_family = 2;</code>
   */
  com.google.protobuf.ByteString getColumnFamily();

  // optional bytes column_qualifier = 3;
  /**
   * <code>optional bytes column_qualifier = 3;</code>
   */
  boolean hasColumnQualifier();
  /**
   * <code>optional bytes column_qualifier = 3;</code>
   */
com.google.protobuf.ByteString getColumnQualifier(); // optional bool drop_dependent_column = 4; /** * <code>optional bool drop_dependent_column = 4;</code> */ boolean hasDropDependentColumn(); /** * <code>optional bool drop_dependent_column = 4;</code> */ boolean getDropDependentColumn(); } /** * Protobuf type {@code hbase.pb.DependentColumnFilter} */ public static final class DependentColumnFilter extends com.google.protobuf.GeneratedMessage implements DependentColumnFilterOrBuilder { // Use DependentColumnFilter.newBuilder() to construct. private DependentColumnFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DependentColumnFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DependentColumnFilter defaultInstance; public static DependentColumnFilter getDefaultInstance() { return defaultInstance; } public DependentColumnFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DependentColumnFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = 
compareFilter_.toBuilder(); } compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(compareFilter_); compareFilter_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { bitField0_ |= 0x00000002; columnFamily_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; columnQualifier_ = input.readBytes(); break; } case 32: { bitField0_ |= 0x00000008; dropDependentColumn_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class); } public static com.google.protobuf.Parser<DependentColumnFilter> PARSER = new com.google.protobuf.AbstractParser<DependentColumnFilter>() { public DependentColumnFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DependentColumnFilter(input, extensionRegistry); } }; @java.lang.Override public 
com.google.protobuf.Parser<DependentColumnFilter> getParserForType() { return PARSER; } private int bitField0_; // required .hbase.pb.CompareFilter compare_filter = 1; public static final int COMPARE_FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { return compareFilter_; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { return compareFilter_; } // optional bytes column_family = 2; public static final int COLUMN_FAMILY_FIELD_NUMBER = 2; private com.google.protobuf.ByteString columnFamily_; /** * <code>optional bytes column_family = 2;</code> */ public boolean hasColumnFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes column_family = 2;</code> */ public com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } // optional bytes column_qualifier = 3; public static final int COLUMN_QUALIFIER_FIELD_NUMBER = 3; private com.google.protobuf.ByteString columnQualifier_; /** * <code>optional bytes column_qualifier = 3;</code> */ public boolean hasColumnQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bytes column_qualifier = 3;</code> */ public com.google.protobuf.ByteString getColumnQualifier() { return columnQualifier_; } // optional bool drop_dependent_column = 4; public static final int DROP_DEPENDENT_COLUMN_FIELD_NUMBER = 4; private boolean dropDependentColumn_; /** * <code>optional bool drop_dependent_column = 4;</code> */ public boolean 
hasDropDependentColumn() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bool drop_dependent_column = 4;</code> */ public boolean getDropDependentColumn() { return dropDependentColumn_; } private void initFields() { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); columnFamily_ = com.google.protobuf.ByteString.EMPTY; columnQualifier_ = com.google.protobuf.ByteString.EMPTY; dropDependentColumn_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasCompareFilter()) { memoizedIsInitialized = 0; return false; } if (!getCompareFilter().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, compareFilter_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, columnFamily_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, columnQualifier_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(4, dropDependentColumn_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, compareFilter_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, columnFamily_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, columnQualifier_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += 
// ---------------------------------------------------------------------------
// NOTE(review): protoc-generated code (protobuf-java 2.x GeneratedMessage
// style) for message hbase.pb.DependentColumnFilter. Do not hand-edit;
// regenerate from the Filter .proto definition instead. The comments below
// were added during review only — no code tokens were changed.
// ---------------------------------------------------------------------------
// Tail of getSerializedSize(): fold optional bool field 4 into the size,
// add unknown fields, memoize and return.
com.google.protobuf.CodedOutputStream .computeBoolSize(4, dropDependentColumn_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) obj; boolean result = true; result = result && (hasCompareFilter() == other.hasCompareFilter()); if (hasCompareFilter()) { result = result && getCompareFilter() .equals(other.getCompareFilter()); } result = result && (hasColumnFamily() == other.hasColumnFamily()); if (hasColumnFamily()) { result = result && getColumnFamily() .equals(other.getColumnFamily()); } result = result && (hasColumnQualifier() == other.hasColumnQualifier()); if (hasColumnQualifier()) { result = result && getColumnQualifier() .equals(other.getColumnQualifier()); } result = result && (hasDropDependentColumn() == other.hasDropDependentColumn()); if (hasDropDependentColumn()) { result = result && (getDropDependentColumn() == other.getDropDependentColumn()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCompareFilter()) { hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER; hash = (53 * hash) + getCompareFilter().hashCode(); } if (hasColumnFamily()) { hash = (37 * hash) + COLUMN_FAMILY_FIELD_NUMBER;
// hashCode() (continued): fold each present field into the memoized hash.
hash = (53 * hash) + getColumnFamily().hashCode(); } if (hasColumnQualifier()) { hash = (37 * hash) + COLUMN_QUALIFIER_FIELD_NUMBER; hash = (53 * hash) + getColumnQualifier().hashCode(); } if (hasDropDependentColumn()) { hash = (37 * hash) + DROP_DEPENDENT_COLUMN_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getDropDependentColumn()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static
// Static parse entry points (continued); every overload delegates to PARSER.
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.DependentColumnFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_descriptor; } protected
// Builder: reflection table, constructors, and clear() (resets all 4 fields
// and their presence bits).
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.newBuilder()
private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getCompareFilterFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (compareFilterBuilder_ == null) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); } else { compareFilterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); columnFamily_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); columnQualifier_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); dropDependentColumn_ = false; bitField0_ = (bitField0_ & ~0x00000008); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance(); } public
// build() rejects messages missing required fields; buildPartial() copies
// the set fields plus the presence bitfield into a fresh immutable message.
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (compareFilterBuilder_ == null) { result.compareFilter_ = compareFilter_; } else { result.compareFilter_ = compareFilterBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.columnFamily_ = columnFamily_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.columnQualifier_ = columnQualifier_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.dropDependentColumn_ = dropDependentColumn_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance()) return this; if (other.hasCompareFilter()) { mergeCompareFilter(other.getCompareFilter()); } if (other.hasColumnFamily()) {
// mergeFrom(other) copies only fields present on 'other'; isInitialized()
// demands the 'required' compare_filter field be set and itself initialized.
setColumnFamily(other.getColumnFamily()); } if (other.hasColumnQualifier()) { setColumnQualifier(other.getColumnQualifier()); } if (other.hasDropDependentColumn()) { setDropDependentColumn(other.getDropDependentColumn()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasCompareFilter()) { return false; } if (!getCompareFilter().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.CompareFilter compare_filter = 1;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null)
{ return compareFilter_; } else { return compareFilterBuilder_.getMessage(); } } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (value == null) { throw new NullPointerException(); } compareFilter_ = value; onChanged(); } else { compareFilterBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder setCompareFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { if (compareFilterBuilder_ == null) { compareFilter_ = builderForValue.build(); onChanged(); } else { compareFilterBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); } else { compareFilter_ = value; } onChanged(); } else { compareFilterBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); onChanged(); } else { compareFilterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.CompareFilter
compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCompareFilterFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); } else { return compareFilter_; } } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder() { if (compareFilterBuilder_ == null) { compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( compareFilter_, getParentForChildren(), isClean()); compareFilter_ = null; } return compareFilterBuilder_; } // optional bytes column_family = 2;
private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes column_family = 2;</code> */ public boolean hasColumnFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes column_family = 2;</code> */ public com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } /** * <code>optional bytes column_family = 2;</code> */ public Builder setColumnFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |=
0x00000002; columnFamily_ = value; onChanged(); return this; } /** * <code>optional bytes column_family = 2;</code> */ public Builder clearColumnFamily() { bitField0_ = (bitField0_ & ~0x00000002); columnFamily_ = getDefaultInstance().getColumnFamily(); onChanged(); return this; } // optional bytes column_qualifier = 3;
private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes column_qualifier = 3;</code> */ public boolean hasColumnQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bytes column_qualifier = 3;</code> */ public com.google.protobuf.ByteString getColumnQualifier() { return columnQualifier_; } /** * <code>optional bytes column_qualifier = 3;</code> */ public Builder setColumnQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; columnQualifier_ = value; onChanged(); return this; } /** * <code>optional bytes column_qualifier = 3;</code> */ public Builder clearColumnQualifier() { bitField0_ = (bitField0_ & ~0x00000004); columnQualifier_ = getDefaultInstance().getColumnQualifier(); onChanged(); return this; } // optional bool drop_dependent_column = 4;
private boolean dropDependentColumn_ ; /** * <code>optional bool drop_dependent_column = 4;</code> */ public boolean hasDropDependentColumn() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bool drop_dependent_column = 4;</code> */ public boolean getDropDependentColumn() { return dropDependentColumn_; } /** * <code>optional bool drop_dependent_column = 4;</code> */ public Builder setDropDependentColumn(boolean value) { bitField0_ |= 0x00000008; dropDependentColumn_ = value; onChanged(); return this; } /** * <code>optional bool drop_dependent_column = 4;</code> */ public Builder clearDropDependentColumn() { bitField0_ = (bitField0_ & ~0x00000008); dropDependentColumn_ = false; onChanged(); return this; } //
@@protoc_insertion_point(builder_scope:hbase.pb.DependentColumnFilter) } static { defaultInstance = new DependentColumnFilter(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.DependentColumnFilter)
}
// ---------------------------------------------------------------------------
// NOTE(review): protoc-generated code continues — message hbase.pb.FamilyFilter
// (a thin wrapper around one required CompareFilter) and the FilterList
// interface. Do not hand-edit; regenerate from the Filter .proto definition.
// Comments below were added during review only — no code tokens were changed.
// ---------------------------------------------------------------------------
public interface FamilyFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .hbase.pb.CompareFilter compare_filter = 1;
/** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ boolean hasCompareFilter(); /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); } /** * Protobuf type {@code hbase.pb.FamilyFilter} */ public static final class FamilyFilter extends com.google.protobuf.GeneratedMessage implements FamilyFilterOrBuilder { // Use FamilyFilter.newBuilder() to construct.
// Constructors: the CodedInputStream ctor parses tag 10 (compare_filter),
// merging into any previously-seen value, and preserves unrecognized tags
// in unknownFields via parseUnknownField.
private FamilyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private FamilyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final FamilyFilter defaultInstance; public static FamilyFilter getDefaultInstance() { return defaultInstance; } public FamilyFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FamilyFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = compareFilter_.toBuilder(); } compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(compareFilter_); compareFilter_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build();
// End of parsing ctor; descriptor/reflection plumbing and PARSER singleton.
makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class); } public static com.google.protobuf.Parser<FamilyFilter> PARSER = new com.google.protobuf.AbstractParser<FamilyFilter>() { public FamilyFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new FamilyFilter(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<FamilyFilter> getParserForType() { return PARSER; } private int bitField0_; // required .hbase.pb.CompareFilter compare_filter = 1;
public static final int COMPARE_FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { return compareFilter_; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { return compareFilter_; } private void initFields() { compareFilter_ =
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasCompareFilter()) { memoizedIsInitialized = 0; return false; } if (!getCompareFilter().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, compareFilter_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, compareFilter_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) obj; boolean result = true; result = result && (hasCompareFilter() == other.hasCompareFilter()); if (hasCompareFilter()) { result = result && getCompareFilter() .equals(other.getCompareFilter()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public
int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCompareFilter()) { hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER; hash = (53 * hash) + getCompareFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return
PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.FamilyFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.newBuilder()
private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getCompareFilterFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (compareFilterBuilder_ == null) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); } else { compareFilterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter(this); int from_bitField0_ =
bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (compareFilterBuilder_ == null) { result.compareFilter_ = compareFilter_; } else { result.compareFilter_ = compareFilterBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance()) return this; if (other.hasCompareFilter()) { mergeCompareFilter(other.getCompareFilter()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasCompareFilter()) { return false; } if (!getCompareFilter().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.CompareFilter compare_filter = 1;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ =
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null) { return compareFilter_; } else { return compareFilterBuilder_.getMessage(); } } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (value == null) { throw new NullPointerException(); } compareFilter_ = value; onChanged(); } else { compareFilterBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder setCompareFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { if (compareFilterBuilder_ == null) { compareFilter_ = builderForValue.build(); onChanged(); } else { compareFilterBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && compareFilter_ !=
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); } else { compareFilter_ = value; } onChanged(); } else { compareFilterBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); onChanged(); } else { compareFilterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCompareFilterFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); } else { return compareFilter_; } } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder,
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( compareFilter_, getParentForChildren(), isClean()); compareFilter_ = null; } return compareFilterBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.FamilyFilter)
} static { defaultInstance = new FamilyFilter(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.FamilyFilter)
}
// FilterListOrBuilder: required operator enum (field 1) plus a repeated
// list of hbase.pb.Filter children (field 2).
public interface FilterListOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .hbase.pb.FilterList.Operator operator = 1;
/** * <code>required .hbase.pb.FilterList.Operator operator = 1;</code> */ boolean hasOperator(); /** * <code>required .hbase.pb.FilterList.Operator operator = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator(); // repeated .hbase.pb.Filter filters = 2;
/** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> getFiltersList(); /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index); /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ int getFiltersCount(); /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFiltersOrBuilderList(); /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder( int index); } /** * Protobuf type {@code hbase.pb.FilterList} */ public static final class FilterList extends com.google.protobuf.GeneratedMessage implements FilterListOrBuilder { // Use FilterList.newBuilder() to construct.
// NOTE(review): this file is generated by the protocol buffer compiler (protoc) from
// Filter.proto. Do NOT hand-edit the logic below — change the .proto definition and
// regenerate. Comments added here are review annotations only.
// This span is the interior of the hbase.pb.FilterList message class: constructors,
// wire-format parsing, the Operator enum, field accessors, serialization, and the
// first part of its Builder.

// Builder-driven constructor: adopts the unknown fields collected by the builder.
private FilterList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
// Constructor used only for the defaultInstance singleton (noInit is a marker, unused).
private FilterList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

private static final FilterList defaultInstance;
public static FilterList getDefaultInstance() {
  return defaultInstance;
}

public FilterList getDefaultInstanceForType() {
  return defaultInstance;
}

private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}

// Wire-format parsing constructor: reads tags until end of stream (tag 0).
// Field 1 (operator enum) with an unrecognized numeric value is preserved as a
// varint unknown field; field 2 (filters) accumulates into a lazily-created list.
private FilterList(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 8: {
          int rawValue = input.readEnum();
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.valueOf(rawValue);
          if (value == null) {
            unknownFields.mergeVarintField(1, rawValue);
          } else {
            bitField0_ |= 0x00000001;
            operator_ = value;
          }
          break;
        }
        case 18: {
          // First repeated element: switch filters_ from the immutable empty
          // list installed by initFields() to a mutable ArrayList.
          if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
            filters_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter>();
            mutable_bitField0_ |= 0x00000002;
          }
          filters_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry));
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    // Seal the repeated field and the unknown-field set even when parsing fails,
    // so the partially-built message attached to the exception is immutable.
    if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
      filters_ = java.util.Collections.unmodifiableList(filters_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class);
}

public static com.google.protobuf.Parser<FilterList> PARSER =
    new com.google.protobuf.AbstractParser<FilterList>() {
  public FilterList parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new FilterList(input, extensionRegistry);
  }
};

@java.lang.Override
public com.google.protobuf.Parser<FilterList> getParserForType() {
  return PARSER;
}

/**
 * Protobuf enum {@code hbase.pb.FilterList.Operator}
 */
public enum Operator
    implements com.google.protobuf.ProtocolMessageEnum {
  /**
   * <code>MUST_PASS_ALL = 1;</code>
   */
  MUST_PASS_ALL(0, 1),
  /**
   * <code>MUST_PASS_ONE = 2;</code>
   */
  MUST_PASS_ONE(1, 2),
  ;

  /**
   * <code>MUST_PASS_ALL = 1;</code>
   */
  public static final int MUST_PASS_ALL_VALUE = 1;
  /**
   * <code>MUST_PASS_ONE = 2;</code>
   */
  public static final int MUST_PASS_ONE_VALUE = 2;

  public final int getNumber() { return value; }

  // Maps the wire value (1 or 2) to its constant; returns null for unknown
  // values so the parser can route them to unknown fields.
  public static Operator valueOf(int value) {
    switch (value) {
      case 1: return MUST_PASS_ALL;
      case 2: return MUST_PASS_ONE;
      default: return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<Operator>
      internalGetValueMap() {
    return internalValueMap;
  }
  private static com.google.protobuf.Internal.EnumLiteMap<Operator>
      internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Operator>() {
          public Operator findValueByNumber(int number) {
            return Operator.valueOf(number);
          }
        };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    return getDescriptor().getValues().get(index);
  }
  public final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDescriptor().getEnumTypes().get(0);
  }

  private static final Operator[] VALUES = values();

  public static Operator valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "EnumValueDescriptor is not for this type.");
    }
    return VALUES[desc.getIndex()];
  }

  // index = position in the descriptor; value = wire number. They differ
  // because proto enum numbers here start at 1.
  private final int index;
  private final int value;

  private Operator(int index, int value) {
    this.index = index;
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:hbase.pb.FilterList.Operator)
}

private int bitField0_;
// required .hbase.pb.FilterList.Operator operator = 1;
public static final int OPERATOR_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_;
/**
 * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
 */
public boolean hasOperator() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() {
  return operator_;
}

// repeated .hbase.pb.Filter filters = 2;
public static final int FILTERS_FIELD_NUMBER = 2;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> filters_;
/**
 * <code>repeated .hbase.pb.Filter filters = 2;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> getFiltersList() {
  return filters_;
}
/**
 * <code>repeated .hbase.pb.Filter filters = 2;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
    getFiltersOrBuilderList() {
  return filters_;
}
/**
 * <code>repeated .hbase.pb.Filter filters = 2;</code>
 */
public int getFiltersCount() {
  return filters_.size();
}
/**
 * <code>repeated .hbase.pb.Filter filters = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index) {
  return filters_.get(index);
}
/**
 * <code>repeated .hbase.pb.Filter filters = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder(
    int index) {
  return filters_.get(index);
}

private void initFields() {
  operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
  filters_ = java.util.Collections.emptyList();
}
// Memoized tri-state: -1 = not yet computed, 0 = missing required field(s),
// 1 = fully initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;

  if (!hasOperator()) {
    memoizedIsInitialized = 0;
    return false;
  }
  for (int i = 0; i < getFiltersCount(); i++) {
    if (!getFilters(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  memoizedIsInitialized = 1;
  return true;
}

public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // Populates memoizedSerializedSize before writing (protoc convention).
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeEnum(1, operator_.getNumber());
  }
  for (int i = 0; i < filters_.size(); i++) {
    output.writeMessage(2, filters_.get(i));
  }
  getUnknownFields().writeTo(output);
}

private int memoizedSerializedSize = -1;
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeEnumSize(1, operator_.getNumber());
  }
  for (int i = 0; i < filters_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(2, filters_.get(i));
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}

private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) obj;

  boolean result = true;
  result = result && (hasOperator() == other.hasOperator());
  if (hasOperator()) {
    result = result &&
        (getOperator() == other.getOperator());
  }
  result = result && getFiltersList()
      .equals(other.getFiltersList());
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}

private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasOperator()) {
    hash = (37 * hash) + OPERATOR_FIELD_NUMBER;
    hash = (53 * hash) + hashEnum(getOperator());
  }
  if (getFiltersCount() > 0) {
    hash = (37 * hash) + FILTERS_FIELD_NUMBER;
    hash = (53 * hash) + getFiltersList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}

public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Protobuf type {@code hbase.pb.FilterList}
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessage.Builder<Builder>
   implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterListOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class);
  }

  // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      getFiltersFieldBuilder();
    }
  }
  private static Builder create() {
    return new Builder();
  }

  public Builder clear() {
    super.clear();
    operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
    bitField0_ = (bitField0_ & ~0x00000001);
    if (filtersBuilder_ == null) {
      filters_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
    } else {
      filtersBuilder_.clear();
    }
    return this;
  }

  public Builder clone() {
    return create().mergeFrom(buildPartial());
  }

  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_descriptor;
  }

  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList getDefaultInstanceForType() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance();
  }

  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList build() {
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList buildPartial() {
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
      to_bitField0_ |= 0x00000001;
    }
    result.operator_ = operator_;
    if (filtersBuilder_ == null) {
      // Hand the list to the message and mark it immutable in this builder;
      // a later mutation will re-copy via ensureFiltersIsMutable().
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        filters_ = java.util.Collections.unmodifiableList(filters_);
        bitField0_ = (bitField0_ & ~0x00000002);
      }
      result.filters_ = filters_;
    } else {
      result.filters_ = filtersBuilder_.build();
    }
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }

  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) {
      return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other) {
    if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance()) return this;
    if (other.hasOperator()) {
      setOperator(other.getOperator());
    }
    if (filtersBuilder_ == null) {
      if (!other.filters_.isEmpty()) {
        if (filters_.isEmpty()) {
          // Share the other message's (immutable) list until first mutation.
          filters_ = other.filters_;
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          ensureFiltersIsMutable();
          filters_.addAll(other.filters_);
        }
        onChanged();
      }
    } else {
      if (!other.filters_.isEmpty()) {
        if (filtersBuilder_.isEmpty()) {
          filtersBuilder_.dispose();
          filtersBuilder_ = null;
          filters_ = other.filters_;
          bitField0_ = (bitField0_ & ~0x00000002);
          filtersBuilder_ =
            com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
               getFiltersFieldBuilder() : null;
        } else {
          filtersBuilder_.addAllMessages(other.filters_);
        }
      }
    }
    this.mergeUnknownFields(other.getUnknownFields());
    return this;
  }

  public final boolean isInitialized() {
    if (!hasOperator()) {
      return false;
    }
    for (int i = 0; i < getFiltersCount(); i++) {
      if (!getFilters(i).isInitialized()) {
        return false;
      }
    }
    return true;
  }

  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever was parsed before the failure, then rethrow.
      parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) e.getUnfinishedMessage();
      throw e;
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  private int bitField0_;

  // required .hbase.pb.FilterList.Operator operator = 1;
  private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
  /**
   * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
   */
  public boolean hasOperator() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() {
    return operator_;
  }
  /**
   * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
   */
  public Builder setOperator(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000001;
    operator_ = value;
    onChanged();
    return this;
  }
  /**
   * <code>required .hbase.pb.FilterList.Operator operator = 1;</code>
   */
  public Builder clearOperator() {
    bitField0_ = (bitField0_ & ~0x00000001);
    operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
    onChanged();
    return this;
  }

  // repeated .hbase.pb.Filter filters = 2;
  private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> filters_ =
    java.util.Collections.emptyList();
  private void ensureFiltersIsMutable() {
    if (!((bitField0_ & 0x00000002) == 0x00000002)) {
      filters_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter>(filters_);
      bitField0_ |= 0x00000002;
    }
  }

  // Non-null only once nested builders are in use; then filters_ is ignored.
  private com.google.protobuf.RepeatedFieldBuilder<
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filtersBuilder_;

  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> getFiltersList() {
    if (filtersBuilder_ == null) {
      return java.util.Collections.unmodifiableList(filters_);
    } else {
      return filtersBuilder_.getMessageList();
    }
  }
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  public int getFiltersCount() {
    if (filtersBuilder_ == null) {
      return filters_.size();
    } else {
      return filtersBuilder_.getCount();
    }
  }
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index) {
    if (filtersBuilder_ == null) {
      return filters_.get(index);
    } else {
      return filtersBuilder_.getMessage(index);
    }
  }
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  public Builder setFilters(
      int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
    if (filtersBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureFiltersIsMutable();
      filters_.set(index, value);
      onChanged();
    } else {
      filtersBuilder_.setMessage(index, value);
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  public Builder setFilters(
      int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
    if (filtersBuilder_ == null) {
      ensureFiltersIsMutable();
      filters_.set(index, builderForValue.build());
      onChanged();
    } else {
      filtersBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  public Builder addFilters(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
    if (filtersBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureFiltersIsMutable();
      filters_.add(value);
      onChanged();
    } else {
      filtersBuilder_.addMessage(value);
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  public Builder addFilters(
      int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
    if (filtersBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureFiltersIsMutable();
      filters_.add(index, value);
      onChanged();
    } else {
      filtersBuilder_.addMessage(index, value);
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  public Builder addFilters(
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
    if (filtersBuilder_ == null) {
      ensureFiltersIsMutable();
      filters_.add(builderForValue.build());
      onChanged();
    } else {
      filtersBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
  public Builder addFilters(
      int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
    if (filtersBuilder_ == null) {
      ensureFiltersIsMutable();
      filters_.add(index, builderForValue.build());
      onChanged();
    } else {
      filtersBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Filter filters = 2;</code>
   */
public Builder addAllFilters( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> values) { if (filtersBuilder_ == null) { ensureFiltersIsMutable(); super.addAll(values, filters_); onChanged(); } else { filtersBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ public Builder clearFilters() { if (filtersBuilder_ == null) { filters_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { filtersBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ public Builder removeFilters(int index) { if (filtersBuilder_ == null) { ensureFiltersIsMutable(); filters_.remove(index); onChanged(); } else { filtersBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFiltersBuilder( int index) { return getFiltersFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder( int index) { if (filtersBuilder_ == null) { return filters_.get(index); } else { return filtersBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFiltersOrBuilderList() { if (filtersBuilder_ != null) { return filtersBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(filters_); } } /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder addFiltersBuilder() { return getFiltersFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()); } /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder addFiltersBuilder( int index) { return getFiltersFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()); } /** * <code>repeated .hbase.pb.Filter filters = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder> getFiltersBuilderList() { return getFiltersFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFiltersFieldBuilder() { if (filtersBuilder_ == null) { filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>( filters_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); filters_ = null; } return filtersBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.FilterList) } static { defaultInstance = new FilterList(true); defaultInstance.initFields(); } // 
@@protoc_insertion_point(class_scope:hbase.pb.FilterList) } public interface FilterWrapperOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .hbase.pb.Filter filter = 1; /** * <code>required .hbase.pb.Filter filter = 1;</code> */ boolean hasFilter(); /** * <code>required .hbase.pb.Filter filter = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter(); /** * <code>required .hbase.pb.Filter filter = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder(); } /** * Protobuf type {@code hbase.pb.FilterWrapper} */ public static final class FilterWrapper extends com.google.protobuf.GeneratedMessage implements FilterWrapperOrBuilder { // Use FilterWrapper.newBuilder() to construct. private FilterWrapper(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private FilterWrapper(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final FilterWrapper defaultInstance; public static FilterWrapper getDefaultInstance() { return defaultInstance; } public FilterWrapper getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FilterWrapper( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } 
// NOTE(review): This appears to be protoc-generated code (see the
// @@protoc_insertion_point markers and "Protobuf type" javadoc below).
// Do not hand-edit; regenerate from the Filter.proto definition instead.
//
// Tail of the FilterWrapper parsing constructor: case 10 (field 1,
// wire type 2) reads the required `.hbase.pb.Filter filter` message,
// merging into any previously-read value, then sets bit 0 of bitField0_.
// IOExceptions are rewrapped as InvalidProtocolBufferException with the
// partially-parsed message attached; the finally block freezes unknown
// fields and extensions.
break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = filter_.toBuilder(); } filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(filter_); filter_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class); } public static com.google.protobuf.Parser<FilterWrapper> PARSER = new com.google.protobuf.AbstractParser<FilterWrapper>() { public FilterWrapper parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new FilterWrapper(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<FilterWrapper> getParserForType() { return PARSER; } private int bitField0_; // required .hbase.pb.Filter filter = 1; public static final int FILTER_FIELD_NUMBER = 1; 
// Accessors for the required `filter` field (presence tracked via bit 0 of
// bitField0_), lazy memoized isInitialized/serialized-size, and writeTo.
// isInitialized requires the filter field to be present AND itself
// initialized, per the `required` semantics in the .proto.
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_; /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public boolean hasFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() { return filter_; } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { return filter_; } private void initFields() { filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasFilter()) { memoizedIsInitialized = 0; return false; } if (!getFilter().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, filter_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
// equals() compares field presence, field value, and unknown fields;
// hashCode() is memoized and folds in the field number, the field value,
// and the unknown-field set — the standard protobuf-generated contract.
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) obj; boolean result = true; result = result && (hasFilter() == other.hasFilter()); if (hasFilter()) { result = result && getFilter() .equals(other.getFilter()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFilter()) { hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
// Stream-based parseFrom/parseDelimitedFrom overloads (all delegate to
// PARSER), plus the newBuilder/toBuilder factory plumbing.
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.FilterWrapper} */ public static final class 
// FilterWrapper.Builder: descriptor/accessor-table wiring, constructors
// (the parented ctor is used by nested-builder support), clear/clone.
// maybeForceBuilderInitialization eagerly creates the nested filter
// builder only when alwaysUseFieldBuilders is set.
Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapperOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getFilterFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (filterBuilder_ == null) { filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); } else { filterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance(); } 
// build() throws UninitializedMessageException when the required filter
// field is missing; buildPartial() copies state without that check.
// mergeFrom variants merge another FilterWrapper or parse from a stream
// (re-merging any partially-parsed message before rethrowing).
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (filterBuilder_ == null) { result.filter_ = filter_; } else { result.filter_ = filterBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance()) return this; if (other.hasFilter()) { mergeFilter(other.getFilter()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasFilter()) { return false; } if (!getFilter().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
// Single-field-builder plumbing for the `filter` field: every accessor
// operates on filter_ directly until getFilterFieldBuilder() is first
// called, after which filterBuilder_ owns the value (filter_ is nulled).
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.Filter filter = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_; /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public boolean hasFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() { if (filterBuilder_ == null) { return filter_; } else { return filterBuilder_.getMessage(); } } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (value == null) { throw new NullPointerException(); } filter_ = value; onChanged(); } else { filterBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public Builder setFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) { if (filterBuilder_ == null) { filter_ = builderForValue.build(); onChanged(); } else { filterBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public Builder 
// mergeFilter merges into an existing non-default value via a nested
// builder; clearFilter resets to the default instance and clears bit 0;
// getFilterBuilder marks the field set and hands out the nested builder.
mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); } else { filter_ = value; } onChanged(); } else { filterBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public Builder clearFilter() { if (filterBuilder_ == null) { filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); onChanged(); } else { filterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getFilterFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); } else { return filter_; } } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>( filter_, 
// NOTE(review): protoc-generated code (see @@protoc_insertion_point
// markers) — do not hand-edit; regenerate from the .proto source.
//
// End of FilterWrapper.Builder (lazy SingleFieldBuilder creation) and
// FilterWrapper's static default-instance initializer; then the empty
// FirstKeyOnlyFilterOrBuilder interface and the FirstKeyOnlyFilter
// message, which declares no fields — its parse loop only collects
// unknown fields.
getParentForChildren(), isClean()); filter_ = null; } return filterBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.FilterWrapper) } static { defaultInstance = new FilterWrapper(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.FilterWrapper) } public interface FirstKeyOnlyFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.FirstKeyOnlyFilter} */ public static final class FirstKeyOnlyFilter extends com.google.protobuf.GeneratedMessage implements FirstKeyOnlyFilterOrBuilder { // Use FirstKeyOnlyFilter.newBuilder() to construct. private FirstKeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private FirstKeyOnlyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final FirstKeyOnlyFilter defaultInstance; public static FirstKeyOnlyFilter getDefaultInstance() { return defaultInstance; } public FirstKeyOnlyFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FirstKeyOnlyFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { 
// With no declared fields, isInitialized is always true; writeTo and
// getSerializedSize only account for the unknown-field set.
throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class); } public static com.google.protobuf.Parser<FirstKeyOnlyFilter> PARSER = new com.google.protobuf.AbstractParser<FirstKeyOnlyFilter>() { public FirstKeyOnlyFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new FirstKeyOnlyFilter(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<FirstKeyOnlyFilter> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private 
// equals() falls back to unknown-field comparison only (no fields);
// hashCode() is memoized over descriptor + unknown fields. ByteString
// and byte[] parseFrom overloads all delegate to PARSER.
static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
// Stream parseFrom/parseDelimitedFrom overloads and the newBuilder /
// toBuilder / newBuilderForType factory plumbing.
extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * 
// FirstKeyOnlyFilter.Builder: with no declared fields, clear() only
// delegates to super and clone() round-trips through buildPartial().
Protobuf type {@code hbase.pb.FirstKeyOnlyFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter build() { 
// build/buildPartial/mergeFrom for the field-less message; the static
// initializer creates the shared default instance.
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.FirstKeyOnlyFilter) } static { defaultInstance = new FirstKeyOnlyFilter(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.FirstKeyOnlyFilter) 
// NOTE(review): protoc-generated code — do not hand-edit; regenerate
// from the .proto source. This span closes FirstKeyOnlyFilter, declares
// FirstKeyValueMatchingQualifiersFilterOrBuilder (accessors for the
// `repeated bytes qualifiers = 1` field), and begins the
// FirstKeyValueMatchingQualifiersFilter message; the class continues
// past the end of this chunk.
} public interface FirstKeyValueMatchingQualifiersFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated bytes qualifiers = 1; /** * <code>repeated bytes qualifiers = 1;</code> */ java.util.List<com.google.protobuf.ByteString> getQualifiersList(); /** * <code>repeated bytes qualifiers = 1;</code> */ int getQualifiersCount(); /** * <code>repeated bytes qualifiers = 1;</code> */ com.google.protobuf.ByteString getQualifiers(int index); } /** * Protobuf type {@code hbase.pb.FirstKeyValueMatchingQualifiersFilter} */ public static final class FirstKeyValueMatchingQualifiersFilter extends com.google.protobuf.GeneratedMessage implements FirstKeyValueMatchingQualifiersFilterOrBuilder { // Use FirstKeyValueMatchingQualifiersFilter.newBuilder() to construct. private FirstKeyValueMatchingQualifiersFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private FirstKeyValueMatchingQualifiersFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final FirstKeyValueMatchingQualifiersFilter defaultInstance; public static FirstKeyValueMatchingQualifiersFilter getDefaultInstance() { return defaultInstance; } public FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FirstKeyValueMatchingQualifiersFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch 
// Parse loop: case 10 appends each length-delimited `qualifiers` entry
// to a lazily-created ArrayList; the finally block freezes the list via
// Collections.unmodifiableList before freezing unknown fields.
(tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { qualifiers_ = new java.util.ArrayList<com.google.protobuf.ByteString>(); mutable_bitField0_ |= 0x00000001; } qualifiers_.add(input.readBytes()); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class); } public static com.google.protobuf.Parser<FirstKeyValueMatchingQualifiersFilter> PARSER = new com.google.protobuf.AbstractParser<FirstKeyValueMatchingQualifiersFilter>() { public FirstKeyValueMatchingQualifiersFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new FirstKeyValueMatchingQualifiersFilter(input, 
// Repeated-bytes accessors; getSerializedSize sums per-element sizes
// plus one tag byte per entry (field 1, wire type 2).
extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<FirstKeyValueMatchingQualifiersFilter> getParserForType() { return PARSER; } // repeated bytes qualifiers = 1; public static final int QUALIFIERS_FIELD_NUMBER = 1; private java.util.List<com.google.protobuf.ByteString> qualifiers_; /** * <code>repeated bytes qualifiers = 1;</code> */ public java.util.List<com.google.protobuf.ByteString> getQualifiersList() { return qualifiers_; } /** * <code>repeated bytes qualifiers = 1;</code> */ public int getQualifiersCount() { return qualifiers_.size(); } /** * <code>repeated bytes qualifiers = 1;</code> */ public com.google.protobuf.ByteString getQualifiers(int index) { return qualifiers_.get(index); } private void initFields() { qualifiers_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < qualifiers_.size(); i++) { output.writeBytes(1, qualifiers_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < qualifiers_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(qualifiers_.get(i)); } size += dataSize; size += 1 * getQualifiersList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == 
// equals() compares the qualifiers list and unknown fields; hashCode()
// is memoized and only folds in the field when the list is non-empty.
this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) obj; boolean result = true; result = result && getQualifiersList() .equals(other.getQualifiersList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getQualifiersCount() > 0) { hash = (37 * hash) + QUALIFIERS_FIELD_NUMBER; hash = (53 * hash) + getQualifiersList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
// Remaining parseFrom/parseDelimitedFrom overloads and builder factory
// plumbing; the Builder class begins here and continues past this chunk.
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder 
toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.FirstKeyValueMatchingQualifiersFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); qualifiers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { 
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) == 0x00000001)) { qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_); bitField0_ = (bitField0_ & ~0x00000001); } result.qualifiers_ = qualifiers_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance()) return this; if (!other.qualifiers_.isEmpty()) { if (qualifiers_.isEmpty()) { 
qualifiers_ = other.qualifiers_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureQualifiersIsMutable(); qualifiers_.addAll(other.qualifiers_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // repeated bytes qualifiers = 1; private java.util.List<com.google.protobuf.ByteString> qualifiers_ = java.util.Collections.emptyList(); private void ensureQualifiersIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { qualifiers_ = new java.util.ArrayList<com.google.protobuf.ByteString>(qualifiers_); bitField0_ |= 0x00000001; } } /** * <code>repeated bytes qualifiers = 1;</code> */ public java.util.List<com.google.protobuf.ByteString> getQualifiersList() { return java.util.Collections.unmodifiableList(qualifiers_); } /** * <code>repeated bytes qualifiers = 1;</code> */ public int getQualifiersCount() { return qualifiers_.size(); } /** * <code>repeated bytes qualifiers = 1;</code> */ public com.google.protobuf.ByteString getQualifiers(int index) { return qualifiers_.get(index); } /** * <code>repeated bytes qualifiers = 1;</code> */ public Builder setQualifiers( int index, com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureQualifiersIsMutable(); 
qualifiers_.set(index, value); onChanged(); return this; } /** * <code>repeated bytes qualifiers = 1;</code> */ public Builder addQualifiers(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureQualifiersIsMutable(); qualifiers_.add(value); onChanged(); return this; } /** * <code>repeated bytes qualifiers = 1;</code> */ public Builder addAllQualifiers( java.lang.Iterable<? extends com.google.protobuf.ByteString> values) { ensureQualifiersIsMutable(); super.addAll(values, qualifiers_); onChanged(); return this; } /** * <code>repeated bytes qualifiers = 1;</code> */ public Builder clearQualifiers() { qualifiers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.FirstKeyValueMatchingQualifiersFilter) } static { defaultInstance = new FirstKeyValueMatchingQualifiersFilter(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.FirstKeyValueMatchingQualifiersFilter) } public interface FuzzyRowFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1; /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getFuzzyKeysDataList(); /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index); /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ int getFuzzyKeysDataCount(); /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getFuzzyKeysDataOrBuilderList(); /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( int index); } /** * Protobuf type {@code hbase.pb.FuzzyRowFilter} */ public static final class FuzzyRowFilter extends com.google.protobuf.GeneratedMessage implements FuzzyRowFilterOrBuilder { // Use FuzzyRowFilter.newBuilder() to construct. private FuzzyRowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private FuzzyRowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final FuzzyRowFilter defaultInstance; public static FuzzyRowFilter getDefaultInstance() { return defaultInstance; } public FuzzyRowFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FuzzyRowFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { fuzzyKeysData_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(); mutable_bitField0_ |= 0x00000001; } 
fuzzyKeysData_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { fuzzyKeysData_ = java.util.Collections.unmodifiableList(fuzzyKeysData_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class); } public static com.google.protobuf.Parser<FuzzyRowFilter> PARSER = new com.google.protobuf.AbstractParser<FuzzyRowFilter>() { public FuzzyRowFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new FuzzyRowFilter(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<FuzzyRowFilter> getParserForType() { return PARSER; } // repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1; public static final int FUZZY_KEYS_DATA_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> fuzzyKeysData_; /** * <code>repeated .hbase.pb.BytesBytesPair 
fuzzy_keys_data = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getFuzzyKeysDataList() { return fuzzyKeysData_; } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getFuzzyKeysDataOrBuilderList() { return fuzzyKeysData_; } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public int getFuzzyKeysDataCount() { return fuzzyKeysData_.size(); } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) { return fuzzyKeysData_.get(index); } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( int index) { return fuzzyKeysData_.get(index); } private void initFields() { fuzzyKeysData_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; for (int i = 0; i < getFuzzyKeysDataCount(); i++) { if (!getFuzzyKeysData(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < fuzzyKeysData_.size(); i++) { output.writeMessage(1, fuzzyKeysData_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < fuzzyKeysData_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, 
fuzzyKeysData_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) obj; boolean result = true; result = result && getFuzzyKeysDataList() .equals(other.getFuzzyKeysDataList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getFuzzyKeysDataCount() > 0) { hash = (37 * hash) + FUZZY_KEYS_DATA_FIELD_NUMBER; hash = (53 * hash) + getFuzzyKeysDataList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.FuzzyRowFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getFuzzyKeysDataFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (fuzzyKeysDataBuilder_ == null) { fuzzyKeysData_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { fuzzyKeysDataBuilder_.clear(); } return this; } public Builder clone() { return 
create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter(this); int from_bitField0_ = bitField0_; if (fuzzyKeysDataBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { fuzzyKeysData_ = java.util.Collections.unmodifiableList(fuzzyKeysData_); bitField0_ = (bitField0_ & ~0x00000001); } result.fuzzyKeysData_ = fuzzyKeysData_; } else { result.fuzzyKeysData_ = fuzzyKeysDataBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance()) return this; if (fuzzyKeysDataBuilder_ == null) { if (!other.fuzzyKeysData_.isEmpty()) { if (fuzzyKeysData_.isEmpty()) { fuzzyKeysData_ 
= other.fuzzyKeysData_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureFuzzyKeysDataIsMutable(); fuzzyKeysData_.addAll(other.fuzzyKeysData_); } onChanged(); } } else { if (!other.fuzzyKeysData_.isEmpty()) { if (fuzzyKeysDataBuilder_.isEmpty()) { fuzzyKeysDataBuilder_.dispose(); fuzzyKeysDataBuilder_ = null; fuzzyKeysData_ = other.fuzzyKeysData_; bitField0_ = (bitField0_ & ~0x00000001); fuzzyKeysDataBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getFuzzyKeysDataFieldBuilder() : null; } else { fuzzyKeysDataBuilder_.addAllMessages(other.fuzzyKeysData_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { for (int i = 0; i < getFuzzyKeysDataCount(); i++) { if (!getFuzzyKeysData(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> fuzzyKeysData_ = java.util.Collections.emptyList(); private void ensureFuzzyKeysDataIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { fuzzyKeysData_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(fuzzyKeysData_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> fuzzyKeysDataBuilder_; /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getFuzzyKeysDataList() { if (fuzzyKeysDataBuilder_ == null) { return java.util.Collections.unmodifiableList(fuzzyKeysData_); } else { return fuzzyKeysDataBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public int getFuzzyKeysDataCount() { if (fuzzyKeysDataBuilder_ == null) { return fuzzyKeysData_.size(); } else { return fuzzyKeysDataBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) { if (fuzzyKeysDataBuilder_ == null) { return fuzzyKeysData_.get(index); } else { return fuzzyKeysDataBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public Builder setFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (fuzzyKeysDataBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFuzzyKeysDataIsMutable(); fuzzyKeysData_.set(index, value); onChanged(); } else { fuzzyKeysDataBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public Builder setFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (fuzzyKeysDataBuilder_ == null) { ensureFuzzyKeysDataIsMutable(); fuzzyKeysData_.set(index, builderForValue.build()); onChanged(); } else { 
fuzzyKeysDataBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public Builder addFuzzyKeysData(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (fuzzyKeysDataBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFuzzyKeysDataIsMutable(); fuzzyKeysData_.add(value); onChanged(); } else { fuzzyKeysDataBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public Builder addFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (fuzzyKeysDataBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFuzzyKeysDataIsMutable(); fuzzyKeysData_.add(index, value); onChanged(); } else { fuzzyKeysDataBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public Builder addFuzzyKeysData( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (fuzzyKeysDataBuilder_ == null) { ensureFuzzyKeysDataIsMutable(); fuzzyKeysData_.add(builderForValue.build()); onChanged(); } else { fuzzyKeysDataBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public Builder addFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (fuzzyKeysDataBuilder_ == null) { ensureFuzzyKeysDataIsMutable(); fuzzyKeysData_.add(index, builderForValue.build()); onChanged(); } else { fuzzyKeysDataBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public Builder addAllFuzzyKeysData( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values) { if (fuzzyKeysDataBuilder_ == null) { ensureFuzzyKeysDataIsMutable(); super.addAll(values, fuzzyKeysData_); onChanged(); } else { fuzzyKeysDataBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public Builder clearFuzzyKeysData() { if (fuzzyKeysDataBuilder_ == null) { fuzzyKeysData_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { fuzzyKeysDataBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public Builder removeFuzzyKeysData(int index) { if (fuzzyKeysDataBuilder_ == null) { ensureFuzzyKeysDataIsMutable(); fuzzyKeysData_.remove(index); onChanged(); } else { fuzzyKeysDataBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getFuzzyKeysDataBuilder( int index) { return getFuzzyKeysDataFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( int index) { if (fuzzyKeysDataBuilder_ == null) { return fuzzyKeysData_.get(index); } else { return fuzzyKeysDataBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getFuzzyKeysDataOrBuilderList() { if (fuzzyKeysDataBuilder_ != null) { return fuzzyKeysDataBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(fuzzyKeysData_); } } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder() { return getFuzzyKeysDataFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder( int index) { return getFuzzyKeysDataFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder> getFuzzyKeysDataBuilderList() { return getFuzzyKeysDataFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getFuzzyKeysDataFieldBuilder() { if (fuzzyKeysDataBuilder_ == null) { fuzzyKeysDataBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( fuzzyKeysData_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); 
fuzzyKeysData_ = null;
        }
        return fuzzyKeysDataBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.FuzzyRowFilter)
    }

    static {
      defaultInstance = new FuzzyRowFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.FuzzyRowFilter)
  }

  // NOTE(review): everything below is protoc-generated (protobuf-java 2.x
  // "GeneratedMessage" style). Do not hand-edit; change the .proto source
  // and regenerate instead.

  public interface InclusiveStopFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bytes stop_row_key = 1;
    /**
     * <code>optional bytes stop_row_key = 1;</code>
     */
    boolean hasStopRowKey();
    /**
     * <code>optional bytes stop_row_key = 1;</code>
     */
    com.google.protobuf.ByteString getStopRowKey();
  }
  /**
   * Protobuf type {@code hbase.pb.InclusiveStopFilter}
   *
   * Generated message with a single optional bytes field (stop_row_key,
   * field number 1).
   */
  public static final class InclusiveStopFilter extends
      com.google.protobuf.GeneratedMessage
      implements InclusiveStopFilterOrBuilder {
    // Use InclusiveStopFilter.newBuilder() to construct.
    private InclusiveStopFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit=true path: used only for the static defaultInstance singleton.
    private InclusiveStopFilter(boolean noInit) {
      this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
    }

    private static final InclusiveStopFilter defaultInstance;
    public static InclusiveStopFilter getDefaultInstance() {
      return defaultInstance;
    }

    public InclusiveStopFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: reads the wire format until tag 0 (end of
    // message); tag 10 (field 1, wire type 2) carries stop_row_key.
    // Unknown fields are preserved in unknownFields.
    private InclusiveStopFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              stopRowKey_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class);
    }

    public static com.google.protobuf.Parser<InclusiveStopFilter> PARSER =
        new com.google.protobuf.AbstractParser<InclusiveStopFilter>() {
      public InclusiveStopFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new InclusiveStopFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<InclusiveStopFilter> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional bytes stop_row_key = 1;
    public static final int STOP_ROW_KEY_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString stopRowKey_;
    /**
     * <code>optional bytes stop_row_key = 1;</code>
     */
    public boolean hasStopRowKey() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bytes stop_row_key = 1;</code>
     */
    public com.google.protobuf.ByteString getStopRowKey() {
      return stopRowKey_;
    }

    private void initFields() {
      stopRowKey_ = com.google.protobuf.ByteString.EMPTY;
    }
    private byte memoizedIsInitialized = -1;
    // No required fields, so initialization always succeeds; result is memoized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, stopRowKey_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, stopRowKey_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) obj;

      boolean result = true;
      result = result && (hasStopRowKey() == other.hasStopRowKey());
      if (hasStopRowKey()) {
        result = result && getStopRowKey()
            .equals(other.getStopRowKey());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasStopRowKey()) {
        hash = (37 * hash) + STOP_ROW_KEY_FIELD_NUMBER;
        hash = (53 * hash) + getStopRowKey().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.InclusiveStopFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        stopRowKey_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.stopRowKey_ = stopRowKey_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance()) return this;
        if (other.hasStopRowKey()) {
          setStopRowKey(other.getStopRowKey());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional bytes stop_row_key = 1;
      private com.google.protobuf.ByteString stopRowKey_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes stop_row_key = 1;</code>
       */
      public boolean hasStopRowKey() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bytes stop_row_key = 1;</code>
       */
      public com.google.protobuf.ByteString getStopRowKey() {
        return stopRowKey_;
      }
      /**
       * <code>optional bytes stop_row_key = 1;</code>
       */
      public Builder setStopRowKey(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        stopRowKey_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes stop_row_key = 1;</code>
       */
      public Builder clearStopRowKey() {
        bitField0_ = (bitField0_ & ~0x00000001);
        stopRowKey_ = getDefaultInstance().getStopRowKey();
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.InclusiveStopFilter)
    }

    static {
      defaultInstance = new InclusiveStopFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.InclusiveStopFilter)
  }

  public interface KeyOnlyFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool len_as_val = 1;
    /**
     * <code>required bool len_as_val = 1;</code>
     */
    boolean hasLenAsVal();
    /**
     * <code>required bool len_as_val = 1;</code>
     */
    boolean getLenAsVal();
  }
  /**
   * Protobuf type {@code hbase.pb.KeyOnlyFilter}
   *
   * Generated message with a single required bool field (len_as_val,
   * field number 1).
   */
  public static final class KeyOnlyFilter extends
      com.google.protobuf.GeneratedMessage
      implements KeyOnlyFilterOrBuilder {
    // Use KeyOnlyFilter.newBuilder() to construct.
private KeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit=true path: used only for the static defaultInstance singleton.
    private KeyOnlyFilter(boolean noInit) {
      this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
    }

    private static final KeyOnlyFilter defaultInstance;
    public static KeyOnlyFilter getDefaultInstance() {
      return defaultInstance;
    }

    public KeyOnlyFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: tag 8 (field 1, wire type 0/varint) carries
    // len_as_val; unknown fields are preserved.
    private KeyOnlyFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              lenAsVal_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class);
    }

    public static com.google.protobuf.Parser<KeyOnlyFilter> PARSER =
        new com.google.protobuf.AbstractParser<KeyOnlyFilter>() {
      public KeyOnlyFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new KeyOnlyFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<KeyOnlyFilter> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // required bool len_as_val = 1;
    public static final int LEN_AS_VAL_FIELD_NUMBER = 1;
    private boolean lenAsVal_;
    /**
     * <code>required bool len_as_val = 1;</code>
     */
    public boolean hasLenAsVal() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bool len_as_val = 1;</code>
     */
    public boolean getLenAsVal() {
      return lenAsVal_;
    }

    private void initFields() {
      lenAsVal_ = false;
    }
    private byte memoizedIsInitialized = -1;
    // len_as_val is a required field, so an instance without it set is
    // uninitialized; the result is memoized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasLenAsVal()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, lenAsVal_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, lenAsVal_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) obj;

      boolean result = true;
      result = result && (hasLenAsVal() == other.hasLenAsVal());
      if (hasLenAsVal()) {
        result = result && (getLenAsVal()
            == other.getLenAsVal());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLenAsVal()) {
        hash = (37 * hash) + LEN_AS_VAL_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getLenAsVal());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.KeyOnlyFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        lenAsVal_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.lenAsVal_ = lenAsVal_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance()) return this;
        if (other.hasLenAsVal()) {
          setLenAsVal(other.getLenAsVal());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasLenAsVal()) {
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required bool len_as_val = 1;
      private boolean lenAsVal_ ;
      /**
       * <code>required bool len_as_val = 1;</code>
       */
      public boolean hasLenAsVal() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bool len_as_val = 1;</code>
       */
      public boolean getLenAsVal() {
        return lenAsVal_;
      }
      /**
       * <code>required bool len_as_val = 1;</code>
       */
      public Builder setLenAsVal(boolean value) {
        bitField0_ |= 0x00000001;
        lenAsVal_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bool len_as_val = 1;</code>
       */
      public Builder clearLenAsVal() {
        bitField0_ = (bitField0_ & ~0x00000001);
        lenAsVal_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.KeyOnlyFilter)
    }

    static {
      defaultInstance = new KeyOnlyFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.KeyOnlyFilter)
  }

  public interface MultipleColumnPrefixFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated bytes sorted_prefixes = 1;
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    java.util.List<com.google.protobuf.ByteString> getSortedPrefixesList();
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    int getSortedPrefixesCount();
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    com.google.protobuf.ByteString getSortedPrefixes(int index);
  }
  /**
   * Protobuf type {@code hbase.pb.MultipleColumnPrefixFilter}
   */
  public static final class MultipleColumnPrefixFilter extends
      com.google.protobuf.GeneratedMessage
      implements
MultipleColumnPrefixFilterOrBuilder {
  // NOTE(review): protoc-generated code (interior of message class
  // hbase.pb.MultipleColumnPrefixFilter; the class header lies above this
  // chunk). Do not hand-edit — regenerate from the .proto definition.
  // Wire contract visible below: repeated bytes sorted_prefixes = 1.

  // Use MultipleColumnPrefixFilter.newBuilder() to construct.
  private MultipleColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // Bootstrap constructor for the singleton default instance; fields are
  // populated by initFields() from the static initializer.
  private MultipleColumnPrefixFilter(boolean noInit) {
    this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }

  private static final MultipleColumnPrefixFilter defaultInstance;
  public static MultipleColumnPrefixFilter getDefaultInstance() {
    return defaultInstance;
  }

  public MultipleColumnPrefixFilter getDefaultInstanceForType() {
    return defaultInstance;
  }

  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }

  // Parsing constructor: reads the message from a coded stream. Tag 10
  // (field 1, length-delimited) appends one prefix to sortedPrefixes_;
  // unrecognized fields are preserved in unknownFields; the repeated list
  // is made unmodifiable in the finally block.
  private MultipleColumnPrefixFilter(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
              sortedPrefixes_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
              mutable_bitField0_ |= 0x00000001;
            }
            sortedPrefixes_.add(input.readBytes());
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
        sortedPrefixes_ = java.util.Collections.unmodifiableList(sortedPrefixes_);
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class);
  }

  public static com.google.protobuf.Parser<MultipleColumnPrefixFilter> PARSER =
      new com.google.protobuf.AbstractParser<MultipleColumnPrefixFilter>() {
    public MultipleColumnPrefixFilter parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new MultipleColumnPrefixFilter(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<MultipleColumnPrefixFilter> getParserForType() {
    return PARSER;
  }

  // repeated bytes sorted_prefixes = 1;
  public static final int SORTED_PREFIXES_FIELD_NUMBER = 1;
  private java.util.List<com.google.protobuf.ByteString> sortedPrefixes_;
  /**
   * <code>repeated bytes sorted_prefixes = 1;</code>
   */
  public java.util.List<com.google.protobuf.ByteString>
      getSortedPrefixesList() {
    return sortedPrefixes_;
  }
  /**
   * <code>repeated bytes sorted_prefixes = 1;</code>
   */
  public int getSortedPrefixesCount() {
    return sortedPrefixes_.size();
  }
  /**
   * <code>repeated bytes sorted_prefixes = 1;</code>
   */
  public com.google.protobuf.ByteString getSortedPrefixes(int index) {
    return sortedPrefixes_.get(index);
  }

  private void initFields() {
    sortedPrefixes_ = java.util.Collections.emptyList();
  }
  // Memoized initialization check: -1 = not computed, 1 = true, 0 = false.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
    for (int i = 0; i < sortedPrefixes_.size(); i++) {
      output.writeBytes(1, sortedPrefixes_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    {
      int dataSize = 0;
      for (int i = 0; i < sortedPrefixes_.size(); i++) {
        dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(sortedPrefixes_.get(i));
      }
      size += dataSize;
      // One byte of tag overhead per repeated element.
      size += 1 * getSortedPrefixesList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) obj;

    boolean result = true;
    result = result && getSortedPrefixesList()
        .equals(other.getSortedPrefixesList());
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  private int memoizedHashCode = 0;
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (getSortedPrefixesCount() > 0) {
      hash = (37 * hash) + SORTED_PREFIXES_FIELD_NUMBER;
      hash = (53 * hash) + getSortedPrefixesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Static parse entry points — all delegate to PARSER.
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.MultipleColumnPrefixFilter}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
      implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilterOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    private static Builder create() {
      return new Builder();
    }

    public Builder clear() {
      super.clear();
      sortedPrefixes_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter build() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Transfers the builder's list into the message; ownership of the
    // (now unmodifiable) list moves to the result, so the builder's
    // has-bit is cleared.
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter(this);
      int from_bitField0_ = bitField0_;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        sortedPrefixes_ = java.util.Collections.unmodifiableList(sortedPrefixes_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.sortedPrefixes_ = sortedPrefixes_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance()) return this;
      if (!other.sortedPrefixes_.isEmpty()) {
        if (sortedPrefixes_.isEmpty()) {
          sortedPrefixes_ = other.sortedPrefixes_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureSortedPrefixesIsMutable();
          sortedPrefixes_.addAll(other.sortedPrefixes_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // repeated bytes sorted_prefixes = 1;
    private java.util.List<com.google.protobuf.ByteString> sortedPrefixes_ = java.util.Collections.emptyList();
    private void ensureSortedPrefixesIsMutable() {
      if (!((bitField0_ & 0x00000001) == 0x00000001)) {
        sortedPrefixes_ = new java.util.ArrayList<com.google.protobuf.ByteString>(sortedPrefixes_);
        bitField0_ |= 0x00000001;
      }
    }
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    public java.util.List<com.google.protobuf.ByteString>
        getSortedPrefixesList() {
      return java.util.Collections.unmodifiableList(sortedPrefixes_);
    }
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    public int getSortedPrefixesCount() {
      return sortedPrefixes_.size();
    }
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    public com.google.protobuf.ByteString getSortedPrefixes(int index) {
      return sortedPrefixes_.get(index);
    }
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    public Builder setSortedPrefixes(
        int index, com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureSortedPrefixesIsMutable();
      sortedPrefixes_.set(index, value);
      onChanged();
      return this;
    }
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    public Builder addSortedPrefixes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureSortedPrefixesIsMutable();
      sortedPrefixes_.add(value);
      onChanged();
      return this;
    }
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    public Builder addAllSortedPrefixes(
        java.lang.Iterable<?
        extends com.google.protobuf.ByteString> values) {
      ensureSortedPrefixesIsMutable();
      super.addAll(values, sortedPrefixes_);
      onChanged();
      return this;
    }
    /**
     * <code>repeated bytes sorted_prefixes = 1;</code>
     */
    public Builder clearSortedPrefixes() {
      sortedPrefixes_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.MultipleColumnPrefixFilter)
  }

  static {
    defaultInstance = new MultipleColumnPrefixFilter(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.MultipleColumnPrefixFilter)
}

// NOTE(review): protoc-generated code — do not hand-edit; regenerate from
// the .proto definition. Message contract: required int64 page_size = 1.
public interface PageFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required int64 page_size = 1;
  /**
   * <code>required int64 page_size = 1;</code>
   */
  boolean hasPageSize();
  /**
   * <code>required int64 page_size = 1;</code>
   */
  long getPageSize();
}
/**
 * Protobuf type {@code hbase.pb.PageFilter}
 */
public static final class PageFilter extends
    com.google.protobuf.GeneratedMessage
    implements PageFilterOrBuilder {
  // Use PageFilter.newBuilder() to construct.
  private PageFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // Bootstrap constructor for the singleton default instance.
  private PageFilter(boolean noInit) {
    this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }

  private static final PageFilter defaultInstance;
  public static PageFilter getDefaultInstance() {
    return defaultInstance;
  }

  public PageFilter getDefaultInstanceForType() {
    return defaultInstance;
  }

  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }

  // Parsing constructor: tag 8 (field 1, varint) sets page_size; unknown
  // fields are preserved.
  private PageFilter(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 8: {
            bitField0_ |= 0x00000001;
            pageSize_ = input.readInt64();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.Builder.class);
  }

  public static com.google.protobuf.Parser<PageFilter> PARSER =
      new com.google.protobuf.AbstractParser<PageFilter>() {
    public PageFilter parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new PageFilter(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<PageFilter> getParserForType() {
    return PARSER;
  }

  private int bitField0_;
  // required int64 page_size = 1;
  public static final int PAGE_SIZE_FIELD_NUMBER = 1;
  private long pageSize_;
  /**
   * <code>required int64 page_size = 1;</code>
   */
  public boolean hasPageSize() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required int64 page_size = 1;</code>
   */
  public long getPageSize() {
    return pageSize_;
  }

  private void initFields() {
    pageSize_ = 0L;
  }
  // Memoized initialization check: -1 = not computed, 1 = true, 0 = false.
  // page_size is a required field, so it must be set for the message to be
  // initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    if (!hasPageSize()) {
      memoizedIsInitialized = 0;
      return false;
    }
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeInt64(1, pageSize_);
    }
    getUnknownFields().writeTo(output);
  }

  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(1, pageSize_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) obj;

    boolean result = true;
    result = result && (hasPageSize() == other.hasPageSize());
    if (hasPageSize()) {
      result = result && (getPageSize() == other.getPageSize());
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  private int memoizedHashCode = 0;
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasPageSize()) {
      hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
      hash = (53 * hash) + hashLong(getPageSize());
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Static parse entry points — all delegate to PARSER.
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.PageFilter}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
      implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilterOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    private static Builder create() {
      return new Builder();
    }

    public Builder clear() {
      super.clear();
      pageSize_ = 0L;
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_descriptor;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter build() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.pageSize_ = pageSize_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance()) return this;
      if (other.hasPageSize()) {
        setPageSize(other.getPageSize());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      if (!hasPageSize()) {
        return false;
      }
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // required int64 page_size = 1;
    private long pageSize_ ;
    /**
     * <code>required int64 page_size = 1;</code>
     */
    public boolean hasPageSize() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required int64 page_size = 1;</code>
     */
    public long getPageSize() {
      return pageSize_;
    }
    /**
     * <code>required int64 page_size = 1;</code>
     */
    public Builder setPageSize(long value) {
      bitField0_ |= 0x00000001;
      pageSize_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>required int64 page_size = 1;</code>
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000001);
      pageSize_ = 0L;
      onChanged();
      return this;
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.PageFilter)
  }

  static {
    defaultInstance = new PageFilter(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.PageFilter)
}

// NOTE(review): protoc-generated code — do not hand-edit; regenerate from
// the .proto definition. Message contract: optional bytes prefix = 1.
public interface PrefixFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // optional bytes prefix = 1;
  /**
   * <code>optional bytes prefix = 1;</code>
   */
  boolean hasPrefix();
  /**
   * <code>optional bytes prefix = 1;</code>
   */
  com.google.protobuf.ByteString getPrefix();
}
/**
 * Protobuf type {@code hbase.pb.PrefixFilter}
 */
public static final class PrefixFilter extends
    com.google.protobuf.GeneratedMessage
    implements PrefixFilterOrBuilder {
  // Use PrefixFilter.newBuilder() to construct.
private PrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private PrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final PrefixFilter defaultInstance; public static PrefixFilter getDefaultInstance() { return defaultInstance; } public PrefixFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private PrefixFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; prefix_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.Builder.class); } public static com.google.protobuf.Parser<PrefixFilter> PARSER = new com.google.protobuf.AbstractParser<PrefixFilter>() { public PrefixFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new PrefixFilter(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<PrefixFilter> getParserForType() { return PARSER; } private int bitField0_; // optional bytes prefix = 1; public static final int PREFIX_FIELD_NUMBER = 1; private com.google.protobuf.ByteString prefix_; /** * <code>optional bytes prefix = 1;</code> */ public boolean hasPrefix() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes prefix = 1;</code> */ public com.google.protobuf.ByteString getPrefix() { return prefix_; } private void initFields() { prefix_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, prefix_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, 
prefix_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) obj; boolean result = true; result = result && (hasPrefix() == other.hasPrefix()); if (hasPrefix()) { result = result && getPrefix() .equals(other.getPrefix()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPrefix()) { hash = (37 * hash) + PREFIX_FIELD_NUMBER; hash = (53 * hash) + getPrefix().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.PrefixFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); prefix_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.prefix_ = prefix_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance()) return this; if (other.hasPrefix()) { setPrefix(other.getPrefix()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter 
parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional bytes prefix = 1; private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes prefix = 1;</code> */ public boolean hasPrefix() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes prefix = 1;</code> */ public com.google.protobuf.ByteString getPrefix() { return prefix_; } /** * <code>optional bytes prefix = 1;</code> */ public Builder setPrefix(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; prefix_ = value; onChanged(); return this; } /** * <code>optional bytes prefix = 1;</code> */ public Builder clearPrefix() { bitField0_ = (bitField0_ & ~0x00000001); prefix_ = getDefaultInstance().getPrefix(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.PrefixFilter) } static { defaultInstance = new PrefixFilter(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.PrefixFilter) } public interface QualifierFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .hbase.pb.CompareFilter compare_filter = 1; /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ boolean hasCompareFilter(); /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder 
getCompareFilterOrBuilder(); } /** * Protobuf type {@code hbase.pb.QualifierFilter} */ public static final class QualifierFilter extends com.google.protobuf.GeneratedMessage implements QualifierFilterOrBuilder { // Use QualifierFilter.newBuilder() to construct. private QualifierFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private QualifierFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final QualifierFilter defaultInstance; public static QualifierFilter getDefaultInstance() { return defaultInstance; } public QualifierFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private QualifierFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = compareFilter_.toBuilder(); } compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(compareFilter_); compareFilter_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.Builder.class); } public static com.google.protobuf.Parser<QualifierFilter> PARSER = new com.google.protobuf.AbstractParser<QualifierFilter>() { public QualifierFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new QualifierFilter(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<QualifierFilter> getParserForType() { return PARSER; } private int bitField0_; // required .hbase.pb.CompareFilter compare_filter = 1; public static final int COMPARE_FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter 
getCompareFilter() { return compareFilter_; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { return compareFilter_; } private void initFields() { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasCompareFilter()) { memoizedIsInitialized = 0; return false; } if (!getCompareFilter().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, compareFilter_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, compareFilter_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) obj; boolean result = true; result 
= result && (hasCompareFilter() == other.hasCompareFilter()); if (hasCompareFilter()) { result = result && getCompareFilter() .equals(other.getCompareFilter()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCompareFilter()) { hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER; hash = (53 * hash) + getCompareFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.QualifierFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getCompareFilterFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (compareFilterBuilder_ == null) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); } else { compareFilterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = buildPartial(); if 
(!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (compareFilterBuilder_ == null) { result.compareFilter_ = compareFilter_; } else { result.compareFilter_ = compareFilterBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance()) return this; if (other.hasCompareFilter()) { mergeCompareFilter(other.getCompareFilter()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasCompareFilter()) { return false; } if (!getCompareFilter().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.CompareFilter compare_filter = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null) { return compareFilter_; } else { return compareFilterBuilder_.getMessage(); } } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (value == null) { throw new NullPointerException(); } compareFilter_ = value; onChanged(); } else { compareFilterBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder setCompareFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { if (compareFilterBuilder_ == null) { compareFilter_ = builderForValue.build(); onChanged(); } else { compareFilterBuilder_.setMessage(builderForValue.build()); } 
bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); } else { compareFilter_ = value; } onChanged(); } else { compareFilterBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); onChanged(); } else { compareFilterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCompareFilterFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); } else { return compareFilter_; } } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>
    getCompareFilterFieldBuilder() {
  if (compareFilterBuilder_ == null) {
    compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
            compareFilter_,
            getParentForChildren(),
            isClean());
    compareFilter_ = null;
  }
  return compareFilterBuilder_;
}

// @@protoc_insertion_point(builder_scope:hbase.pb.QualifierFilter)
}

static {
  defaultInstance = new QualifierFilter(true);
  defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:hbase.pb.QualifierFilter)
}

// NOTE(review): this is protoc-generated code (protobuf-java 2.5 style).
// Do not hand-edit; change the .proto definition and regenerate instead.
public interface RandomRowFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required float chance = 1;
  /**
   * <code>required float chance = 1;</code>
   */
  boolean hasChance();
  /**
   * <code>required float chance = 1;</code>
   */
  float getChance();
}
/**
 * Protobuf type {@code hbase.pb.RandomRowFilter}
 */
public static final class RandomRowFilter extends
    com.google.protobuf.GeneratedMessage
    implements RandomRowFilterOrBuilder {
  // Use RandomRowFilter.newBuilder() to construct.
  private RandomRowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // noInit constructor used only for the singleton default instance below.
  private RandomRowFilter(boolean noInit) {
    this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }

  private static final RandomRowFilter defaultInstance;
  public static RandomRowFilter getDefaultInstance() {
    return defaultInstance;
  }

  public RandomRowFilter getDefaultInstanceForType() {
    return defaultInstance;
  }

  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-parsing constructor: reads tags until EOF (tag 0); tag 13 is
  // field 1 ("chance", float/fixed32); anything else is preserved as an
  // unknown field. The finally block runs even on parse failure so the
  // partially-built message carries what was read.
  private RandomRowFilter(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 13: {
            bitField0_ |= 0x00000001;
            chance_ = input.readFloat();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class);
  }

  public static com.google.protobuf.Parser<RandomRowFilter> PARSER =
      new com.google.protobuf.AbstractParser<RandomRowFilter>() {
    public RandomRowFilter parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new RandomRowFilter(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<RandomRowFilter> getParserForType() {
    return PARSER;
  }

  private int bitField0_;
  // required float chance = 1;
  public static final int CHANCE_FIELD_NUMBER = 1;
  private float chance_;
  /**
   * <code>required float chance = 1;</code>
   */
  public boolean hasChance() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required float chance = 1;</code>
   */
  public float getChance() {
    return chance_;
  }

  private void initFields() {
    chance_ = 0F;
  }
  // Memoized: -1 = not computed, 0 = missing required field, 1 = ok.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    if (!hasChance()) {
      memoizedIsInitialized = 0;
      return false;
    }
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeFloat(1, chance_);
    }
    getUnknownFields().writeTo(output);
  }

  // Lazily computed and cached; -1 means "not yet computed".
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeFloatSize(1, chance_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  // Float comparison uses floatToIntBits so NaN == NaN and +0.0 != -0.0,
  // keeping equals consistent with hashCode below.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) obj;

    boolean result = true;
    result = result && (hasChance() == other.hasChance());
    if (hasChance()) {
      result = result && (Float.floatToIntBits(getChance()) == Float.floatToIntBits(other.getChance()));
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  private int memoizedHashCode = 0;
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasChance()) {
      hash = (37 * hash) + CHANCE_FIELD_NUMBER;
      hash = (53 * hash) + Float.floatToIntBits(
          getChance());
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.RandomRowFilter}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilterOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // No message-typed fields here, so nothing to eagerly initialize.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    private static Builder create() {
      return new Builder();
    }

    public Builder clear() {
      super.clear();
      chance_ = 0F;
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_descriptor;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter build() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.chance_ = chance_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance()) return this;
      if (other.hasChance()) {
        setChance(other.getChance());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      if (!hasChance()) {
        return false;
      }
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure (merged in finally),
        // then rethrow so the caller still sees the error.
        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // required float chance = 1;
    private float chance_ ;
    /**
     * <code>required float chance = 1;</code>
     */
    public boolean hasChance() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required float chance = 1;</code>
     */
    public float getChance() {
      return chance_;
    }
    /**
     * <code>required float chance = 1;</code>
     */
    public Builder setChance(float value) {
      bitField0_ |= 0x00000001;
      chance_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>required float chance = 1;</code>
     */
    public Builder clearChance() {
      bitField0_ = (bitField0_ & ~0x00000001);
      chance_ = 0F;
      onChanged();
      return this;
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.RandomRowFilter)
  }

  static {
    defaultInstance = new RandomRowFilter(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.RandomRowFilter)
}

public interface RowFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required .hbase.pb.CompareFilter compare_filter = 1;
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  boolean hasCompareFilter();
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
}
/**
 * Protobuf type {@code hbase.pb.RowFilter}
 */
public static
final class RowFilter extends
    com.google.protobuf.GeneratedMessage
    implements RowFilterOrBuilder {
  // NOTE(review): protoc-generated code (protobuf-java 2.5 style).
  // Do not hand-edit; change the .proto definition and regenerate instead.
  // Use RowFilter.newBuilder() to construct.
  private RowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // noInit constructor used only for the singleton default instance below.
  private RowFilter(boolean noInit) {
    this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }

  private static final RowFilter defaultInstance;
  public static RowFilter getDefaultInstance() {
    return defaultInstance;
  }

  public RowFilter getDefaultInstanceForType() {
    return defaultInstance;
  }

  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-parsing constructor: tag 10 is field 1 ("compare_filter",
  // length-delimited message); repeated occurrences are merged into the
  // previously-read value via the subBuilder, per proto2 semantics.
  private RowFilter(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
            if (((bitField0_ & 0x00000001) == 0x00000001)) {
              subBuilder = compareFilter_.toBuilder();
            }
            compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(compareFilter_);
              compareFilter_ = subBuilder.buildPartial();
            }
            bitField0_ |= 0x00000001;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.Builder.class);
  }

  public static com.google.protobuf.Parser<RowFilter> PARSER =
      new com.google.protobuf.AbstractParser<RowFilter>() {
    public RowFilter parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new RowFilter(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<RowFilter> getParserForType() {
    return PARSER;
  }

  private int bitField0_;
  // required .hbase.pb.CompareFilter compare_filter = 1;
  public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
  private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  public boolean hasCompareFilter() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
    return compareFilter_;
  }
  /**
   * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
    return compareFilter_;
  }

  private void initFields() {
    compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
  }
  // Memoized: -1 = not computed, 0 = missing required field, 1 = ok.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    if (!hasCompareFilter()) {
      memoizedIsInitialized = 0;
      return false;
    }
    if (!getCompareFilter().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeMessage(1, compareFilter_);
    }
    getUnknownFields().writeTo(output);
  }

  // Lazily computed and cached; -1 means "not yet computed".
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, compareFilter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) obj;

    boolean result = true;
    result = result && (hasCompareFilter() == other.hasCompareFilter());
    if (hasCompareFilter()) {
      result = result && getCompareFilter()
          .equals(other.getCompareFilter());
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  private int memoizedHashCode = 0;
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasCompareFilter()) {
      hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
      hash = (53 * hash) + getCompareFilter().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.RowFilter}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilterOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        getCompareFilterFieldBuilder();
      }
    }
    private static Builder create() {
      return new Builder();
    }

    public Builder clear() {
      super.clear();
      if (compareFilterBuilder_ == null) {
        compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
      } else {
        compareFilterBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_descriptor;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter build() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      if (compareFilterBuilder_ == null) {
        result.compareFilter_ = compareFilter_;
      } else {
        result.compareFilter_ = compareFilterBuilder_.build();
      }
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance()) return this;
      if (other.hasCompareFilter()) {
        mergeCompareFilter(other.getCompareFilter());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      if (!hasCompareFilter()) {
        return false;
      }
      if (!getCompareFilter().isInitialized()) {
        return false;
      }
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure (merged in finally),
        // then rethrow so the caller still sees the error.
        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // required .hbase.pb.CompareFilter compare_filter = 1;
    // Either the plain field or the lazily-created SingleFieldBuilder is
    // authoritative: once compareFilterBuilder_ exists, compareFilter_ is null.
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public boolean hasCompareFilter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
      if (compareFilterBuilder_ == null) {
        return compareFilter_;
      } else {
        return compareFilterBuilder_.getMessage();
      }
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
      if (compareFilterBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        compareFilter_ = value;
        onChanged();
      } else {
        compareFilterBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder setCompareFilter(
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
      if (compareFilterBuilder_ == null) {
        compareFilter_ = builderForValue.build();
        onChanged();
      } else {
        compareFilterBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
      if (compareFilterBuilder_ == null) {
        if (((bitField0_ & 0x00000001) == 0x00000001) &&
            compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) {
          compareFilter_ =
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial();
        } else {
          compareFilter_ = value;
        }
        onChanged();
      } else {
        compareFilterBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public Builder clearCompareFilter() {
      if (compareFilterBuilder_ == null) {
        compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
        onChanged();
      } else {
        compareFilterBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getCompareFilterFieldBuilder().getBuilder();
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
      if (compareFilterBuilder_ != null) {
        return compareFilterBuilder_.getMessageOrBuilder();
      } else {
        return compareFilter_;
      }
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>
        getCompareFilterFieldBuilder() {
      if (compareFilterBuilder_ == null) {
        compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
                compareFilter_,
                getParentForChildren(),
                isClean());
        compareFilter_ = null;
      }
      return compareFilterBuilder_;
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.RowFilter)
  }

  static {
    defaultInstance = new RowFilter(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.RowFilter)
}

public interface SingleColumnValueExcludeFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;
  /**
   * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
   */
  boolean hasSingleColumnValueFilter();
  /**
   * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter();
  /**
   * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder();
}
/**
 * Protobuf type {@code hbase.pb.SingleColumnValueExcludeFilter}
 */
public static final class SingleColumnValueExcludeFilter extends
    com.google.protobuf.GeneratedMessage
    implements SingleColumnValueExcludeFilterOrBuilder {
  // Use SingleColumnValueExcludeFilter.newBuilder() to construct.
// NOTE(review): protoc-generated code (see the @@protoc_insertion_point markers
// elsewhere in this file). Do NOT edit by hand -- regenerate from the Filter.proto
// source instead. The comments below are review annotations only; all executable
// tokens are unchanged from the generator's output.

// Builder-to-message constructor: copies builder state and its unknown fields.
private SingleColumnValueExcludeFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
// Constructor used only to create the shared default instance (no parsing done).
private SingleColumnValueExcludeFilter(boolean noInit) {
  this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}

// Singleton default instance; assigned in the class's static initializer.
private static final SingleColumnValueExcludeFilter defaultInstance;
public static SingleColumnValueExcludeFilter getDefaultInstance() {
  return defaultInstance;
}

public SingleColumnValueExcludeFilter getDefaultInstanceForType() {
  return defaultInstance;
}

// Fields that arrived on the wire but are not defined in this message's schema.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}

// Wire-format parsing constructor: reads tagged fields from the stream until
// end-of-message (tag 0), collecting unrecognized fields into unknownFields.
private SingleColumnValueExcludeFilter(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          // Field 1 (single_column_value_filter). If the field was already
          // set, the new occurrence is merged into the existing value via a
          // sub-builder rather than replacing it outright.
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder subBuilder = null;
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            subBuilder = singleColumnValueFilter_.toBuilder();
          }
          singleColumnValueFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(singleColumnValueFilter_);
            singleColumnValueFilter_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000001;
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    // Wrap plain I/O failures as protocol errors, keeping the partial message.
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    // Always freeze whatever was parsed, even when an exception is in flight.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}

public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class);
}

// Parser entry point used by all of the parseFrom(...) overloads below.
public static com.google.protobuf.Parser<SingleColumnValueExcludeFilter> PARSER =
    new com.google.protobuf.AbstractParser<SingleColumnValueExcludeFilter>() {
  public SingleColumnValueExcludeFilter parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new SingleColumnValueExcludeFilter(input, extensionRegistry);
  }
};

@java.lang.Override
public com.google.protobuf.Parser<SingleColumnValueExcludeFilter> getParserForType() {
  return PARSER;
}

// Presence bits: bit 0 tracks single_column_value_filter.
private int bitField0_;
// required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;
public static final int SINGLE_COLUMN_VALUE_FILTER_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_;
/**
 * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
 */
public boolean hasSingleColumnValueFilter() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() {
  return singleColumnValueFilter_;
}
/**
 * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() {
  return singleColumnValueFilter_;
}

private void initFields() {
  singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance();
}
// Memoized tri-state: -1 = not yet computed, 0 = missing required data, 1 = ok.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;

  // The nested filter is a required field and must itself be initialized.
  if (!hasSingleColumnValueFilter()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!getSingleColumnValueFilter().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}

public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // Called for its side effect of populating memoizedSerializedSize.
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(1, singleColumnValueFilter_);
  }
  getUnknownFields().writeTo(output);
}

private int memoizedSerializedSize = -1;
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(1, singleColumnValueFilter_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}

private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) obj;

  // Field-wise comparison: presence bits must match, and present values must
  // be equal; unknown fields participate in equality as well.
  boolean result = true;
  result = result && (hasSingleColumnValueFilter() == other.hasSingleColumnValueFilter());
  if (hasSingleColumnValueFilter()) {
    result = result && getSingleColumnValueFilter()
        .equals(other.getSingleColumnValueFilter());
  }
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}

// Cached hash; 0 doubles as the "not yet computed" sentinel.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasSingleColumnValueFilter()) {
    hash = (37 * hash) + SINGLE_COLUMN_VALUE_FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getSingleColumnValueFilter().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// Static parse helpers; all delegate to PARSER above.
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}

public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.SingleColumnValueExcludeFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getSingleColumnValueFilterFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (singleColumnValueFilterBuilder_ == null) { singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); } else { singleColumnValueFilterBuilder_.clear(); } bitField0_ = 
(bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (singleColumnValueFilterBuilder_ == null) { result.singleColumnValueFilter_ = singleColumnValueFilter_; } else { result.singleColumnValueFilter_ = singleColumnValueFilterBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance()) return this; if (other.hasSingleColumnValueFilter()) { mergeSingleColumnValueFilter(other.getSingleColumnValueFilter()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasSingleColumnValueFilter()) { return false; } if (!getSingleColumnValueFilter().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> singleColumnValueFilterBuilder_; /** * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code> */ public boolean 
hasSingleColumnValueFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() { if (singleColumnValueFilterBuilder_ == null) { return singleColumnValueFilter_; } else { return singleColumnValueFilterBuilder_.getMessage(); } } /** * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code> */ public Builder setSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) { if (singleColumnValueFilterBuilder_ == null) { if (value == null) { throw new NullPointerException(); } singleColumnValueFilter_ = value; onChanged(); } else { singleColumnValueFilterBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code> */ public Builder setSingleColumnValueFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder builderForValue) { if (singleColumnValueFilterBuilder_ == null) { singleColumnValueFilter_ = builderForValue.build(); onChanged(); } else { singleColumnValueFilterBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code> */ public Builder mergeSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) { if (singleColumnValueFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && singleColumnValueFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) { singleColumnValueFilter_ = 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder(singleColumnValueFilter_).mergeFrom(value).buildPartial(); } else { singleColumnValueFilter_ = value; } onChanged(); } else { singleColumnValueFilterBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code> */ public Builder clearSingleColumnValueFilter() { if (singleColumnValueFilterBuilder_ == null) { singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); onChanged(); } else { singleColumnValueFilterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder getSingleColumnValueFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSingleColumnValueFilterFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() { if (singleColumnValueFilterBuilder_ != null) { return singleColumnValueFilterBuilder_.getMessageOrBuilder(); } else { return singleColumnValueFilter_; } } /** * <code>required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> getSingleColumnValueFilterFieldBuilder() { if (singleColumnValueFilterBuilder_ == null) { singleColumnValueFilterBuilder_ 
= new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder>( singleColumnValueFilter_, getParentForChildren(), isClean()); singleColumnValueFilter_ = null; } return singleColumnValueFilterBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.SingleColumnValueExcludeFilter) } static { defaultInstance = new SingleColumnValueExcludeFilter(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.SingleColumnValueExcludeFilter) } public interface SingleColumnValueFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bytes column_family = 1; /** * <code>optional bytes column_family = 1;</code> */ boolean hasColumnFamily(); /** * <code>optional bytes column_family = 1;</code> */ com.google.protobuf.ByteString getColumnFamily(); // optional bytes column_qualifier = 2; /** * <code>optional bytes column_qualifier = 2;</code> */ boolean hasColumnQualifier(); /** * <code>optional bytes column_qualifier = 2;</code> */ com.google.protobuf.ByteString getColumnQualifier(); // required .hbase.pb.CompareType compare_op = 3; /** * <code>required .hbase.pb.CompareType compare_op = 3;</code> */ boolean hasCompareOp(); /** * <code>required .hbase.pb.CompareType compare_op = 3;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp(); // required .hbase.pb.Comparator comparator = 4; /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ boolean hasComparator(); /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); // optional bool filter_if_missing = 5; /** * <code>optional bool filter_if_missing = 5;</code> */ boolean hasFilterIfMissing(); /** * <code>optional bool filter_if_missing = 5;</code> */ boolean getFilterIfMissing(); // optional bool latest_version_only = 6; /** * <code>optional bool latest_version_only = 6;</code> */ boolean hasLatestVersionOnly(); /** * <code>optional bool latest_version_only = 6;</code> */ boolean getLatestVersionOnly(); } /** * Protobuf type {@code hbase.pb.SingleColumnValueFilter} */ public static final class SingleColumnValueFilter extends com.google.protobuf.GeneratedMessage implements SingleColumnValueFilterOrBuilder { // Use SingleColumnValueFilter.newBuilder() to construct. private SingleColumnValueFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private SingleColumnValueFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final SingleColumnValueFilter defaultInstance; public static SingleColumnValueFilter getDefaultInstance() { return defaultInstance; } public SingleColumnValueFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SingleColumnValueFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: 
{ if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; columnFamily_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; columnQualifier_ = input.readBytes(); break; } case 24: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(3, rawValue); } else { bitField0_ |= 0x00000004; compareOp_ = value; } break; } case 34: { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null; if (((bitField0_ & 0x00000008) == 0x00000008)) { subBuilder = comparator_.toBuilder(); } comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(comparator_); comparator_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000008; break; } case 40: { bitField0_ |= 0x00000010; filterIfMissing_ = input.readBool(); break; } case 48: { bitField0_ |= 0x00000020; latestVersionOnly_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class); } public static com.google.protobuf.Parser<SingleColumnValueFilter> PARSER = new com.google.protobuf.AbstractParser<SingleColumnValueFilter>() { public SingleColumnValueFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new SingleColumnValueFilter(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<SingleColumnValueFilter> getParserForType() { return PARSER; } private int bitField0_; // optional bytes column_family = 1; public static final int COLUMN_FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString columnFamily_; /** * <code>optional bytes column_family = 1;</code> */ public boolean hasColumnFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes column_family = 1;</code> */ public com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } // optional bytes column_qualifier = 2; public static final int COLUMN_QUALIFIER_FIELD_NUMBER = 2; private com.google.protobuf.ByteString columnQualifier_; /** * <code>optional bytes column_qualifier = 2;</code> */ public boolean hasColumnQualifier() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes column_qualifier = 2;</code> */ public com.google.protobuf.ByteString getColumnQualifier() { return columnQualifier_; } // required .hbase.pb.CompareType compare_op = 3; public static final int COMPARE_OP_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_; /** * <code>required 
.hbase.pb.CompareType compare_op = 3;</code> */ public boolean hasCompareOp() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required .hbase.pb.CompareType compare_op = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { return compareOp_; } // required .hbase.pb.Comparator comparator = 4; public static final int COMPARATOR_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ public boolean hasComparator() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { return comparator_; } /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { return comparator_; } // optional bool filter_if_missing = 5; public static final int FILTER_IF_MISSING_FIELD_NUMBER = 5; private boolean filterIfMissing_; /** * <code>optional bool filter_if_missing = 5;</code> */ public boolean hasFilterIfMissing() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool filter_if_missing = 5;</code> */ public boolean getFilterIfMissing() { return filterIfMissing_; } // optional bool latest_version_only = 6; public static final int LATEST_VERSION_ONLY_FIELD_NUMBER = 6; private boolean latestVersionOnly_; /** * <code>optional bool latest_version_only = 6;</code> */ public boolean hasLatestVersionOnly() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional bool latest_version_only = 6;</code> */ public boolean getLatestVersionOnly() { return latestVersionOnly_; } private void initFields() { columnFamily_ = com.google.protobuf.ByteString.EMPTY; columnQualifier_ = 
com.google.protobuf.ByteString.EMPTY; compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); filterIfMissing_ = false; latestVersionOnly_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasCompareOp()) { memoizedIsInitialized = 0; return false; } if (!hasComparator()) { memoizedIsInitialized = 0; return false; } if (!getComparator().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, columnFamily_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, columnQualifier_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeEnum(3, compareOp_.getNumber()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeMessage(4, comparator_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(5, filterIfMissing_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBool(6, latestVersionOnly_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, columnFamily_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, columnQualifier_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(3, compareOp_.getNumber()); } if (((bitField0_ & 
0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, comparator_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, filterIfMissing_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(6, latestVersionOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) obj; boolean result = true; result = result && (hasColumnFamily() == other.hasColumnFamily()); if (hasColumnFamily()) { result = result && getColumnFamily() .equals(other.getColumnFamily()); } result = result && (hasColumnQualifier() == other.hasColumnQualifier()); if (hasColumnQualifier()) { result = result && getColumnQualifier() .equals(other.getColumnQualifier()); } result = result && (hasCompareOp() == other.hasCompareOp()); if (hasCompareOp()) { result = result && (getCompareOp() == other.getCompareOp()); } result = result && (hasComparator() == other.hasComparator()); if (hasComparator()) { result = result && getComparator() .equals(other.getComparator()); } result = result && (hasFilterIfMissing() == other.hasFilterIfMissing()); if (hasFilterIfMissing()) { result = result && (getFilterIfMissing() == other.getFilterIfMissing()); } result = result && (hasLatestVersionOnly() == other.hasLatestVersionOnly()); 
if (hasLatestVersionOnly()) { result = result && (getLatestVersionOnly() == other.getLatestVersionOnly()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasColumnFamily()) { hash = (37 * hash) + COLUMN_FAMILY_FIELD_NUMBER; hash = (53 * hash) + getColumnFamily().hashCode(); } if (hasColumnQualifier()) { hash = (37 * hash) + COLUMN_QUALIFIER_FIELD_NUMBER; hash = (53 * hash) + getColumnQualifier().hashCode(); } if (hasCompareOp()) { hash = (37 * hash) + COMPARE_OP_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getCompareOp()); } if (hasComparator()) { hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; hash = (53 * hash) + getComparator().hashCode(); } if (hasFilterIfMissing()) { hash = (37 * hash) + FILTER_IF_MISSING_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getFilterIfMissing()); } if (hasLatestVersionOnly()) { hash = (37 * hash) + LATEST_VERSION_ONLY_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getLatestVersionOnly()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static 
Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.SingleColumnValueFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getComparatorFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); columnFamily_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); columnQualifier_ = 
com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; bitField0_ = (bitField0_ & ~0x00000004); if (comparatorBuilder_ == null) { comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); } else { comparatorBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); filterIfMissing_ = false; bitField0_ = (bitField0_ & ~0x00000010); latestVersionOnly_ = false; bitField0_ = (bitField0_ & ~0x00000020); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.columnFamily_ = columnFamily_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.columnQualifier_ = columnQualifier_; if (((from_bitField0_ & 
0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.compareOp_ = compareOp_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } if (comparatorBuilder_ == null) { result.comparator_ = comparator_; } else { result.comparator_ = comparatorBuilder_.build(); } if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.filterIfMissing_ = filterIfMissing_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.latestVersionOnly_ = latestVersionOnly_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) return this; if (other.hasColumnFamily()) { setColumnFamily(other.getColumnFamily()); } if (other.hasColumnQualifier()) { setColumnQualifier(other.getColumnQualifier()); } if (other.hasCompareOp()) { setCompareOp(other.getCompareOp()); } if (other.hasComparator()) { mergeComparator(other.getComparator()); } if (other.hasFilterIfMissing()) { setFilterIfMissing(other.getFilterIfMissing()); } if (other.hasLatestVersionOnly()) { setLatestVersionOnly(other.getLatestVersionOnly()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasCompareOp()) { return false; } if (!hasComparator()) { return false; } if (!getComparator().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional bytes column_family = 1; private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes column_family = 1;</code> */ public boolean hasColumnFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes column_family = 1;</code> */ public com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } /** * <code>optional bytes column_family = 1;</code> */ public Builder setColumnFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; columnFamily_ = value; onChanged(); return this; } /** * <code>optional bytes column_family = 1;</code> */ public Builder clearColumnFamily() { bitField0_ = (bitField0_ & ~0x00000001); columnFamily_ = getDefaultInstance().getColumnFamily(); onChanged(); return this; } // optional bytes column_qualifier = 2; private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes column_qualifier = 2;</code> */ public boolean hasColumnQualifier() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes column_qualifier = 2;</code> */ public com.google.protobuf.ByteString getColumnQualifier() { return columnQualifier_; } /** * <code>optional bytes column_qualifier = 2;</code> */ public Builder 
setColumnQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; columnQualifier_ = value; onChanged(); return this; } /** * <code>optional bytes column_qualifier = 2;</code> */ public Builder clearColumnQualifier() { bitField0_ = (bitField0_ & ~0x00000002); columnQualifier_ = getDefaultInstance().getColumnQualifier(); onChanged(); return this; } // required .hbase.pb.CompareType compare_op = 3; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; /** * <code>required .hbase.pb.CompareType compare_op = 3;</code> */ public boolean hasCompareOp() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required .hbase.pb.CompareType compare_op = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { return compareOp_; } /** * <code>required .hbase.pb.CompareType compare_op = 3;</code> */ public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; compareOp_ = value; onChanged(); return this; } /** * <code>required .hbase.pb.CompareType compare_op = 3;</code> */ public Builder clearCompareOp() { bitField0_ = (bitField0_ & ~0x00000004); compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; onChanged(); return this; } // required .hbase.pb.Comparator comparator = 4; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ public boolean hasComparator() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { if (comparatorBuilder_ == null) { return comparator_; } else { return comparatorBuilder_.getMessage(); } } /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } comparator_ = value; onChanged(); } else { comparatorBuilder_.setMessage(value); } bitField0_ |= 0x00000008; return this; } /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ public Builder setComparator( org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { if (comparatorBuilder_ == null) { comparator_ = builderForValue.build(); onChanged(); } else { comparatorBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; return this; } /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); } else { comparator_ = value; } onChanged(); } else { comparatorBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; return this; } /** * <code>required .hbase.pb.Comparator 
comparator = 4;</code> */ public Builder clearComparator() { if (comparatorBuilder_ == null) { comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); onChanged(); } else { comparatorBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); return this; } /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { bitField0_ |= 0x00000008; onChanged(); return getComparatorFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { if (comparatorBuilder_ != null) { return comparatorBuilder_.getMessageOrBuilder(); } else { return comparator_; } } /** * <code>required .hbase.pb.Comparator comparator = 4;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> getComparatorFieldBuilder() { if (comparatorBuilder_ == null) { comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( comparator_, getParentForChildren(), isClean()); comparator_ = null; } return comparatorBuilder_; } // optional bool filter_if_missing = 5; private boolean filterIfMissing_ ; /** * <code>optional bool filter_if_missing = 5;</code> */ public boolean hasFilterIfMissing() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool filter_if_missing = 5;</code> */ public boolean 
getFilterIfMissing() { return filterIfMissing_; } /** * <code>optional bool filter_if_missing = 5;</code> */ public Builder setFilterIfMissing(boolean value) { bitField0_ |= 0x00000010; filterIfMissing_ = value; onChanged(); return this; } /** * <code>optional bool filter_if_missing = 5;</code> */ public Builder clearFilterIfMissing() { bitField0_ = (bitField0_ & ~0x00000010); filterIfMissing_ = false; onChanged(); return this; } // optional bool latest_version_only = 6; private boolean latestVersionOnly_ ; /** * <code>optional bool latest_version_only = 6;</code> */ public boolean hasLatestVersionOnly() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional bool latest_version_only = 6;</code> */ public boolean getLatestVersionOnly() { return latestVersionOnly_; } /** * <code>optional bool latest_version_only = 6;</code> */ public Builder setLatestVersionOnly(boolean value) { bitField0_ |= 0x00000020; latestVersionOnly_ = value; onChanged(); return this; } /** * <code>optional bool latest_version_only = 6;</code> */ public Builder clearLatestVersionOnly() { bitField0_ = (bitField0_ & ~0x00000020); latestVersionOnly_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.SingleColumnValueFilter) } static { defaultInstance = new SingleColumnValueFilter(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.SingleColumnValueFilter) } public interface SkipFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .hbase.pb.Filter filter = 1; /** * <code>required .hbase.pb.Filter filter = 1;</code> */ boolean hasFilter(); /** * <code>required .hbase.pb.Filter filter = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter(); /** * <code>required .hbase.pb.Filter filter = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder(); } /** * Protobuf type {@code hbase.pb.SkipFilter} */ public 
static final class SkipFilter extends com.google.protobuf.GeneratedMessage implements SkipFilterOrBuilder { // Use SkipFilter.newBuilder() to construct. private SkipFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private SkipFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final SkipFilter defaultInstance; public static SkipFilter getDefaultInstance() { return defaultInstance; } public SkipFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SkipFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = filter_.toBuilder(); } filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(filter_); filter_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( 
e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.Builder.class); } public static com.google.protobuf.Parser<SkipFilter> PARSER = new com.google.protobuf.AbstractParser<SkipFilter>() { public SkipFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new SkipFilter(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<SkipFilter> getParserForType() { return PARSER; } private int bitField0_; // required .hbase.pb.Filter filter = 1; public static final int FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_; /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public boolean hasFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() { return filter_; } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { return filter_; } private void initFields() { filter_ = 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasFilter()) { memoizedIsInitialized = 0; return false; } if (!getFilter().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, filter_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) obj; boolean result = true; result = result && (hasFilter() == other.hasFilter()); if (hasFilter()) { result = result && getFilter() .equals(other.getFilter()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int 
hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFilter()) { hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.SkipFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.class, 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getFilterFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (filterBuilder_ == null) { filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); } else { filterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (filterBuilder_ == null) { result.filter_ = 
// NOTE(review): machine-generated Protocol Buffers code (protoc) — see the
// @@protoc_insertion_point markers below. Do not hand-edit; change the
// Filter.proto definition and regenerate. Comments here are review notes only;
// all code tokens are unchanged (reflowed from a whitespace-collapsed paste).
// This span continues SkipFilter.Builder.buildPartial(): the assignment
// `result.filter_ = filter_;` begins on the previous (out-of-view) line.
filter_;
        } else {
          result.filter_ = filterBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Generated merge dispatch: routes a generic Message to the typed overload.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merging the default instance is a no-op; otherwise the wrapped filter and
      // unknown fields are folded into this builder.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance()) return this;
        if (other.hasFilter()) {
          mergeFilter(other.getFilter());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // `filter` is a required field (see the .proto comment below), so the
      // builder is initialized only when it is set and itself initialized.
      public final boolean isInitialized() {
        if (!hasFilter()) {
          return false;
        }
        if (!getFilter().isInitialized()) {
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Preserve whatever was parsed before the failure, then rethrow;
          // the finally-block still merges the partial message into this builder.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .hbase.pb.Filter filter = 1;
      // filter_ holds the value while no sub-builder exists; filterBuilder_
      // (lazily created in getFilterFieldBuilder()) takes over once a builder
      // view of the field is requested.
      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       */
      public boolean hasFilter() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
        if (filterBuilder_ == null) {
          return filter_;
        } else {
          return filterBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       */
      public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filterBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          filter_ = value;
          onChanged();
        } else {
          filterBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       */
      public Builder setFilter(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
        if (filterBuilder_ == null) {
          filter_ = builderForValue.build();
          onChanged();
        } else {
          filterBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       */
      public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filterBuilder_ == null) {
          // Merge into the existing value only if one was set and it is not the
          // shared default instance; otherwise adopt `value` directly.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
            filter_ =
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
          } else {
            filter_ = value;
          }
          onChanged();
        } else {
          filterBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       */
      public Builder clearFilter() {
        if (filterBuilder_ == null) {
          filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
          onChanged();
        } else {
          filterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getFilterFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
        if (filterBuilder_ != null) {
          return filterBuilder_.getMessageOrBuilder();
        } else {
          return filter_;
        }
      }
      /**
       * <code>required .hbase.pb.Filter filter = 1;</code>
       */
      // Lazily creates the SingleFieldBuilder and hands ownership of the field
      // value to it (filter_ is nulled afterwards).
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
          getFilterFieldBuilder() {
        if (filterBuilder_ == null) {
          filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
                  filter_,
                  getParentForChildren(),
                  isClean());
          filter_ = null;
        }
        return filterBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.SkipFilter)
    }

    static {
      defaultInstance = new SkipFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.SkipFilter)
  }

  // Read-only view of a hbase.pb.TimestampsFilter (implemented by both the
  // message and its Builder).
  public interface TimestampsFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated int64 timestamps = 1 [packed = true];
    /**
     * <code>repeated int64 timestamps = 1 [packed = true];</code>
     */
    java.util.List<java.lang.Long> getTimestampsList();
    /**
     * <code>repeated int64 timestamps = 1 [packed = true];</code>
     */
    int getTimestampsCount();
    /**
     * <code>repeated int64 timestamps = 1 [packed = true];</code>
     */
    long getTimestamps(int index);

    // optional bool can_hint = 2;
    /**
     * <code>optional bool can_hint = 2;</code>
     */
// NOTE(review): machine-generated Protocol Buffers code (protoc) — do not
// hand-edit; regenerate from Filter.proto. Review comments only; code tokens
// unchanged (reflowed from a whitespace-collapsed paste). This span closes the
// TimestampsFilterOrBuilder interface (its javadoc opens on the previous line)
// and contains the full TimestampsFilter message class.
boolean hasCanHint();
    /**
     * <code>optional bool can_hint = 2;</code>
     */
    boolean getCanHint();
  }
  /**
   * Protobuf type {@code hbase.pb.TimestampsFilter}
   */
  public static final class TimestampsFilter extends
      com.google.protobuf.GeneratedMessage
      implements TimestampsFilterOrBuilder {
    // Use TimestampsFilter.newBuilder() to construct.
    private TimestampsFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private TimestampsFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final TimestampsFilter defaultInstance;
    public static TimestampsFilter getDefaultInstance() {
      return defaultInstance;
    }

    public TimestampsFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor. Accepts `timestamps` in both encodings:
    // tag 8 = unpacked single varint entries, tag 10 = packed length-delimited
    // block (the .proto declares [packed = true], but parsers must accept both).
    private TimestampsFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                timestamps_ = new java.util.ArrayList<java.lang.Long>();
                mutable_bitField0_ |= 0x00000001;
              }
              timestamps_.add(input.readInt64());
              break;
            }
            case 10: {
              int length = input.readRawVarint32();
              int limit = input.pushLimit(length);
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
                timestamps_ = new java.util.ArrayList<java.lang.Long>();
                mutable_bitField0_ |= 0x00000001;
              }
              while (input.getBytesUntilLimit() > 0) {
                timestamps_.add(input.readInt64());
              }
              input.popLimit(limit);
              break;
            }
            case 16: {
              bitField0_ |= 0x00000001;
              canHint_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the accumulated list even when parsing throws.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          timestamps_ = java.util.Collections.unmodifiableList(timestamps_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class);
    }

    public static com.google.protobuf.Parser<TimestampsFilter> PARSER =
        new com.google.protobuf.AbstractParser<TimestampsFilter>() {
      public TimestampsFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TimestampsFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TimestampsFilter> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // repeated int64 timestamps = 1 [packed = true];
    public static final int TIMESTAMPS_FIELD_NUMBER = 1;
    private java.util.List<java.lang.Long> timestamps_;
    /**
     * <code>repeated int64 timestamps = 1 [packed = true];</code>
     */
    public java.util.List<java.lang.Long>
        getTimestampsList() {
      return timestamps_;
    }
    /**
     * <code>repeated int64 timestamps = 1 [packed = true];</code>
     */
    public int getTimestampsCount() {
      return timestamps_.size();
    }
    /**
     * <code>repeated int64 timestamps = 1 [packed = true];</code>
     */
    public long getTimestamps(int index) {
      return timestamps_.get(index);
    }
    // Byte size of the packed payload, cached by getSerializedSize() and
    // reused by writeTo() for the length prefix.
    private int timestampsMemoizedSerializedSize = -1;

    // optional bool can_hint = 2;
    public static final int CAN_HINT_FIELD_NUMBER = 2;
    private boolean canHint_;
    /**
     * <code>optional bool can_hint = 2;</code>
     */
    public boolean hasCanHint() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bool can_hint = 2;</code>
     */
    public boolean getCanHint() {
      return canHint_;
    }

    private void initFields() {
      timestamps_ = java.util.Collections.emptyList();
      canHint_ = false;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    // Always emits `timestamps` in packed form: raw varint 10 is the key for
    // field 1 / wire-type 2 (length-delimited), followed by the cached payload
    // size and the varint-encoded values.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (getTimestampsList().size() > 0) {
        output.writeRawVarint32(10);
        output.writeRawVarint32(timestampsMemoizedSerializedSize);
      }
      for (int i = 0; i < timestamps_.size(); i++) {
        output.writeInt64NoTag(timestamps_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(2, canHint_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < timestamps_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeInt64SizeNoTag(timestamps_.get(i));
        }
        size += dataSize;
        if (!getTimestampsList().isEmpty()) {
          size += 1;
          size += com.google.protobuf.CodedOutputStream
              .computeInt32SizeNoTag(dataSize);
        }
        timestampsMemoizedSerializedSize = dataSize;
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, canHint_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) obj;

      boolean result = true;
      result = result && getTimestampsList()
          .equals(other.getTimestampsList());
      result = result && (hasCanHint() == other.hasCanHint());
      if (hasCanHint()) {
        result = result && (getCanHint()
            == other.getCanHint());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getTimestampsCount() > 0) {
        hash = (37 * hash) + TIMESTAMPS_FIELD_NUMBER;
        hash = (53 * hash) + getTimestampsList().hashCode();
      }
      if (hasCanHint()) {
        hash = (37 * hash) + CAN_HINT_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getCanHint());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.TimestampsFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // NOTE(review): in this Builder, bit 0x00000001 tracks `timestamps`
      // mutability and bit 0x00000002 tracks `can_hint`; buildPartial() remaps
      // the latter to the message's bit 0x00000001.
      public Builder clear() {
        super.clear();
        timestamps_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        canHint_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        // Hand the (now frozen) list to the message and mark the builder's
        // copy immutable so later builder mutations re-copy it.
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          timestamps_ = java.util.Collections.unmodifiableList(timestamps_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.timestamps_ = timestamps_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000001;
        }
        result.canHint_ = canHint_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance()) return this;
        if (!other.timestamps_.isEmpty()) {
          // Adopt the other message's (immutable) list when ours is empty;
          // otherwise append, which requires a mutable copy first.
          if (timestamps_.isEmpty()) {
            timestamps_ = other.timestamps_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureTimestampsIsMutable();
            timestamps_.addAll(other.timestamps_);
          }
          onChanged();
        }
        if (other.hasCanHint()) {
          setCanHint(other.getCanHint());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // No required fields in TimestampsFilter, so always initialized.
      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated int64 timestamps = 1 [packed = true];
      private java.util.List<java.lang.Long> timestamps_ = java.util.Collections.emptyList();
      private void ensureTimestampsIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          timestamps_ = new java.util.ArrayList<java.lang.Long>(timestamps_);
          bitField0_ |= 0x00000001;
         }
      }
      /**
       * <code>repeated int64 timestamps = 1 [packed = true];</code>
       */
      public java.util.List<java.lang.Long>
          getTimestampsList() {
        return java.util.Collections.unmodifiableList(timestamps_);
      }
      /**
       * <code>repeated int64 timestamps = 1 [packed = true];</code>
       */
      public int getTimestampsCount() {
        return timestamps_.size();
      }
      /**
       * <code>repeated int64 timestamps = 1 [packed = true];</code>
       */
      public long getTimestamps(int index) {
        return timestamps_.get(index);
      }
      /**
       * <code>repeated int64 timestamps = 1 [packed = true];</code>
       */
      public Builder setTimestamps(
          int index, long value) {
        ensureTimestampsIsMutable();
        timestamps_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated int64 timestamps = 1 [packed = true];</code>
       */
      public Builder addTimestamps(long value) {
        ensureTimestampsIsMutable();
        timestamps_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated int64 timestamps = 1 [packed = true];</code>
       */
      public Builder addAllTimestamps(
          java.lang.Iterable<? extends java.lang.Long> values) {
        ensureTimestampsIsMutable();
        super.addAll(values, timestamps_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated int64 timestamps = 1 [packed = true];</code>
       */
      public Builder clearTimestamps() {
        timestamps_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }

      // optional bool can_hint = 2;
      private boolean canHint_ ;
      /**
       * <code>optional bool can_hint = 2;</code>
       */
      public boolean hasCanHint() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool can_hint = 2;</code>
       */
      public boolean getCanHint() {
        return canHint_;
      }
      /**
       * <code>optional bool can_hint = 2;</code>
       */
      public Builder setCanHint(boolean value) {
        bitField0_ |= 0x00000002;
        canHint_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool can_hint = 2;</code>
       */
      public Builder clearCanHint() {
        bitField0_ = (bitField0_ & ~0x00000002);
        canHint_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.TimestampsFilter)
    }

    static {
      defaultInstance = new TimestampsFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.TimestampsFilter)
  }

  // Read-only view of a hbase.pb.ValueFilter; the `extends` clause continues
  // on the next line.
  public interface ValueFilterOrBuilder extends
// NOTE(review): machine-generated Protocol Buffers code (protoc) — do not
// hand-edit; regenerate from Filter.proto. Review comments only; code tokens
// unchanged (reflowed from a whitespace-collapsed paste). This span completes
// ValueFilterOrBuilder (whose declaration opens on the previous line) and
// begins the ValueFilter message; its Builder is cut off mid-signature at the
// end of this view.
com.google.protobuf.MessageOrBuilder {

    // required .hbase.pb.CompareFilter compare_filter = 1;
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    boolean hasCompareFilter();
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
  }
  /**
   * Protobuf type {@code hbase.pb.ValueFilter}
   */
  public static final class ValueFilter extends
      com.google.protobuf.GeneratedMessage
      implements ValueFilterOrBuilder {
    // Use ValueFilter.newBuilder() to construct.
    private ValueFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private ValueFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final ValueFilter defaultInstance;
    public static ValueFilter getDefaultInstance() {
      return defaultInstance;
    }

    public ValueFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor: field 1 (tag 10) is the embedded CompareFilter;
    // a repeated occurrence is merged into the previously parsed value.
    private ValueFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = compareFilter_.toBuilder();
              }
              compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(compareFilter_);
                compareFilter_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder.class);
    }

    public static com.google.protobuf.Parser<ValueFilter> PARSER =
        new com.google.protobuf.AbstractParser<ValueFilter>() {
      public ValueFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ValueFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ValueFilter> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // required .hbase.pb.CompareFilter compare_filter = 1;
    public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public boolean hasCompareFilter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
      return compareFilter_;
    }
    /**
     * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
      return compareFilter_;
    }

    private void initFields() {
      compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    // `compare_filter` is required, so initialization needs it present and
    // itself initialized; the result is memoized (-1 = unknown, 0/1 = cached).
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasCompareFilter()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getCompareFilter().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, compareFilter_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, compareFilter_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) obj;

      boolean result = true;
      result = result && (hasCompareFilter() == other.hasCompareFilter());
      if (hasCompareFilter()) {
        result = result && getCompareFilter()
            .equals(other.getCompareFilter());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasCompareFilter()) {
        hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
        hash = (53 * hash) + getCompareFilter().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.ValueFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getCompareFilterFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (compareFilterBuilder_ == null) {
          compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
        } else {
          compareFilterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (compareFilterBuilder_ == null) {
          result.compareFilter_ = compareFilter_;
        } else {
          result.compareFilter_ = compareFilterBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance()) return this;
        if (other.hasCompareFilter()) {
          mergeCompareFilter(other.getCompareFilter());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasCompareFilter()) {
          return false;
        }
        if (!getCompareFilter().isInitialized()) {
          return false;
        }
        return true;
      }

      // NOTE(review): this mergeFrom signature is truncated at the end of the
      // visible chunk; its body continues in the next section of the file.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite
extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.CompareFilter compare_filter = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null) { return compareFilter_; } else { return compareFilterBuilder_.getMessage(); } } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (value == null) { throw new NullPointerException(); } compareFilter_ = value; onChanged(); } else { compareFilterBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public 
Builder setCompareFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { if (compareFilterBuilder_ == null) { compareFilter_ = builderForValue.build(); onChanged(); } else { compareFilterBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); } else { compareFilter_ = value; } onChanged(); } else { compareFilterBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); onChanged(); } else { compareFilterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCompareFilterFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.CompareFilter compare_filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); } else { return compareFilter_; } } /** * <code>required 
.hbase.pb.CompareFilter compare_filter = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder() { if (compareFilterBuilder_ == null) { compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( compareFilter_, getParentForChildren(), isClean()); compareFilter_ = null; } return compareFilterBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.ValueFilter) } static { defaultInstance = new ValueFilter(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.ValueFilter) } public interface WhileMatchFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .hbase.pb.Filter filter = 1; /** * <code>required .hbase.pb.Filter filter = 1;</code> */ boolean hasFilter(); /** * <code>required .hbase.pb.Filter filter = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter(); /** * <code>required .hbase.pb.Filter filter = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder(); } /** * Protobuf type {@code hbase.pb.WhileMatchFilter} */ public static final class WhileMatchFilter extends com.google.protobuf.GeneratedMessage implements WhileMatchFilterOrBuilder { // Use WhileMatchFilter.newBuilder() to construct. 
private WhileMatchFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private WhileMatchFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final WhileMatchFilter defaultInstance; public static WhileMatchFilter getDefaultInstance() { return defaultInstance; } public WhileMatchFilter getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private WhileMatchFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = filter_.toBuilder(); } filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(filter_); filter_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class); } public static com.google.protobuf.Parser<WhileMatchFilter> PARSER = new com.google.protobuf.AbstractParser<WhileMatchFilter>() { public WhileMatchFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new WhileMatchFilter(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<WhileMatchFilter> getParserForType() { return PARSER; } private int bitField0_; // required .hbase.pb.Filter filter = 1; public static final int FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_; /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public boolean hasFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() { return filter_; } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { return filter_; } private void initFields() { filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); } private byte 
memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasFilter()) { memoizedIsInitialized = 0; return false; } if (!getFilter().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, filter_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) obj; boolean result = true; result = result && (hasFilter() == other.hasFilter()); if (hasFilter()) { result = result && getFilter() .equals(other.getFilter()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFilter()) 
{ hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.WhileMatchFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.class, 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getFilterFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (filterBuilder_ == null) { filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); } else { filterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 
0x00000001; } if (filterBuilder_ == null) { result.filter_ = filter_; } else { result.filter_ = filterBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance()) return this; if (other.hasFilter()) { mergeFilter(other.getFilter()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasFilter()) { return false; } if (!getFilter().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.Filter filter = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_; /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public boolean hasFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() { if (filterBuilder_ == null) { return filter_; } else { return filterBuilder_.getMessage(); } } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (value == null) { throw new NullPointerException(); } filter_ = value; onChanged(); } else { filterBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public Builder setFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) { if (filterBuilder_ == null) { filter_ = builderForValue.build(); onChanged(); } else { filterBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); } else { filter_ = value; } onChanged(); } else { filterBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public Builder clearFilter() { if (filterBuilder_ == null) { filter_ = 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); onChanged(); } else { filterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getFilterFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); } else { return filter_; } } /** * <code>required .hbase.pb.Filter filter = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>( filter_, getParentForChildren(), isClean()); filter_ = null; } return filterBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.WhileMatchFilter) } static { defaultInstance = new WhileMatchFilter(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.WhileMatchFilter) } public interface FilterAllFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.FilterAllFilter} */ public static final class FilterAllFilter extends com.google.protobuf.GeneratedMessage implements FilterAllFilterOrBuilder { // Use FilterAllFilter.newBuilder() to construct. 
// NOTE(review): protoc-generated code (see @@protoc_insertion_point markers
// below). Do not edit by hand — change Filter.proto and regenerate instead.
// FilterAllFilter carries no fields; it exists only as a named message type.
private FilterAllFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the static defaultInstance singleton.
private FilterAllFilter(boolean noInit) {
  this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}

private static final FilterAllFilter defaultInstance;
public static FilterAllFilter getDefaultInstance() {
  return defaultInstance;
}

public FilterAllFilter getDefaultInstanceForType() {
  return defaultInstance;
}

private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-parsing constructor: reads tags until tag 0 (end of stream); since
// this message declares no fields, every non-zero tag is preserved as an
// unknown field.
private FilterAllFilter(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    // Always capture whatever was parsed so far, even on failure.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.Builder.class);
}

public static com.google.protobuf.Parser<FilterAllFilter> PARSER =
    new com.google.protobuf.AbstractParser<FilterAllFilter>() {
  public FilterAllFilter parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new FilterAllFilter(input, extensionRegistry);
  }
};

@java.lang.Override
public com.google.protobuf.Parser<FilterAllFilter> getParserForType() {
  return PARSER;
}

private void initFields() {
}
// Cached initialization check: -1 = not computed yet, otherwise 0/1.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;

  memoizedIsInitialized = 1;
  return true;
}

public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  getSerializedSize();
  getUnknownFields().writeTo(output);
}

// Cached serialized size: -1 = not computed yet.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}

private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}

// With no declared fields, equality reduces to comparing unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter) obj;

  boolean result = true;
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}

// Cached hash: 0 = not computed yet.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// Standard protoc parse entry points, all delegating to PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}

public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Protobuf type {@code hbase.pb.FilterAllFilter}
 *
 * Generated builder; FilterAllFilter has no fields, so the builder only
 * manages unknown fields.
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessage.Builder<Builder>
   implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilterOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.Builder.class);
  }

  // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
    }
  }
  private static Builder create() {
    return new Builder();
  }

  public Builder clear() {
    super.clear();
    return this;
  }

  public Builder clone() {
    return create().mergeFrom(buildPartial());
  }

  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_descriptor;
  }

  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter getDefaultInstanceForType() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.getDefaultInstance();
  }

  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter build() {
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter buildPartial() {
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter(this);
    onBuilt();
    return result;
  }

  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter) {
      return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter other) {
    if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.getDefaultInstance()) return this;
    this.mergeUnknownFields(other.getUnknownFields());
    return this;
  }

  public final boolean isInitialized() {
    return true;
  }

  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever was parsed before the failure, then rethrow.
      parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter) e.getUnfinishedMessage();
      throw e;
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }

  // @@protoc_insertion_point(builder_scope:hbase.pb.FilterAllFilter)
}

static {
  defaultInstance = new FilterAllFilter(true);
  defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:hbase.pb.FilterAllFilter)
}

// Read-only accessor interface for hbase.pb.RowRange (protoc-generated).
public interface RowRangeOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // optional bytes start_row = 1;
  /**
   * <code>optional bytes start_row = 1;</code>
   */
  boolean hasStartRow();
  /**
   * <code>optional bytes start_row = 1;</code>
   */
  com.google.protobuf.ByteString getStartRow();

  // optional bool start_row_inclusive = 2;
  /**
   * <code>optional bool start_row_inclusive = 2;</code>
   */
  boolean hasStartRowInclusive();
  /**
   * <code>optional bool start_row_inclusive = 2;</code>
   */
  boolean getStartRowInclusive();

  // optional bytes stop_row = 3;
  /**
   * <code>optional bytes stop_row = 3;</code>
   */
  boolean hasStopRow();
  /**
   * <code>optional bytes stop_row = 3;</code>
   */
  com.google.protobuf.ByteString getStopRow();

  // optional bool stop_row_inclusive = 4;
  /**
   * <code>optional
bool stop_row_inclusive = 4;</code>
   */
  boolean hasStopRowInclusive();
  /**
   * <code>optional bool stop_row_inclusive = 4;</code>
   */
  boolean getStopRowInclusive();
}
/**
 * Protobuf type {@code hbase.pb.RowRange}
 *
 * NOTE(review): protoc-generated code — regenerate from the .proto file
 * instead of editing by hand. A RowRange holds an optional [start_row,
 * stop_row] pair with per-end inclusive flags; field presence is tracked
 * in the bitField0_ bitmask (bit 1 = start_row, 2 = start_row_inclusive,
 * 4 = stop_row, 8 = stop_row_inclusive).
 */
public static final class RowRange extends
    com.google.protobuf.GeneratedMessage
    implements RowRangeOrBuilder {
  // Use RowRange.newBuilder() to construct.
  private RowRange(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // noInit constructor used only for the static defaultInstance singleton.
  private RowRange(boolean noInit) {
    this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }

  private static final RowRange defaultInstance;
  public static RowRange getDefaultInstance() {
    return defaultInstance;
  }

  public RowRange getDefaultInstanceForType() {
    return defaultInstance;
  }

  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-parsing constructor: tag 0 ends the stream; tags 10/16/26/32 are
  // fields 1-4; anything else is preserved as an unknown field.
  private RowRange(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            bitField0_ |= 0x00000001;
            startRow_ = input.readBytes();
            break;
          }
          case 16: {
            bitField0_ |= 0x00000002;
            startRowInclusive_ = input.readBool();
            break;
          }
          case 26: {
            bitField0_ |= 0x00000004;
            stopRow_ = input.readBytes();
            break;
          }
          case 32: {
            bitField0_ |= 0x00000008;
            stopRowInclusive_ = input.readBool();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      // Always capture whatever was parsed so far, even on failure.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder.class);
  }

  public static com.google.protobuf.Parser<RowRange> PARSER =
      new com.google.protobuf.AbstractParser<RowRange>() {
    public RowRange parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new RowRange(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<RowRange> getParserForType() {
    return PARSER;
  }

  // Presence bitmask for the four optional fields (see class javadoc).
  private int bitField0_;
  // optional bytes start_row = 1;
  public static final int START_ROW_FIELD_NUMBER = 1;
  private com.google.protobuf.ByteString startRow_;
  /**
   * <code>optional bytes start_row = 1;</code>
   */
  public boolean hasStartRow() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>optional bytes start_row = 1;</code>
   */
  public com.google.protobuf.ByteString getStartRow() {
    return startRow_;
  }

  // optional bool start_row_inclusive = 2;
  public static final int START_ROW_INCLUSIVE_FIELD_NUMBER = 2;
  private boolean startRowInclusive_;
  /**
   * <code>optional bool start_row_inclusive = 2;</code>
   */
  public boolean hasStartRowInclusive() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  /**
   * <code>optional bool start_row_inclusive = 2;</code>
   */
  public boolean getStartRowInclusive() {
    return startRowInclusive_;
  }

  // optional bytes stop_row = 3;
  public static final int STOP_ROW_FIELD_NUMBER = 3;
  private com.google.protobuf.ByteString stopRow_;
  /**
   * <code>optional bytes stop_row = 3;</code>
   */
  public boolean hasStopRow() {
    return ((bitField0_ & 0x00000004) == 0x00000004);
  }
  /**
   * <code>optional bytes stop_row = 3;</code>
   */
  public com.google.protobuf.ByteString getStopRow() {
    return stopRow_;
  }

  // optional bool stop_row_inclusive = 4;
  public static final int STOP_ROW_INCLUSIVE_FIELD_NUMBER = 4;
  private boolean stopRowInclusive_;
  /**
   * <code>optional bool stop_row_inclusive = 4;</code>
   */
  public boolean hasStopRowInclusive() {
    return ((bitField0_ & 0x00000008) == 0x00000008);
  }
  /**
   * <code>optional bool stop_row_inclusive = 4;</code>
   */
  public boolean getStopRowInclusive() {
    return stopRowInclusive_;
  }

  private void initFields() {
    startRow_ = com.google.protobuf.ByteString.EMPTY;
    startRowInclusive_ = false;
    stopRow_ = com.google.protobuf.ByteString.EMPTY;
    stopRowInclusive_ = false;
  }
  // Cached initialization check: -1 = not computed yet, otherwise 0/1.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only the fields whose presence bits are set.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeBytes(1, startRow_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      output.writeBool(2, startRowInclusive_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      output.writeBytes(3, stopRow_);
    }
    if (((bitField0_ & 0x00000008) == 0x00000008)) {
      output.writeBool(4, stopRowInclusive_);
    }
    getUnknownFields().writeTo(output);
  }

  // Cached serialized size: -1 = not computed yet.
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(1, startRow_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(2, startRowInclusive_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(3, stopRow_);
    }
    if (((bitField0_ & 0x00000008) == 0x00000008)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(4, stopRowInclusive_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  // Field-by-field equality: presence bits must match, and present fields
  // must compare equal; unknown fields are compared too.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange) obj;

    boolean result = true;
    result = result && (hasStartRow() == other.hasStartRow());
    if (hasStartRow()) {
      result = result && getStartRow()
          .equals(other.getStartRow());
    }
    result = result && (hasStartRowInclusive() == other.hasStartRowInclusive());
    if (hasStartRowInclusive()) {
      result = result && (getStartRowInclusive()
          == other.getStartRowInclusive());
    }
    result = result && (hasStopRow() == other.hasStopRow());
    if (hasStopRow()) {
      result = result && getStopRow()
          .equals(other.getStopRow());
    }
    result = result && (hasStopRowInclusive() == other.hasStopRowInclusive());
    if (hasStopRowInclusive()) {
      result = result && (getStopRowInclusive()
          == other.getStopRowInclusive());
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  // Cached hash: 0 = not computed yet. Mixes in only present fields,
  // consistent with equals above.
  private int memoizedHashCode = 0;
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasStartRow()) {
      hash = (37 * hash) + START_ROW_FIELD_NUMBER;
      hash = (53 * hash) + getStartRow().hashCode();
    }
    if (hasStartRowInclusive()) {
      hash = (37 * hash) + START_ROW_INCLUSIVE_FIELD_NUMBER;
      hash = (53 * hash) + hashBoolean(getStartRowInclusive());
    }
    if (hasStopRow()) {
      hash = (37 * hash) + STOP_ROW_FIELD_NUMBER;
      hash = (53 * hash) + getStopRow().hashCode();
    }
    if (hasStopRowInclusive()) {
      hash = (37 * hash) + STOP_ROW_INCLUSIVE_FIELD_NUMBER;
      hash = (53 * hash) + hashBoolean(getStopRowInclusive());
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard protoc parse entry points, all delegating to PARSER.
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hbase.pb.RowRange}
   *
   * Generated builder; mirrors the four optional fields and their
   * presence bits.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    private static Builder create() {
      return new Builder();
    }

    // Resets every field to its default and clears its presence bit.
    public Builder clear() {
      super.clear();
      startRow_ = com.google.protobuf.ByteString.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      startRowInclusive_ = false;
      bitField0_ = (bitField0_ & ~0x00000002);
      stopRow_ = com.google.protobuf.ByteString.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000004);
      stopRowInclusive_ = false;
      bitField0_ = (bitField0_ & ~0x00000008);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_descriptor;
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange build() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Copies the builder's fields and presence bits into a new message.
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.startRow_ = startRow_;
      if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
        to_bitField0_ |= 0x00000002;
      }
      result.startRowInclusive_ = startRowInclusive_;
      if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
        to_bitField0_ |= 0x00000004;
      }
      result.stopRow_ = stopRow_;
      if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
        to_bitField0_ |= 0x00000008;
      }
      result.stopRowInclusive_ = stopRowInclusive_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Per-field merge: only fields present in 'other' overwrite this builder.
    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance()) return this;
      if (other.hasStartRow()) {
        setStartRow(other.getStartRow());
      }
      if (other.hasStartRowInclusive()) {
        setStartRowInclusive(other.getStartRowInclusive());
      }
      if (other.hasStopRow()) {
        setStopRow(other.getStopRow());
      }
      if (other.hasStopRowInclusive()) {
        setStopRowInclusive(other.getStopRowInclusive());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow.
        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // optional bytes start_row = 1;
    private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY;
    /**
     * <code>optional bytes start_row = 1;</code>
     */
    public boolean hasStartRow() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bytes start_row = 1;</code>
     */
    public com.google.protobuf.ByteString getStartRow() {
      return startRow_;
    }
    /**
     * <code>optional bytes start_row = 1;</code>
     */
    public Builder setStartRow(com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
      startRow_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional bytes start_row = 1;</code>
     */
    public Builder clearStartRow() {
      bitField0_ = (bitField0_ & ~0x00000001);
      startRow_ = getDefaultInstance().getStartRow();
      onChanged();
      return this;
    }

    // optional bool start_row_inclusive = 2;
    private boolean startRowInclusive_ ;
    /**
     * <code>optional bool start_row_inclusive = 2;</code>
     */
    public boolean hasStartRowInclusive() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool start_row_inclusive = 2;</code>
     */
    public boolean getStartRowInclusive() {
      return startRowInclusive_;
    }
    /**
     * <code>optional bool start_row_inclusive = 2;</code>
     */
    public Builder setStartRowInclusive(boolean value) {
      bitField0_ |= 0x00000002;
      startRowInclusive_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional bool start_row_inclusive = 2;</code>
     */
    public Builder clearStartRowInclusive() {
      bitField0_ = (bitField0_ & ~0x00000002);
      startRowInclusive_ = false;
      onChanged();
      return this;
    }

    // optional bytes stop_row = 3;
    private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY;
    /**
     * <code>optional bytes stop_row = 3;</code>
     */
    public boolean hasStopRow() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes stop_row = 3;</code>
     */
    public com.google.protobuf.ByteString getStopRow() {
      return stopRow_;
    }
    /**
     * <code>optional bytes stop_row = 3;</code>
     */
    public Builder setStopRow(com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
      stopRow_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional bytes stop_row = 3;</code>
     */
    public Builder clearStopRow() {
      bitField0_ = (bitField0_ & ~0x00000004);
      stopRow_ = getDefaultInstance().getStopRow();
      onChanged();
      return this;
    }

    // optional bool stop_row_inclusive = 4;
    private boolean stopRowInclusive_ ;
    /**
     * <code>optional bool stop_row_inclusive = 4;</code>
     */
    public boolean hasStopRowInclusive() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional bool stop_row_inclusive = 4;</code>
     */
    public boolean getStopRowInclusive() {
      return stopRowInclusive_;
    }
    /**
     * <code>optional bool stop_row_inclusive = 4;</code>
     */
    public Builder setStopRowInclusive(boolean value) {
      bitField0_ |= 0x00000008;
      stopRowInclusive_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional bool stop_row_inclusive = 4;</code>
     */
    public Builder clearStopRowInclusive() {
      bitField0_ = (bitField0_ & ~0x00000008);
      stopRowInclusive_ = false;
      onChanged();
      return this;
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.RowRange)
  }

  static {
    defaultInstance = new RowRange(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.RowRange)
}

// Read-only accessor interface for hbase.pb.MultiRowRangeFilter
// (protoc-generated).
public interface MultiRowRangeFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // repeated .hbase.pb.RowRange row_range_list = 1;
  /**
   *
<code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
   */
  java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> 
      getRowRangeListList();
  /**
   * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getRowRangeList(int index);
  /**
   * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
   */
  int getRowRangeListCount();
  /**
   * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder> 
      getRowRangeListOrBuilderList();
  /**
   * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder getRowRangeListOrBuilder(
      int index);
}
/**
 * Protobuf type {@code hbase.pb.MultiRowRangeFilter}
 *
 * NOTE(review): protoc-generated code — regenerate from the .proto file
 * instead of editing by hand. Wraps a repeated list of RowRange entries.
 */
public static final class MultiRowRangeFilter extends
    com.google.protobuf.GeneratedMessage
    implements MultiRowRangeFilterOrBuilder {
  // Use MultiRowRangeFilter.newBuilder() to construct.
    private MultiRowRangeFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private MultiRowRangeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final MultiRowRangeFilter defaultInstance;
    public static MultiRowRangeFilter getDefaultInstance() {
      return defaultInstance;
    }

    public MultiRowRangeFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: tag 10 (field 1, wire type 2) appends a
    // RowRange decoded via RowRange.PARSER; any unrecognized tag is retained in
    // unknownFields. The finally block freezes the list even on parse failure.
    private MultiRowRangeFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                rowRangeList_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange>();
                mutable_bitField0_ |= 0x00000001;
              }
              rowRangeList_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          rowRangeList_ = java.util.Collections.unmodifiableList(rowRangeList_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.Builder.class);
    }

    public static com.google.protobuf.Parser<MultiRowRangeFilter> PARSER =
        new com.google.protobuf.AbstractParser<MultiRowRangeFilter>() {
      public MultiRowRangeFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MultiRowRangeFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MultiRowRangeFilter> getParserForType() {
      return PARSER;
    }

    // repeated .hbase.pb.RowRange row_range_list = 1;
    public static final int ROW_RANGE_LIST_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> rowRangeList_;
    /**
     * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> getRowRangeListList() {
      return rowRangeList_;
    }
    /**
     * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>
        getRowRangeListOrBuilderList() {
      return rowRangeList_;
    }
    /**
     * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
     */
    public int getRowRangeListCount() {
      return rowRangeList_.size();
    }
    /**
     * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getRowRangeList(int index) {
      return rowRangeList_.get(index);
    }
    /**
     * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder getRowRangeListOrBuilder(
        int index) {
      return rowRangeList_.get(index);
    }

    private void initFields() {
      rowRangeList_ = java.util.Collections.emptyList();
    }
    // -1 = not yet computed; 1 = initialized (this message has no required
    // fields, so the check always succeeds).
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < rowRangeList_.size(); i++) {
        output.writeMessage(1, rowRangeList_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < rowRangeList_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(1, rowRangeList_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter) obj;

      boolean result = true;
      result = result && getRowRangeListList()
          .equals(other.getRowRangeListList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getRowRangeListCount() > 0) {
        hash = (37 * hash) + ROW_RANGE_LIST_FIELD_NUMBER;
        hash = (53 * hash) + getRowRangeListList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.MultiRowRangeFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRowRangeListFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (rowRangeListBuilder_ == null) {
          rowRangeList_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          rowRangeListBuilder_.clear();
        }
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter getDefaultInstanceForType() {
        return
 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter(this);
        int from_bitField0_ = bitField0_;
        if (rowRangeListBuilder_ == null) {
          // Inline-list mode: freeze the backing list and hand it to the message.
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            rowRangeList_ = java.util.Collections.unmodifiableList(rowRangeList_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.rowRangeList_ = rowRangeList_;
        } else {
          result.rowRangeList_ = rowRangeListBuilder_.build();
        }
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges other's row_range_list: in inline-list mode the other message's
      // (immutable) list is shared when ours is empty, otherwise copied in via
      // ensureRowRangeListIsMutable(); in builder mode the merge goes through
      // rowRangeListBuilder_.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.getDefaultInstance()) return this;
        if (rowRangeListBuilder_ == null) {
          if (!other.rowRangeList_.isEmpty()) {
            if (rowRangeList_.isEmpty()) {
              rowRangeList_ = other.rowRangeList_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureRowRangeListIsMutable();
              rowRangeList_.addAll(other.rowRangeList_);
            }
            onChanged();
          }
        } else {
          if (!other.rowRangeList_.isEmpty()) {
            if (rowRangeListBuilder_.isEmpty()) {
              rowRangeListBuilder_.dispose();
              rowRangeListBuilder_ = null;
              rowRangeList_ = other.rowRangeList_;
              bitField0_ = (bitField0_ & ~0x00000001);
              rowRangeListBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getRowRangeListFieldBuilder() : null;
            } else {
              rowRangeListBuilder_.addAllMessages(other.rowRangeList_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Merge whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated .hbase.pb.RowRange row_range_list = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> rowRangeList_ =
        java.util.Collections.emptyList();
      private void ensureRowRangeListIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          rowRangeList_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange>(rowRangeList_);
          bitField0_ |= 0x00000001;
        }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder> rowRangeListBuilder_;

      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> getRowRangeListList() {
        if (rowRangeListBuilder_ == null) {
          return java.util.Collections.unmodifiableList(rowRangeList_);
        } else {
          return rowRangeListBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public int getRowRangeListCount() {
        if (rowRangeListBuilder_ == null) {
          return rowRangeList_.size();
        } else {
          return rowRangeListBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getRowRangeList(int index) {
        if (rowRangeListBuilder_ == null) {
          return rowRangeList_.get(index);
        } else {
          return rowRangeListBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public Builder setRowRangeList(
          int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange value) {
        if (rowRangeListBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRowRangeListIsMutable();
          rowRangeList_.set(index, value);
          onChanged();
        } else {
          rowRangeListBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public Builder setRowRangeList(
          int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder builderForValue) {
        if (rowRangeListBuilder_ == null) {
          ensureRowRangeListIsMutable();
          rowRangeList_.set(index, builderForValue.build());
          onChanged();
        } else {
          rowRangeListBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public Builder addRowRangeList(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange value) {
        if (rowRangeListBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRowRangeListIsMutable();
          rowRangeList_.add(value);
          onChanged();
        } else {
          rowRangeListBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public Builder addRowRangeList(
          int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange value) {
        if (rowRangeListBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRowRangeListIsMutable();
          rowRangeList_.add(index, value);
          onChanged();
        } else {
          rowRangeListBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public Builder addRowRangeList(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder builderForValue) {
        if (rowRangeListBuilder_ == null) {
          ensureRowRangeListIsMutable();
          rowRangeList_.add(builderForValue.build());
          onChanged();
        } else {
          rowRangeListBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public Builder addRowRangeList(
          int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder builderForValue) {
        if (rowRangeListBuilder_ == null) {
          ensureRowRangeListIsMutable();
          rowRangeList_.add(index, builderForValue.build());
          onChanged();
        } else {
          rowRangeListBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public Builder addAllRowRangeList(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> values) {
        if (rowRangeListBuilder_ == null) {
          ensureRowRangeListIsMutable();
          super.addAll(values, rowRangeList_);
          onChanged();
        } else {
          rowRangeListBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public Builder clearRowRangeList() {
        if (rowRangeListBuilder_ == null) {
          rowRangeList_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          rowRangeListBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public Builder removeRowRangeList(int index) {
        if (rowRangeListBuilder_ == null) {
          ensureRowRangeListIsMutable();
          rowRangeList_.remove(index);
          onChanged();
        } else {
          rowRangeListBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder getRowRangeListBuilder(
          int index) {
        return getRowRangeListFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder getRowRangeListOrBuilder(
          int index) {
        if (rowRangeListBuilder_ == null) {
          return rowRangeList_.get(index);
        } else {
          return rowRangeListBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>
           getRowRangeListOrBuilderList() {
        if (rowRangeListBuilder_ != null) {
          return rowRangeListBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(rowRangeList_);
        }
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder addRowRangeListBuilder() {
        return getRowRangeListFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder addRowRangeListBuilder(
          int index) {
        return getRowRangeListFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance());
      }
      /**
       * <code>repeated .hbase.pb.RowRange row_range_list = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder>
           getRowRangeListBuilderList() {
        return getRowRangeListFieldBuilder().getBuilderList();
      }
      // Lazily switches the field from the plain backing list to a
      // RepeatedFieldBuilder; after this call rowRangeList_ is null and all
      // access goes through rowRangeListBuilder_.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>
          getRowRangeListFieldBuilder() {
        if (rowRangeListBuilder_ == null) {
          rowRangeListBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>(
                  rowRangeList_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          rowRangeList_ = null;
        }
        return rowRangeListBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.MultiRowRangeFilter)
    }

    static {
      defaultInstance = new MultiRowRangeFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.MultiRowRangeFilter)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_Filter_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_Filter_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_ColumnCountGetFilter_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_ColumnPaginationFilter_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_ColumnPrefixFilter_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_ColumnRangeFilter_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_CompareFilter_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_CompareFilter_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_DependentColumnFilter_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable;
  private static
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FamilyFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_FamilyFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FilterList_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_FilterList_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FilterWrapper_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_FilterWrapper_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FuzzyRowFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_InclusiveStopFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_KeyOnlyFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable; private static 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_PageFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_PageFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_PrefixFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_PrefixFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_QualifierFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_QualifierFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RandomRowFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RowFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_RowFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SingleColumnValueFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable; private static 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SkipFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_SkipFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TimestampsFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ValueFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_ValueFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WhileMatchFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FilterAllFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RowRange_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_RowRange_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MultiRowRangeFilter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\014Filter.proto\022\010hbase.pb\032\013HBase.proto\032\020C" + 
"omparator.proto\"1\n\006Filter\022\014\n\004name\030\001 \002(\t\022" + "\031\n\021serialized_filter\030\002 \001(\014\"%\n\024ColumnCoun" + "tGetFilter\022\r\n\005limit\030\001 \002(\005\"N\n\026ColumnPagin" + "ationFilter\022\r\n\005limit\030\001 \002(\005\022\016\n\006offset\030\002 \001" + "(\005\022\025\n\rcolumn_offset\030\003 \001(\014\"$\n\022ColumnPrefi" + "xFilter\022\016\n\006prefix\030\001 \002(\014\"w\n\021ColumnRangeFi" + "lter\022\022\n\nmin_column\030\001 \001(\014\022\034\n\024min_column_i" + "nclusive\030\002 \001(\010\022\022\n\nmax_column\030\003 \001(\014\022\034\n\024ma" + "x_column_inclusive\030\004 \001(\010\"d\n\rCompareFilte", "r\022)\n\ncompare_op\030\001 \002(\0162\025.hbase.pb.Compare" + "Type\022(\n\ncomparator\030\002 \001(\0132\024.hbase.pb.Comp" + "arator\"\230\001\n\025DependentColumnFilter\022/\n\016comp" + "are_filter\030\001 \002(\0132\027.hbase.pb.CompareFilte" + "r\022\025\n\rcolumn_family\030\002 \001(\014\022\030\n\020column_quali" + "fier\030\003 \001(\014\022\035\n\025drop_dependent_column\030\004 \001(" + "\010\"?\n\014FamilyFilter\022/\n\016compare_filter\030\001 \002(" + "\0132\027.hbase.pb.CompareFilter\"\222\001\n\nFilterLis" + "t\022/\n\010operator\030\001 \002(\0162\035.hbase.pb.FilterLis" + "t.Operator\022!\n\007filters\030\002 \003(\0132\020.hbase.pb.F", "ilter\"0\n\010Operator\022\021\n\rMUST_PASS_ALL\020\001\022\021\n\r" + "MUST_PASS_ONE\020\002\"1\n\rFilterWrapper\022 \n\006filt" + "er\030\001 \002(\0132\020.hbase.pb.Filter\"\024\n\022FirstKeyOn" + "lyFilter\";\n%FirstKeyValueMatchingQualifi" + "ersFilter\022\022\n\nqualifiers\030\001 \003(\014\"C\n\016FuzzyRo" + "wFilter\0221\n\017fuzzy_keys_data\030\001 \003(\0132\030.hbase" + ".pb.BytesBytesPair\"+\n\023InclusiveStopFilte" + "r\022\024\n\014stop_row_key\030\001 \001(\014\"#\n\rKeyOnlyFilter" + "\022\022\n\nlen_as_val\030\001 \002(\010\"5\n\032MultipleColumnPr" + "efixFilter\022\027\n\017sorted_prefixes\030\001 \003(\014\"\037\n\nP", 
"ageFilter\022\021\n\tpage_size\030\001 \002(\003\"\036\n\014PrefixFi" + "lter\022\016\n\006prefix\030\001 \001(\014\"B\n\017QualifierFilter\022" + "/\n\016compare_filter\030\001 \002(\0132\027.hbase.pb.Compa" + "reFilter\"!\n\017RandomRowFilter\022\016\n\006chance\030\001 " + "\002(\002\"<\n\tRowFilter\022/\n\016compare_filter\030\001 \002(\013" + "2\027.hbase.pb.CompareFilter\"g\n\036SingleColum" + "nValueExcludeFilter\022E\n\032single_column_val" + "ue_filter\030\001 \002(\0132!.hbase.pb.SingleColumnV" + "alueFilter\"\327\001\n\027SingleColumnValueFilter\022\025" + "\n\rcolumn_family\030\001 \001(\014\022\030\n\020column_qualifie", "r\030\002 \001(\014\022)\n\ncompare_op\030\003 \002(\0162\025.hbase.pb.C" + "ompareType\022(\n\ncomparator\030\004 \002(\0132\024.hbase.p" + "b.Comparator\022\031\n\021filter_if_missing\030\005 \001(\010\022" + "\033\n\023latest_version_only\030\006 \001(\010\".\n\nSkipFilt" + "er\022 \n\006filter\030\001 \002(\0132\020.hbase.pb.Filter\"<\n\020" + "TimestampsFilter\022\026\n\ntimestamps\030\001 \003(\003B\002\020\001" + "\022\020\n\010can_hint\030\002 \001(\010\">\n\013ValueFilter\022/\n\016com" + "pare_filter\030\001 \002(\0132\027.hbase.pb.CompareFilt" + "er\"4\n\020WhileMatchFilter\022 \n\006filter\030\001 \002(\0132\020" + ".hbase.pb.Filter\"\021\n\017FilterAllFilter\"h\n\010R", "owRange\022\021\n\tstart_row\030\001 \001(\014\022\033\n\023start_row_" + "inclusive\030\002 \001(\010\022\020\n\010stop_row\030\003 \001(\014\022\032\n\022sto" + "p_row_inclusive\030\004 \001(\010\"A\n\023MultiRowRangeFi" + "lter\022*\n\016row_range_list\030\001 \003(\0132\022.hbase.pb." 
+ "RowRangeBB\n*org.apache.hadoop.hbase.prot" + "obuf.generatedB\014FilterProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_hbase_pb_Filter_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_Filter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_Filter_descriptor, new java.lang.String[] { "Name", "SerializedFilter", }); internal_static_hbase_pb_ColumnCountGetFilter_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ColumnCountGetFilter_descriptor, new java.lang.String[] { "Limit", }); internal_static_hbase_pb_ColumnPaginationFilter_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ColumnPaginationFilter_descriptor, new java.lang.String[] { "Limit", "Offset", "ColumnOffset", }); internal_static_hbase_pb_ColumnPrefixFilter_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ColumnPrefixFilter_descriptor, new java.lang.String[] { "Prefix", }); internal_static_hbase_pb_ColumnRangeFilter_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( 
internal_static_hbase_pb_ColumnRangeFilter_descriptor, new java.lang.String[] { "MinColumn", "MinColumnInclusive", "MaxColumn", "MaxColumnInclusive", }); internal_static_hbase_pb_CompareFilter_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_hbase_pb_CompareFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CompareFilter_descriptor, new java.lang.String[] { "CompareOp", "Comparator", }); internal_static_hbase_pb_DependentColumnFilter_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_DependentColumnFilter_descriptor, new java.lang.String[] { "CompareFilter", "ColumnFamily", "ColumnQualifier", "DropDependentColumn", }); internal_static_hbase_pb_FamilyFilter_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_hbase_pb_FamilyFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_FamilyFilter_descriptor, new java.lang.String[] { "CompareFilter", }); internal_static_hbase_pb_FilterList_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_hbase_pb_FilterList_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_FilterList_descriptor, new java.lang.String[] { "Operator", "Filters", }); internal_static_hbase_pb_FilterWrapper_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_hbase_pb_FilterWrapper_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_FilterWrapper_descriptor, new java.lang.String[] { "Filter", }); internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor, new java.lang.String[] { }); internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor, new java.lang.String[] { "Qualifiers", }); internal_static_hbase_pb_FuzzyRowFilter_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_FuzzyRowFilter_descriptor, new java.lang.String[] { "FuzzyKeysData", }); internal_static_hbase_pb_InclusiveStopFilter_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_InclusiveStopFilter_descriptor, new java.lang.String[] { "StopRowKey", }); internal_static_hbase_pb_KeyOnlyFilter_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_KeyOnlyFilter_descriptor, new java.lang.String[] { "LenAsVal", }); internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor, new java.lang.String[] { "SortedPrefixes", }); internal_static_hbase_pb_PageFilter_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_hbase_pb_PageFilter_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_PageFilter_descriptor, new java.lang.String[] { "PageSize", }); internal_static_hbase_pb_PrefixFilter_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_hbase_pb_PrefixFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_PrefixFilter_descriptor, new java.lang.String[] { "Prefix", }); internal_static_hbase_pb_QualifierFilter_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_hbase_pb_QualifierFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_QualifierFilter_descriptor, new java.lang.String[] { "CompareFilter", }); internal_static_hbase_pb_RandomRowFilter_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RandomRowFilter_descriptor, new java.lang.String[] { "Chance", }); internal_static_hbase_pb_RowFilter_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_hbase_pb_RowFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RowFilter_descriptor, new java.lang.String[] { "CompareFilter", }); internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor, new java.lang.String[] { "SingleColumnValueFilter", }); internal_static_hbase_pb_SingleColumnValueFilter_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( 
internal_static_hbase_pb_SingleColumnValueFilter_descriptor, new java.lang.String[] { "ColumnFamily", "ColumnQualifier", "CompareOp", "Comparator", "FilterIfMissing", "LatestVersionOnly", }); internal_static_hbase_pb_SkipFilter_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_hbase_pb_SkipFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_SkipFilter_descriptor, new java.lang.String[] { "Filter", }); internal_static_hbase_pb_TimestampsFilter_descriptor = getDescriptor().getMessageTypes().get(24); internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_TimestampsFilter_descriptor, new java.lang.String[] { "Timestamps", "CanHint", }); internal_static_hbase_pb_ValueFilter_descriptor = getDescriptor().getMessageTypes().get(25); internal_static_hbase_pb_ValueFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ValueFilter_descriptor, new java.lang.String[] { "CompareFilter", }); internal_static_hbase_pb_WhileMatchFilter_descriptor = getDescriptor().getMessageTypes().get(26); internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_WhileMatchFilter_descriptor, new java.lang.String[] { "Filter", }); internal_static_hbase_pb_FilterAllFilter_descriptor = getDescriptor().getMessageTypes().get(27); internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_FilterAllFilter_descriptor, new java.lang.String[] { }); internal_static_hbase_pb_RowRange_descriptor = getDescriptor().getMessageTypes().get(28); internal_static_hbase_pb_RowRange_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RowRange_descriptor, new 
java.lang.String[] { "StartRow", "StartRowInclusive", "StopRow", "StopRowInclusive", }); internal_static_hbase_pb_MultiRowRangeFilter_descriptor = getDescriptor().getMessageTypes().get(29); internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_MultiRowRangeFilter_descriptor, new java.lang.String[] { "RowRangeList", }); return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(), }, assigner); } // @@protoc_insertion_point(outer_class_scope) }
gustavoanatoly/hbase
hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java
Java
apache-2.0
699,245
package org.knowm.xchart.demo.charts.date;

import java.time.LocalDateTime;
import java.time.Month;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import org.knowm.xchart.SwingWrapper;
import org.knowm.xchart.XYChart;
import org.knowm.xchart.XYChartBuilder;
import org.knowm.xchart.demo.charts.ExampleChart;

/**
 * Year scale
 *
 * <p>Demonstrates the following:
 *
 * <ul>
 *   <li>Rotated 90 degrees X-Axis labels
 *   <li>Setting custom X-Axis tick labels
 *   <li>Setting custom cursor tool tip text
 * </ul>
 */
public class DateChart09 implements ExampleChart<XYChart> {

  public static void main(String[] args) {

    ExampleChart<XYChart> demo = new DateChart09();
    new SwingWrapper<>(demo.getChart()).displayChart();
  }

  @Override
  public XYChart getChart() {

    // Chart shell with the class name as its title.
    XYChart chart =
        new XYChartBuilder().width(800).height(600).title(getClass().getSimpleName()).build();

    // Hide the legend; rotate tick labels 90 degrees so the day labels fit.
    chart.getStyler().setLegendVisible(false);
    chart.getStyler().setXAxisLabelRotation(90);

    // One point per day of a non-leap year; y values are random noise.
    List<Integer> dayIndices = IntStream.range(0, 365).boxed().collect(Collectors.toList());
    Random rng = new Random();
    List<Double> values =
        dayIndices.stream().map(i -> rng.nextDouble()).collect(Collectors.toList());
    chart.addSeries("blah", dayIndices, values);

    // Custom tick labels: translate each numeric day index into a month name
    // by offsetting from a fixed start date.
    LocalDateTime firstDay = LocalDateTime.of(2001, Month.JANUARY, 1, 0, 0, 0);
    DateTimeFormatter monthFormatter = DateTimeFormatter.ofPattern("LLL");
    chart
        .getStyler()
        .setxAxisTickLabelsFormattingFunction(
            x -> firstDay.plusDays(x.longValue()).format(monthFormatter));

    // Custom cursor tooltip: show month + day-of-month for the hovered point.
    chart.getStyler().setCursorEnabled(true);
    DateTimeFormatter tooltipFormatter = DateTimeFormatter.ofPattern("LLL dd");
    chart
        .getStyler()
        .setCustomCursorXDataFormattingFunction(
            x -> firstDay.plusDays(x.longValue()).format(tooltipFormatter));

    return chart;
  }

  @Override
  public String getExampleChartName() {

    return getClass().getSimpleName() + " - Custom Date Formatter Without Years";
  }
}
nroduit/XChart
xchart-demo/src/main/java/org/knowm/xchart/demo/charts/date/DateChart09.java
Java
apache-2.0
2,383