| repo_id (string, 875 classes) | size (int64, 974–38.9k) | file_path (string, length 10–308) | content (string, length 974–38.9k) |
|---|---|---|---|
google/schemaorg-java | 37,474 | src/main/java/com/google/schemaorg/core/impl/OrganizationImpl.java | /*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.SchemaOrgTypeImpl;
import com.google.schemaorg.ValueType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.GoogConstants;
import com.google.schemaorg.goog.PopularityScoreSpecification;
/** Implementation of {@link Organization}. */
public class OrganizationImpl extends PlaceOrganizationCommonImpl implements Organization {
// Complete set of property URIs (core + goog namespaces) that this type accepts;
// built once at class-load time and consulted by includesProperty().
private static final ImmutableSet<String> PROPERTY_SET = initializePropertySet();
// Enumerates every schema.org property supported by Organization. Generated code:
// keep the list in sync with the Organization.Builder methods below.
private static ImmutableSet<String> initializePropertySet() {
ImmutableSet.Builder<String> builder = ImmutableSet.builder();
builder.add(CoreConstants.PROPERTY_ADDITIONAL_TYPE);
builder.add(CoreConstants.PROPERTY_ADDRESS);
builder.add(CoreConstants.PROPERTY_AGGREGATE_RATING);
builder.add(CoreConstants.PROPERTY_ALTERNATE_NAME);
builder.add(CoreConstants.PROPERTY_ALUMNI);
builder.add(CoreConstants.PROPERTY_AREA_SERVED);
builder.add(CoreConstants.PROPERTY_AWARD);
builder.add(CoreConstants.PROPERTY_AWARDS);
builder.add(CoreConstants.PROPERTY_BRAND);
builder.add(CoreConstants.PROPERTY_CONTACT_POINT);
builder.add(CoreConstants.PROPERTY_CONTACT_POINTS);
builder.add(CoreConstants.PROPERTY_DEPARTMENT);
builder.add(CoreConstants.PROPERTY_DESCRIPTION);
builder.add(CoreConstants.PROPERTY_DISSOLUTION_DATE);
builder.add(CoreConstants.PROPERTY_DUNS);
builder.add(CoreConstants.PROPERTY_EMAIL);
builder.add(CoreConstants.PROPERTY_EMPLOYEE);
builder.add(CoreConstants.PROPERTY_EMPLOYEES);
builder.add(CoreConstants.PROPERTY_EVENT);
builder.add(CoreConstants.PROPERTY_EVENTS);
builder.add(CoreConstants.PROPERTY_FAX_NUMBER);
builder.add(CoreConstants.PROPERTY_FOUNDER);
builder.add(CoreConstants.PROPERTY_FOUNDERS);
builder.add(CoreConstants.PROPERTY_FOUNDING_DATE);
builder.add(CoreConstants.PROPERTY_FOUNDING_LOCATION);
builder.add(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER);
builder.add(CoreConstants.PROPERTY_HAS_OFFER_CATALOG);
builder.add(CoreConstants.PROPERTY_HAS_POS);
builder.add(CoreConstants.PROPERTY_IMAGE);
builder.add(CoreConstants.PROPERTY_ISIC_V4);
builder.add(CoreConstants.PROPERTY_LEGAL_NAME);
builder.add(CoreConstants.PROPERTY_LOCATION);
builder.add(CoreConstants.PROPERTY_LOGO);
builder.add(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE);
builder.add(CoreConstants.PROPERTY_MAKES_OFFER);
builder.add(CoreConstants.PROPERTY_MEMBER);
builder.add(CoreConstants.PROPERTY_MEMBER_OF);
builder.add(CoreConstants.PROPERTY_MEMBERS);
builder.add(CoreConstants.PROPERTY_NAICS);
builder.add(CoreConstants.PROPERTY_NAME);
builder.add(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES);
builder.add(CoreConstants.PROPERTY_OWNS);
builder.add(CoreConstants.PROPERTY_PARENT_ORGANIZATION);
builder.add(CoreConstants.PROPERTY_POTENTIAL_ACTION);
builder.add(CoreConstants.PROPERTY_REVIEW);
builder.add(CoreConstants.PROPERTY_REVIEWS);
builder.add(CoreConstants.PROPERTY_SAME_AS);
builder.add(CoreConstants.PROPERTY_SEEKS);
builder.add(CoreConstants.PROPERTY_SERVICE_AREA);
builder.add(CoreConstants.PROPERTY_SUB_ORGANIZATION);
builder.add(CoreConstants.PROPERTY_TAX_ID);
builder.add(CoreConstants.PROPERTY_TELEPHONE);
builder.add(CoreConstants.PROPERTY_URL);
builder.add(CoreConstants.PROPERTY_VAT_ID);
builder.add(GoogConstants.PROPERTY_DETAILED_DESCRIPTION);
builder.add(GoogConstants.PROPERTY_POPULARITY_SCORE);
return builder.build();
}
// Builder for Organization. Generated-code conventions used throughout:
// - String overloads wrap the raw value as Text (even for date-valued properties
//   such as dissolutionDate/foundingDate, per this generator's uniform convention);
// - *.Builder overloads eagerly call value.build() before storing the property;
// - every overload delegates to addProperty(propertyUri, value) in the superclass.
static final class BuilderImpl extends SchemaOrgTypeImpl.BuilderImpl<Organization.Builder>
implements Organization.Builder {
@Override
public Organization.Builder addAdditionalType(URL value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, value);
}
@Override
public Organization.Builder addAdditionalType(String value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, Text.of(value));
}
@Override
public Organization.Builder addAddress(PostalAddress value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, value);
}
@Override
public Organization.Builder addAddress(PostalAddress.Builder value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, value.build());
}
@Override
public Organization.Builder addAddress(Text value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, value);
}
@Override
public Organization.Builder addAddress(String value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, Text.of(value));
}
@Override
public Organization.Builder addAggregateRating(AggregateRating value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value);
}
@Override
public Organization.Builder addAggregateRating(AggregateRating.Builder value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value.build());
}
@Override
public Organization.Builder addAggregateRating(String value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, Text.of(value));
}
@Override
public Organization.Builder addAlternateName(Text value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, value);
}
@Override
public Organization.Builder addAlternateName(String value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, Text.of(value));
}
@Override
public Organization.Builder addAlumni(Person value) {
return addProperty(CoreConstants.PROPERTY_ALUMNI, value);
}
@Override
public Organization.Builder addAlumni(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_ALUMNI, value.build());
}
@Override
public Organization.Builder addAlumni(String value) {
return addProperty(CoreConstants.PROPERTY_ALUMNI, Text.of(value));
}
@Override
public Organization.Builder addAreaServed(AdministrativeArea value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public Organization.Builder addAreaServed(AdministrativeArea.Builder value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build());
}
@Override
public Organization.Builder addAreaServed(GeoShape value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public Organization.Builder addAreaServed(GeoShape.Builder value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build());
}
@Override
public Organization.Builder addAreaServed(Place value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public Organization.Builder addAreaServed(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build());
}
@Override
public Organization.Builder addAreaServed(Text value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public Organization.Builder addAreaServed(String value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, Text.of(value));
}
@Override
public Organization.Builder addAward(Text value) {
return addProperty(CoreConstants.PROPERTY_AWARD, value);
}
@Override
public Organization.Builder addAward(String value) {
return addProperty(CoreConstants.PROPERTY_AWARD, Text.of(value));
}
@Override
public Organization.Builder addAwards(Text value) {
return addProperty(CoreConstants.PROPERTY_AWARDS, value);
}
@Override
public Organization.Builder addAwards(String value) {
return addProperty(CoreConstants.PROPERTY_AWARDS, Text.of(value));
}
@Override
public Organization.Builder addBrand(Brand value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value);
}
@Override
public Organization.Builder addBrand(Brand.Builder value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value.build());
}
@Override
public Organization.Builder addBrand(Organization value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value);
}
@Override
public Organization.Builder addBrand(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value.build());
}
@Override
public Organization.Builder addBrand(String value) {
return addProperty(CoreConstants.PROPERTY_BRAND, Text.of(value));
}
@Override
public Organization.Builder addContactPoint(ContactPoint value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, value);
}
@Override
public Organization.Builder addContactPoint(ContactPoint.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, value.build());
}
@Override
public Organization.Builder addContactPoint(String value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, Text.of(value));
}
@Override
public Organization.Builder addContactPoints(ContactPoint value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, value);
}
@Override
public Organization.Builder addContactPoints(ContactPoint.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, value.build());
}
@Override
public Organization.Builder addContactPoints(String value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, Text.of(value));
}
@Override
public Organization.Builder addDepartment(Organization value) {
return addProperty(CoreConstants.PROPERTY_DEPARTMENT, value);
}
@Override
public Organization.Builder addDepartment(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_DEPARTMENT, value.build());
}
@Override
public Organization.Builder addDepartment(String value) {
return addProperty(CoreConstants.PROPERTY_DEPARTMENT, Text.of(value));
}
@Override
public Organization.Builder addDescription(Text value) {
return addProperty(CoreConstants.PROPERTY_DESCRIPTION, value);
}
@Override
public Organization.Builder addDescription(String value) {
return addProperty(CoreConstants.PROPERTY_DESCRIPTION, Text.of(value));
}
@Override
public Organization.Builder addDissolutionDate(Date value) {
return addProperty(CoreConstants.PROPERTY_DISSOLUTION_DATE, value);
}
@Override
public Organization.Builder addDissolutionDate(String value) {
return addProperty(CoreConstants.PROPERTY_DISSOLUTION_DATE, Text.of(value));
}
@Override
public Organization.Builder addDuns(Text value) {
return addProperty(CoreConstants.PROPERTY_DUNS, value);
}
@Override
public Organization.Builder addDuns(String value) {
return addProperty(CoreConstants.PROPERTY_DUNS, Text.of(value));
}
@Override
public Organization.Builder addEmail(Text value) {
return addProperty(CoreConstants.PROPERTY_EMAIL, value);
}
@Override
public Organization.Builder addEmail(String value) {
return addProperty(CoreConstants.PROPERTY_EMAIL, Text.of(value));
}
@Override
public Organization.Builder addEmployee(Person value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEE, value);
}
@Override
public Organization.Builder addEmployee(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEE, value.build());
}
@Override
public Organization.Builder addEmployee(String value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEE, Text.of(value));
}
@Override
public Organization.Builder addEmployees(Person value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEES, value);
}
@Override
public Organization.Builder addEmployees(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEES, value.build());
}
@Override
public Organization.Builder addEmployees(String value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEES, Text.of(value));
}
@Override
public Organization.Builder addEvent(Event value) {
return addProperty(CoreConstants.PROPERTY_EVENT, value);
}
@Override
public Organization.Builder addEvent(Event.Builder value) {
return addProperty(CoreConstants.PROPERTY_EVENT, value.build());
}
@Override
public Organization.Builder addEvent(String value) {
return addProperty(CoreConstants.PROPERTY_EVENT, Text.of(value));
}
@Override
public Organization.Builder addEvents(Event value) {
return addProperty(CoreConstants.PROPERTY_EVENTS, value);
}
@Override
public Organization.Builder addEvents(Event.Builder value) {
return addProperty(CoreConstants.PROPERTY_EVENTS, value.build());
}
@Override
public Organization.Builder addEvents(String value) {
return addProperty(CoreConstants.PROPERTY_EVENTS, Text.of(value));
}
@Override
public Organization.Builder addFaxNumber(Text value) {
return addProperty(CoreConstants.PROPERTY_FAX_NUMBER, value);
}
@Override
public Organization.Builder addFaxNumber(String value) {
return addProperty(CoreConstants.PROPERTY_FAX_NUMBER, Text.of(value));
}
@Override
public Organization.Builder addFounder(Person value) {
return addProperty(CoreConstants.PROPERTY_FOUNDER, value);
}
@Override
public Organization.Builder addFounder(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_FOUNDER, value.build());
}
@Override
public Organization.Builder addFounder(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDER, Text.of(value));
}
@Override
public Organization.Builder addFounders(Person value) {
return addProperty(CoreConstants.PROPERTY_FOUNDERS, value);
}
@Override
public Organization.Builder addFounders(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_FOUNDERS, value.build());
}
@Override
public Organization.Builder addFounders(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDERS, Text.of(value));
}
@Override
public Organization.Builder addFoundingDate(Date value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_DATE, value);
}
@Override
public Organization.Builder addFoundingDate(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_DATE, Text.of(value));
}
@Override
public Organization.Builder addFoundingLocation(Place value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, value);
}
@Override
public Organization.Builder addFoundingLocation(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, value.build());
}
@Override
public Organization.Builder addFoundingLocation(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, Text.of(value));
}
@Override
public Organization.Builder addGlobalLocationNumber(Text value) {
return addProperty(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER, value);
}
@Override
public Organization.Builder addGlobalLocationNumber(String value) {
return addProperty(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER, Text.of(value));
}
@Override
public Organization.Builder addHasOfferCatalog(OfferCatalog value) {
return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, value);
}
@Override
public Organization.Builder addHasOfferCatalog(OfferCatalog.Builder value) {
return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, value.build());
}
@Override
public Organization.Builder addHasOfferCatalog(String value) {
return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, Text.of(value));
}
@Override
public Organization.Builder addHasPOS(Place value) {
return addProperty(CoreConstants.PROPERTY_HAS_POS, value);
}
@Override
public Organization.Builder addHasPOS(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_HAS_POS, value.build());
}
@Override
public Organization.Builder addHasPOS(String value) {
return addProperty(CoreConstants.PROPERTY_HAS_POS, Text.of(value));
}
@Override
public Organization.Builder addImage(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value);
}
@Override
public Organization.Builder addImage(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value.build());
}
@Override
public Organization.Builder addImage(URL value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value);
}
@Override
public Organization.Builder addImage(String value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, Text.of(value));
}
@Override
public Organization.Builder addIsicV4(Text value) {
return addProperty(CoreConstants.PROPERTY_ISIC_V4, value);
}
@Override
public Organization.Builder addIsicV4(String value) {
return addProperty(CoreConstants.PROPERTY_ISIC_V4, Text.of(value));
}
@Override
public Organization.Builder addLegalName(Text value) {
return addProperty(CoreConstants.PROPERTY_LEGAL_NAME, value);
}
@Override
public Organization.Builder addLegalName(String value) {
return addProperty(CoreConstants.PROPERTY_LEGAL_NAME, Text.of(value));
}
@Override
public Organization.Builder addLocation(Place value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value);
}
@Override
public Organization.Builder addLocation(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value.build());
}
@Override
public Organization.Builder addLocation(PostalAddress value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value);
}
@Override
public Organization.Builder addLocation(PostalAddress.Builder value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value.build());
}
@Override
public Organization.Builder addLocation(Text value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value);
}
@Override
public Organization.Builder addLocation(String value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, Text.of(value));
}
@Override
public Organization.Builder addLogo(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value);
}
@Override
public Organization.Builder addLogo(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value.build());
}
@Override
public Organization.Builder addLogo(URL value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value);
}
@Override
public Organization.Builder addLogo(String value) {
return addProperty(CoreConstants.PROPERTY_LOGO, Text.of(value));
}
@Override
public Organization.Builder addMainEntityOfPage(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value);
}
@Override
public Organization.Builder addMainEntityOfPage(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value.build());
}
@Override
public Organization.Builder addMainEntityOfPage(URL value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value);
}
@Override
public Organization.Builder addMainEntityOfPage(String value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, Text.of(value));
}
@Override
public Organization.Builder addMakesOffer(Offer value) {
return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, value);
}
@Override
public Organization.Builder addMakesOffer(Offer.Builder value) {
return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, value.build());
}
@Override
public Organization.Builder addMakesOffer(String value) {
return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, Text.of(value));
}
@Override
public Organization.Builder addMember(Organization value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value);
}
@Override
public Organization.Builder addMember(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value.build());
}
@Override
public Organization.Builder addMember(Person value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value);
}
@Override
public Organization.Builder addMember(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value.build());
}
@Override
public Organization.Builder addMember(String value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, Text.of(value));
}
@Override
public Organization.Builder addMemberOf(Organization value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value);
}
@Override
public Organization.Builder addMemberOf(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value.build());
}
@Override
public Organization.Builder addMemberOf(ProgramMembership value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value);
}
@Override
public Organization.Builder addMemberOf(ProgramMembership.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value.build());
}
@Override
public Organization.Builder addMemberOf(String value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, Text.of(value));
}
@Override
public Organization.Builder addMembers(Organization value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value);
}
@Override
public Organization.Builder addMembers(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value.build());
}
@Override
public Organization.Builder addMembers(Person value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value);
}
@Override
public Organization.Builder addMembers(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value.build());
}
@Override
public Organization.Builder addMembers(String value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, Text.of(value));
}
@Override
public Organization.Builder addNaics(Text value) {
return addProperty(CoreConstants.PROPERTY_NAICS, value);
}
@Override
public Organization.Builder addNaics(String value) {
return addProperty(CoreConstants.PROPERTY_NAICS, Text.of(value));
}
@Override
public Organization.Builder addName(Text value) {
return addProperty(CoreConstants.PROPERTY_NAME, value);
}
@Override
public Organization.Builder addName(String value) {
return addProperty(CoreConstants.PROPERTY_NAME, Text.of(value));
}
@Override
public Organization.Builder addNumberOfEmployees(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, value);
}
@Override
public Organization.Builder addNumberOfEmployees(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, value.build());
}
@Override
public Organization.Builder addNumberOfEmployees(String value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, Text.of(value));
}
@Override
public Organization.Builder addOwns(OwnershipInfo value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value);
}
@Override
public Organization.Builder addOwns(OwnershipInfo.Builder value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value.build());
}
@Override
public Organization.Builder addOwns(Product value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value);
}
@Override
public Organization.Builder addOwns(Product.Builder value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value.build());
}
@Override
public Organization.Builder addOwns(String value) {
return addProperty(CoreConstants.PROPERTY_OWNS, Text.of(value));
}
@Override
public Organization.Builder addParentOrganization(Organization value) {
return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, value);
}
@Override
public Organization.Builder addParentOrganization(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, value.build());
}
@Override
public Organization.Builder addParentOrganization(String value) {
return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, Text.of(value));
}
@Override
public Organization.Builder addPotentialAction(Action value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value);
}
@Override
public Organization.Builder addPotentialAction(Action.Builder value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value.build());
}
@Override
public Organization.Builder addPotentialAction(String value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, Text.of(value));
}
@Override
public Organization.Builder addReview(Review value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, value);
}
@Override
public Organization.Builder addReview(Review.Builder value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, value.build());
}
@Override
public Organization.Builder addReview(String value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, Text.of(value));
}
@Override
public Organization.Builder addReviews(Review value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, value);
}
@Override
public Organization.Builder addReviews(Review.Builder value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, value.build());
}
@Override
public Organization.Builder addReviews(String value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, Text.of(value));
}
@Override
public Organization.Builder addSameAs(URL value) {
return addProperty(CoreConstants.PROPERTY_SAME_AS, value);
}
@Override
public Organization.Builder addSameAs(String value) {
return addProperty(CoreConstants.PROPERTY_SAME_AS, Text.of(value));
}
@Override
public Organization.Builder addSeeks(Demand value) {
return addProperty(CoreConstants.PROPERTY_SEEKS, value);
}
@Override
public Organization.Builder addSeeks(Demand.Builder value) {
return addProperty(CoreConstants.PROPERTY_SEEKS, value.build());
}
@Override
public Organization.Builder addSeeks(String value) {
return addProperty(CoreConstants.PROPERTY_SEEKS, Text.of(value));
}
@Override
public Organization.Builder addServiceArea(AdministrativeArea value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value);
}
@Override
public Organization.Builder addServiceArea(AdministrativeArea.Builder value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build());
}
@Override
public Organization.Builder addServiceArea(GeoShape value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value);
}
@Override
public Organization.Builder addServiceArea(GeoShape.Builder value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build());
}
@Override
public Organization.Builder addServiceArea(Place value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value);
}
@Override
public Organization.Builder addServiceArea(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build());
}
@Override
public Organization.Builder addServiceArea(String value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, Text.of(value));
}
@Override
public Organization.Builder addSubOrganization(Organization value) {
return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, value);
}
@Override
public Organization.Builder addSubOrganization(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, value.build());
}
@Override
public Organization.Builder addSubOrganization(String value) {
return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, Text.of(value));
}
@Override
public Organization.Builder addTaxID(Text value) {
return addProperty(CoreConstants.PROPERTY_TAX_ID, value);
}
@Override
public Organization.Builder addTaxID(String value) {
return addProperty(CoreConstants.PROPERTY_TAX_ID, Text.of(value));
}
@Override
public Organization.Builder addTelephone(Text value) {
return addProperty(CoreConstants.PROPERTY_TELEPHONE, value);
}
@Override
public Organization.Builder addTelephone(String value) {
return addProperty(CoreConstants.PROPERTY_TELEPHONE, Text.of(value));
}
@Override
public Organization.Builder addUrl(URL value) {
return addProperty(CoreConstants.PROPERTY_URL, value);
}
@Override
public Organization.Builder addUrl(String value) {
return addProperty(CoreConstants.PROPERTY_URL, Text.of(value));
}
@Override
public Organization.Builder addVatID(Text value) {
return addProperty(CoreConstants.PROPERTY_VAT_ID, value);
}
@Override
public Organization.Builder addVatID(String value) {
return addProperty(CoreConstants.PROPERTY_VAT_ID, Text.of(value));
}
@Override
public Organization.Builder addDetailedDescription(Article value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value);
}
@Override
public Organization.Builder addDetailedDescription(Article.Builder value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value.build());
}
@Override
public Organization.Builder addDetailedDescription(String value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, Text.of(value));
}
@Override
public Organization.Builder addPopularityScore(PopularityScoreSpecification value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value);
}
@Override
public Organization.Builder addPopularityScore(PopularityScoreSpecification.Builder value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value.build());
}
@Override
public Organization.Builder addPopularityScore(String value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, Text.of(value));
}
// Materializes the immutable Organization from the properties accumulated above.
@Override
public Organization build() {
return new OrganizationImpl(properties, reverseMap);
}
}
// properties: forward property multimap; reverseMap: reverse-reference index.
// Both are supplied by BuilderImpl.build() and handed to the common superclass.
public OrganizationImpl(
Multimap<String, ValueType> properties, Multimap<String, Thing> reverseMap) {
super(properties, reverseMap);
}
@Override
public String getFullTypeName() {
return CoreConstants.TYPE_ORGANIZATION;
}
// Accepts either a short property name (resolved against the core or goog
// namespace) or an already fully-qualified property URI.
@Override
public boolean includesProperty(String property) {
return PROPERTY_SET.contains(CoreConstants.NAMESPACE + property)
|| PROPERTY_SET.contains(GoogConstants.NAMESPACE + property)
|| PROPERTY_SET.contains(property);
}
// Generated getters: each returns the (possibly empty) list of values stored
// under the corresponding property URI via the superclass getProperty lookup.
@Override
public ImmutableList<SchemaOrgType> getAlumniList() {
return getProperty(CoreConstants.PROPERTY_ALUMNI);
}
@Override
public ImmutableList<SchemaOrgType> getAreaServedList() {
return getProperty(CoreConstants.PROPERTY_AREA_SERVED);
}
@Override
public ImmutableList<SchemaOrgType> getAwardList() {
return getProperty(CoreConstants.PROPERTY_AWARD);
}
@Override
public ImmutableList<SchemaOrgType> getAwardsList() {
return getProperty(CoreConstants.PROPERTY_AWARDS);
}
@Override
public ImmutableList<SchemaOrgType> getBrandList() {
return getProperty(CoreConstants.PROPERTY_BRAND);
}
@Override
public ImmutableList<SchemaOrgType> getContactPointList() {
return getProperty(CoreConstants.PROPERTY_CONTACT_POINT);
}
@Override
public ImmutableList<SchemaOrgType> getContactPointsList() {
return getProperty(CoreConstants.PROPERTY_CONTACT_POINTS);
}
@Override
public ImmutableList<SchemaOrgType> getDepartmentList() {
return getProperty(CoreConstants.PROPERTY_DEPARTMENT);
}
@Override
public ImmutableList<SchemaOrgType> getDissolutionDateList() {
return getProperty(CoreConstants.PROPERTY_DISSOLUTION_DATE);
}
@Override
public ImmutableList<SchemaOrgType> getDunsList() {
return getProperty(CoreConstants.PROPERTY_DUNS);
}
@Override
public ImmutableList<SchemaOrgType> getEmailList() {
return getProperty(CoreConstants.PROPERTY_EMAIL);
}
@Override
public ImmutableList<SchemaOrgType> getEmployeeList() {
return getProperty(CoreConstants.PROPERTY_EMPLOYEE);
}
@Override
public ImmutableList<SchemaOrgType> getEmployeesList() {
return getProperty(CoreConstants.PROPERTY_EMPLOYEES);
}
@Override
public ImmutableList<SchemaOrgType> getFounderList() {
return getProperty(CoreConstants.PROPERTY_FOUNDER);
}
@Override
public ImmutableList<SchemaOrgType> getFoundersList() {
return getProperty(CoreConstants.PROPERTY_FOUNDERS);
}
@Override
public ImmutableList<SchemaOrgType> getFoundingDateList() {
return getProperty(CoreConstants.PROPERTY_FOUNDING_DATE);
}
@Override
public ImmutableList<SchemaOrgType> getFoundingLocationList() {
return getProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION);
}
@Override
public ImmutableList<SchemaOrgType> getHasOfferCatalogList() {
return getProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG);
}
@Override
public ImmutableList<SchemaOrgType> getHasPOSList() {
return getProperty(CoreConstants.PROPERTY_HAS_POS);
}
@Override
public ImmutableList<SchemaOrgType> getLegalNameList() {
return getProperty(CoreConstants.PROPERTY_LEGAL_NAME);
}
@Override
public ImmutableList<SchemaOrgType> getLocationList() {
return getProperty(CoreConstants.PROPERTY_LOCATION);
}
@Override
public ImmutableList<SchemaOrgType> getMakesOfferList() {
return getProperty(CoreConstants.PROPERTY_MAKES_OFFER);
}
@Override
public ImmutableList<SchemaOrgType> getMemberList() {
return getProperty(CoreConstants.PROPERTY_MEMBER);
}
@Override
public ImmutableList<SchemaOrgType> getMemberOfList() {
return getProperty(CoreConstants.PROPERTY_MEMBER_OF);
}
@Override
public ImmutableList<SchemaOrgType> getMembersList() {
return getProperty(CoreConstants.PROPERTY_MEMBERS);
}
@Override
public ImmutableList<SchemaOrgType> getNaicsList() {
return getProperty(CoreConstants.PROPERTY_NAICS);
}
@Override
public ImmutableList<SchemaOrgType> getNumberOfEmployeesList() {
return getProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES);
}
@Override
public ImmutableList<SchemaOrgType> getOwnsList() {
return getProperty(CoreConstants.PROPERTY_OWNS);
}
@Override
public ImmutableList<SchemaOrgType> getParentOrganizationList() {
return getProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION);
}
@Override
public ImmutableList<SchemaOrgType> getSeeksList() {
return getProperty(CoreConstants.PROPERTY_SEEKS);
}
@Override
public ImmutableList<SchemaOrgType> getServiceAreaList() {
return getProperty(CoreConstants.PROPERTY_SERVICE_AREA);
}
@Override
public ImmutableList<SchemaOrgType> getSubOrganizationList() {
return getProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION);
}
@Override
public ImmutableList<SchemaOrgType> getTaxIDList() {
return getProperty(CoreConstants.PROPERTY_TAX_ID);
}
@Override
public ImmutableList<SchemaOrgType> getVatIDList() {
return getProperty(CoreConstants.PROPERTY_VAT_ID);
}
}
|
openjdk/jdk8 | 37,663 | jdk/src/share/classes/javax/swing/ProgressMonitor.java | /*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.swing;
import java.io.*;
import java.awt.BorderLayout;
import java.awt.Frame;
import java.awt.Dialog;
import java.awt.Window;
import java.awt.Component;
import java.awt.Container;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.awt.event.WindowListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.IllegalComponentStateException;
import java.awt.Point;
import java.awt.Rectangle;
import java.text.*;
import java.util.Locale;
import javax.accessibility.*;
import javax.swing.event.*;
import javax.swing.text.*;
/** A class to monitor the progress of some operation. If it looks
* like the operation will take a while, a progress dialog will be popped up.
* When the ProgressMonitor is created it is given a numeric range and a
* descriptive string. As the operation progresses, call the setProgress method
* to indicate how far along the [min,max] range the operation is.
* Initially, there is no ProgressDialog. After the first millisToDecideToPopup
* milliseconds (default 500) the progress monitor will predict how long
* the operation will take. If it is longer than millisToPopup (default 2000,
* 2 seconds) a ProgressDialog will be popped up.
* <p>
* From time to time, when the Dialog box is visible, the progress bar will
* be updated when setProgress is called. setProgress won't always update
* the progress bar, it will only be done if the amount of progress is
* visibly significant.
*
* <p>
*
* For further documentation and examples see
* <a
href="http://docs.oracle.com/javase/tutorial/uiswing/components/progress.html">How to Monitor Progress</a>,
* a section in <em>The Java Tutorial.</em>
*
* @see ProgressMonitorInputStream
* @author James Gosling
* @author Lynn Monsanto (accessibility)
*/
public class ProgressMonitor implements Accessible
{
private ProgressMonitor root;
private JDialog dialog;
private JOptionPane pane;
private JProgressBar myBar;
private JLabel noteLabel;
private Component parentComponent;
private String note;
private Object[] cancelOption = null;
private Object message;
private long T0;
private int millisToDecideToPopup = 500;
private int millisToPopup = 2000;
private int min;
private int max;
/**
* Constructs a graphic object that shows progress, typically by filling
* in a rectangular bar as the process nears completion.
*
* @param parentComponent the parent component for the dialog box
* @param message a descriptive message that will be shown
* to the user to indicate what operation is being monitored.
* This does not change as the operation progresses.
* See the message parameters to methods in
* {@link JOptionPane#message}
* for the range of values.
* @param note a short note describing the state of the
* operation. As the operation progresses, you can call
* setNote to change the note displayed. This is used,
* for example, in operations that iterate through a
* list of files to show the name of the file being processes.
* If note is initially null, there will be no note line
* in the dialog box and setNote will be ineffective
* @param min the lower bound of the range
* @param max the upper bound of the range
* @see JDialog
* @see JOptionPane
*/
    public ProgressMonitor(Component parentComponent,
                           Object message,
                           String note,
                           int min,
                           int max) {
        // Delegate to the private constructor; a null group means this
        // monitor stands alone rather than sharing another monitor's dialog.
        this(parentComponent, message, note, min, max, null);
    }
    private ProgressMonitor(Component parentComponent,
                            Object message,
                            String note,
                            int min,
                            int max,
                            ProgressMonitor group) {
        this.min = min;
        this.max = max;
        this.parentComponent = parentComponent;
        // Localized label for the dialog's single Cancel button.
        cancelOption = new Object[1];
        cancelOption[0] = UIManager.getString("OptionPane.cancelButtonText");
        this.message = message;
        this.note = note;
        if (group != null) {
            // A grouped monitor shares the root monitor's start time and
            // dialog so related operations report through one dialog.
            root = (group.root != null) ? group.root : group;
            T0 = root.T0;
            dialog = root.dialog;
        }
        else {
            // Standalone monitor: the clock for popup decisions starts now.
            T0 = System.currentTimeMillis();
        }
    }
    // Modeless option pane used as the content of the progress dialog.
    // Cancel is the only option; closing the window selects Cancel too.
    private class ProgressOptionPane extends JOptionPane
    {
        ProgressOptionPane(Object messageList) {
            super(messageList,
                  JOptionPane.INFORMATION_MESSAGE,
                  JOptionPane.DEFAULT_OPTION,
                  null,
                  ProgressMonitor.this.cancelOption,
                  null);
        }
        public int getMaxCharactersPerLineCount() {
            return 60;
        }
        // Equivalent to JOptionPane.createDialog,
        // but create a modeless dialog.
        // This is necessary because the Solaris implementation doesn't
        // support Dialog.setModal yet.
        public JDialog createDialog(Component parentComponent, String title) {
            final JDialog dialog;
            Window window = JOptionPane.getWindowForComponent(parentComponent);
            if (window instanceof Frame) {
                dialog = new JDialog((Frame)window, title, false);
            } else {
                dialog = new JDialog((Dialog)window, title, false);
            }
            if (window instanceof SwingUtilities.SharedOwnerFrame) {
                // Keep the shared hidden owner frame's shutdown bookkeeping
                // aware of this dialog's lifetime.
                WindowListener ownerShutdownListener =
                    SwingUtilities.getSharedOwnerFrameShutdownListener();
                dialog.addWindowListener(ownerShutdownListener);
            }
            Container contentPane = dialog.getContentPane();
            contentPane.setLayout(new BorderLayout());
            contentPane.add(this, BorderLayout.CENTER);
            dialog.pack();
            dialog.setLocationRelativeTo(parentComponent);
            dialog.addWindowListener(new WindowAdapter() {
                boolean gotFocus = false;
                public void windowClosing(WindowEvent we) {
                    // Closing the window is treated as pressing Cancel.
                    setValue(cancelOption[0]);
                }
                public void windowActivated(WindowEvent we) {
                    // Once window gets focus, set initial focus
                    if (!gotFocus) {
                        selectInitialValue();
                        gotFocus = true;
                    }
                }
            });
            addPropertyChangeListener(new PropertyChangeListener() {
                public void propertyChange(PropertyChangeEvent event) {
                    // Dismiss the dialog as soon as the user selects an
                    // option (i.e. presses Cancel) or supplies a value.
                    if(dialog.isVisible() &&
                       event.getSource() == ProgressOptionPane.this &&
                       (event.getPropertyName().equals(VALUE_PROPERTY) ||
                        event.getPropertyName().equals(INPUT_VALUE_PROPERTY))){
                        dialog.setVisible(false);
                        dialog.dispose();
                    }
                }
            });
            return dialog;
        }
        /////////////////
        // Accessibility support for ProgressOptionPane
        ////////////////
        /**
         * Gets the AccessibleContext for the ProgressOptionPane
         *
         * @return the AccessibleContext for the ProgressOptionPane
         * @since 1.5
         */
        public AccessibleContext getAccessibleContext() {
            return ProgressMonitor.this.getAccessibleContext();
        }
        /*
         * Returns the AccessibleJOptionPane
         */
        private AccessibleContext getAccessibleJOptionPane() {
            return super.getAccessibleContext();
        }
    }
/**
* Indicate the progress of the operation being monitored.
* If the specified value is >= the maximum, the progress
* monitor is closed.
* @param nv an int specifying the current value, between the
* maximum and minimum specified for this component
* @see #setMinimum
* @see #setMaximum
* @see #close
*/
public void setProgress(int nv) {
if (nv >= max) {
close();
}
else {
if (myBar != null) {
myBar.setValue(nv);
}
else {
long T = System.currentTimeMillis();
long dT = (int)(T-T0);
if (dT >= millisToDecideToPopup) {
int predictedCompletionTime;
if (nv > min) {
predictedCompletionTime = (int)(dT *
(max - min) /
(nv - min));
}
else {
predictedCompletionTime = millisToPopup;
}
if (predictedCompletionTime >= millisToPopup) {
myBar = new JProgressBar();
myBar.setMinimum(min);
myBar.setMaximum(max);
myBar.setValue(nv);
if (note != null) noteLabel = new JLabel(note);
pane = new ProgressOptionPane(new Object[] {message,
noteLabel,
myBar});
dialog = pane.createDialog(parentComponent,
UIManager.getString(
"ProgressMonitor.progressText"));
dialog.show();
}
}
}
}
}
/**
* Indicate that the operation is complete. This happens automatically
* when the value set by setProgress is >= max, but it may be called
* earlier if the operation ends early.
*/
public void close() {
if (dialog != null) {
dialog.setVisible(false);
dialog.dispose();
dialog = null;
pane = null;
myBar = null;
}
}
/**
* Returns the minimum value -- the lower end of the progress value.
*
* @return an int representing the minimum value
* @see #setMinimum
*/
    public int getMinimum() {
        return min; // lower bound of the monitored [min,max] range
    }
/**
* Specifies the minimum value.
*
* @param m an int specifying the minimum value
* @see #getMinimum
*/
    public void setMinimum(int m) {
        // Keep the visible progress bar (if any) in sync with the new bound.
        if (myBar != null) {
            myBar.setMinimum(m);
        }
        min = m;
    }
/**
* Returns the maximum value -- the higher end of the progress value.
*
* @return an int representing the maximum value
* @see #setMaximum
*/
    public int getMaximum() {
        return max; // upper bound of the monitored [min,max] range
    }
/**
* Specifies the maximum value.
*
* @param m an int specifying the maximum value
* @see #getMaximum
*/
    public void setMaximum(int m) {
        // Keep the visible progress bar (if any) in sync with the new bound.
        if (myBar != null) {
            myBar.setMaximum(m);
        }
        max = m;
    }
/**
* Returns true if the user hits the Cancel button in the progress dialog.
*/
public boolean isCanceled() {
if (pane == null) return false;
Object v = pane.getValue();
return ((v != null) &&
(cancelOption.length == 1) &&
(v.equals(cancelOption[0])));
}
/**
* Specifies the amount of time to wait before deciding whether or
* not to popup a progress monitor.
*
* @param millisToDecideToPopup an int specifying the time to wait,
* in milliseconds
* @see #getMillisToDecideToPopup
*/
    public void setMillisToDecideToPopup(int millisToDecideToPopup) {
        // No UI to update here: this value is only read inside setProgress.
        this.millisToDecideToPopup = millisToDecideToPopup;
    }
/**
* Returns the amount of time this object waits before deciding whether
* or not to popup a progress monitor.
*
* @see #setMillisToDecideToPopup
*/
    public int getMillisToDecideToPopup() {
        return millisToDecideToPopup; // wait time before estimating duration
    }
/**
* Specifies the amount of time it will take for the popup to appear.
* (If the predicted time remaining is less than this time, the popup
* won't be displayed.)
*
* @param millisToPopup an int specifying the time in milliseconds
* @see #getMillisToPopup
*/
    public void setMillisToPopup(int millisToPopup) {
        // No UI to update here: this value is only read inside setProgress.
        this.millisToPopup = millisToPopup;
    }
/**
* Returns the amount of time it will take for the popup to appear.
*
* @see #setMillisToPopup
*/
    public int getMillisToPopup() {
        return millisToPopup; // predicted-duration threshold for popping up
    }
/**
* Specifies the additional note that is displayed along with the
* progress message. Used, for example, to show which file the
* is currently being copied during a multiple-file copy.
*
* @param note a String specifying the note to display
* @see #getNote
*/
public void setNote(String note) {
this.note = note;
if (noteLabel != null) {
noteLabel.setText(note);
}
}
/**
* Specifies the additional note that is displayed along with the
* progress message.
*
* @return a String specifying the note to display
* @see #setNote
*/
    public String getNote() {
        return note; // may be null if no note was ever supplied
    }
/////////////////
// Accessibility support
////////////////
/**
* The <code>AccessibleContext</code> for the <code>ProgressMonitor</code>
* @since 1.5
*/
protected AccessibleContext accessibleContext = null;
private AccessibleContext accessibleJOptionPane = null;
/**
* Gets the <code>AccessibleContext</code> for the
* <code>ProgressMonitor</code>
*
* @return the <code>AccessibleContext</code> for the
* <code>ProgressMonitor</code>
* @since 1.5
*/
    public AccessibleContext getAccessibleContext() {
        // Lazily create the synthesized accessible context on first request.
        if (accessibleContext == null) {
            accessibleContext = new AccessibleProgressMonitor();
        }
        if (pane != null && accessibleJOptionPane == null) {
            // Notify the AccessibleProgressMonitor that the
            // ProgressOptionPane was created. It is necessary
            // to poll for ProgressOptionPane creation because
            // the ProgressMonitor does not have a Component
            // to add a listener to until the ProgressOptionPane
            // is created.
            if (accessibleContext instanceof AccessibleProgressMonitor) {
                ((AccessibleProgressMonitor)accessibleContext).optionPaneCreated();
            }
        }
        return accessibleContext;
    }
/**
* <code>AccessibleProgressMonitor</code> implements accessibility
* support for the <code>ProgressMonitor</code> class.
* @since 1.5
*/
protected class AccessibleProgressMonitor extends AccessibleContext
implements AccessibleText, ChangeListener, PropertyChangeListener {
/*
* The accessibility hierarchy for ProgressMonitor is a flattened
* version of the ProgressOptionPane component hierarchy.
*
* The ProgressOptionPane component hierarchy is:
* JDialog
* ProgressOptionPane
* JPanel
* JPanel
* JLabel
* JLabel
* JProgressBar
*
* The AccessibleProgessMonitor accessibility hierarchy is:
* AccessibleJDialog
* AccessibleProgressMonitor
* AccessibleJLabel
* AccessibleJLabel
* AccessibleJProgressBar
*
* The abstraction presented to assitive technologies by
* the AccessibleProgressMonitor is that a dialog contains a
* progress monitor with three children: a message, a note
* label and a progress bar.
*/
private Object oldModelValue;
/**
* AccessibleProgressMonitor constructor
*/
        protected AccessibleProgressMonitor() {
        }
        /*
         * Initializes the AccessibleContext now that the ProgressOptionPane
         * has been created. Because the ProgressMonitor is not a Component
         * implementing the Accessible interface, an AccessibleContext
         * must be synthesized from the ProgressOptionPane and its children.
         *
         * For other AWT and Swing classes, the inner class that implements
         * accessibility for the class extends the inner class that
         * implements accessibility for the super class. AccessibleProgressMonitor
         * cannot extend AccessibleJOptionPane and must therefore delegate calls
         * to the AccessibleJOptionPane.
         */
        private void optionPaneCreated() {
            // Capture the option pane's own accessible context for delegation.
            accessibleJOptionPane =
                ((ProgressOptionPane)pane).getAccessibleJOptionPane();
            // add a listener for progress bar ChangeEvents
            if (myBar != null) {
                myBar.addChangeListener(this);
            }
            // add a listener for note label PropertyChangeEvents
            if (noteLabel != null) {
                noteLabel.addPropertyChangeListener(this);
            }
        }
/**
* Invoked when the target of the listener has changed its state.
*
* @param e a <code>ChangeEvent</code> object. Must not be null.
* @throws NullPointerException if the parameter is null.
*/
public void stateChanged(ChangeEvent e) {
if (e == null) {
return;
}
if (myBar != null) {
// the progress bar value changed
Object newModelValue = myBar.getValue();
firePropertyChange(ACCESSIBLE_VALUE_PROPERTY,
oldModelValue,
newModelValue);
oldModelValue = newModelValue;
}
}
/**
* This method gets called when a bound property is changed.
*
* @param e A <code>PropertyChangeEvent</code> object describing
* the event source and the property that has changed. Must not be null.
* @throws NullPointerException if the parameter is null.
*/
public void propertyChange(PropertyChangeEvent e) {
if (e.getSource() == noteLabel && e.getPropertyName() == "text") {
// the note label text changed
firePropertyChange(ACCESSIBLE_TEXT_PROPERTY, null, 0);
}
}
/* ===== Begin AccessileContext ===== */
/**
* Gets the accessibleName property of this object. The accessibleName
* property of an object is a localized String that designates the purpose
* of the object. For example, the accessibleName property of a label
* or button might be the text of the label or button itself. In the
* case of an object that doesn't display its name, the accessibleName
* should still be set. For example, in the case of a text field used
* to enter the name of a city, the accessibleName for the en_US locale
* could be 'city.'
*
* @return the localized name of the object; null if this
* object does not have a name
*
* @see #setAccessibleName
*/
public String getAccessibleName() {
if (accessibleName != null) { // defined in AccessibleContext
return accessibleName;
} else if (accessibleJOptionPane != null) {
// delegate to the AccessibleJOptionPane
return accessibleJOptionPane.getAccessibleName();
}
return null;
}
/**
* Gets the accessibleDescription property of this object. The
* accessibleDescription property of this object is a short localized
* phrase describing the purpose of the object. For example, in the
* case of a 'Cancel' button, the accessibleDescription could be
* 'Ignore changes and close dialog box.'
*
* @return the localized description of the object; null if
* this object does not have a description
*
* @see #setAccessibleDescription
*/
public String getAccessibleDescription() {
if (accessibleDescription != null) { // defined in AccessibleContext
return accessibleDescription;
} else if (accessibleJOptionPane != null) {
// delegate to the AccessibleJOptionPane
return accessibleJOptionPane.getAccessibleDescription();
}
return null;
}
/**
* Gets the role of this object. The role of the object is the generic
* purpose or use of the class of this object. For example, the role
* of a push button is AccessibleRole.PUSH_BUTTON. The roles in
* AccessibleRole are provided so component developers can pick from
* a set of predefined roles. This enables assistive technologies to
* provide a consistent interface to various tweaked subclasses of
* components (e.g., use AccessibleRole.PUSH_BUTTON for all components
* that act like a push button) as well as distinguish between subclasses
* that behave differently (e.g., AccessibleRole.CHECK_BOX for check boxes
* and AccessibleRole.RADIO_BUTTON for radio buttons).
* <p>Note that the AccessibleRole class is also extensible, so
* custom component developers can define their own AccessibleRole's
* if the set of predefined roles is inadequate.
*
* @return an instance of AccessibleRole describing the role of the object
* @see AccessibleRole
*/
        public AccessibleRole getAccessibleRole() {
            return AccessibleRole.PROGRESS_MONITOR; // fixed role for this widget
        }
/**
* Gets the state set of this object. The AccessibleStateSet of an object
* is composed of a set of unique AccessibleStates. A change in the
* AccessibleStateSet of an object will cause a PropertyChangeEvent to
* be fired for the ACCESSIBLE_STATE_PROPERTY property.
*
* @return an instance of AccessibleStateSet containing the
* current state set of the object
* @see AccessibleStateSet
* @see AccessibleState
* @see #addPropertyChangeListener
*/
public AccessibleStateSet getAccessibleStateSet() {
if (accessibleJOptionPane != null) {
// delegate to the AccessibleJOptionPane
return accessibleJOptionPane.getAccessibleStateSet();
}
return null;
}
/**
* Gets the Accessible parent of this object.
*
* @return the Accessible parent of this object; null if this
* object does not have an Accessible parent
*/
        public Accessible getAccessibleParent() {
            // The hosting dialog; null before the progress dialog pops up.
            return dialog;
        }
/*
* Returns the parent AccessibleContext
*/
private AccessibleContext getParentAccessibleContext() {
if (dialog != null) {
return dialog.getAccessibleContext();
}
return null;
}
/**
* Gets the 0-based index of this object in its accessible parent.
*
* @return the 0-based index of this object in its parent; -1 if this
* object does not have an accessible parent.
*
* @see #getAccessibleParent
* @see #getAccessibleChildrenCount
* @see #getAccessibleChild
*/
public int getAccessibleIndexInParent() {
if (accessibleJOptionPane != null) {
// delegate to the AccessibleJOptionPane
return accessibleJOptionPane.getAccessibleIndexInParent();
}
return -1;
}
/**
* Returns the number of accessible children of the object.
*
* @return the number of accessible children of the object.
*/
public int getAccessibleChildrenCount() {
// return the number of children in the JPanel containing
// the message, note label and progress bar
AccessibleContext ac = getPanelAccessibleContext();
if (ac != null) {
return ac.getAccessibleChildrenCount();
}
return 0;
}
/**
* Returns the specified Accessible child of the object. The Accessible
* children of an Accessible object are zero-based, so the first child
* of an Accessible child is at index 0, the second child is at index 1,
* and so on.
*
* @param i zero-based index of child
* @return the Accessible child of the object
* @see #getAccessibleChildrenCount
*/
public Accessible getAccessibleChild(int i) {
// return a child in the JPanel containing the message, note label
// and progress bar
AccessibleContext ac = getPanelAccessibleContext();
if (ac != null) {
return ac.getAccessibleChild(i);
}
return null;
}
/*
* Returns the AccessibleContext for the JPanel containing the
* message, note label and progress bar
*/
private AccessibleContext getPanelAccessibleContext() {
if (myBar != null) {
Component c = myBar.getParent();
if (c instanceof Accessible) {
return c.getAccessibleContext();
}
}
return null;
}
/**
* Gets the locale of the component. If the component does not have a
* locale, then the locale of its parent is returned.
*
* @return this component's locale. If this component does not have
* a locale, the locale of its parent is returned.
*
* @exception IllegalComponentStateException
* If the Component does not have its own locale and has not yet been
* added to a containment hierarchy such that the locale can be
* determined from the containing parent.
*/
public Locale getLocale() throws IllegalComponentStateException {
if (accessibleJOptionPane != null) {
// delegate to the AccessibleJOptionPane
return accessibleJOptionPane.getLocale();
}
return null;
}
/* ===== end AccessibleContext ===== */
/**
* Gets the AccessibleComponent associated with this object that has a
* graphical representation.
*
* @return AccessibleComponent if supported by object; else return null
* @see AccessibleComponent
*/
public AccessibleComponent getAccessibleComponent() {
if (accessibleJOptionPane != null) {
// delegate to the AccessibleJOptionPane
return accessibleJOptionPane.getAccessibleComponent();
}
return null;
}
/**
* Gets the AccessibleValue associated with this object that supports a
* Numerical value.
*
* @return AccessibleValue if supported by object; else return null
* @see AccessibleValue
*/
public AccessibleValue getAccessibleValue() {
if (myBar != null) {
// delegate to the AccessibleJProgressBar
return myBar.getAccessibleContext().getAccessibleValue();
}
return null;
}
/**
* Gets the AccessibleText associated with this object presenting
* text on the display.
*
* @return AccessibleText if supported by object; else return null
* @see AccessibleText
*/
public AccessibleText getAccessibleText() {
if (getNoteLabelAccessibleText() != null) {
return this;
}
return null;
}
/*
* Returns the note label AccessibleText
*/
private AccessibleText getNoteLabelAccessibleText() {
if (noteLabel != null) {
// AccessibleJLabel implements AccessibleText if the
// JLabel contains HTML text
return noteLabel.getAccessibleContext().getAccessibleText();
}
return null;
}
/* ===== Begin AccessibleText impl ===== */
/**
* Given a point in local coordinates, return the zero-based index
* of the character under that Point. If the point is invalid,
* this method returns -1.
*
* @param p the Point in local coordinates
* @return the zero-based index of the character under Point p; if
* Point is invalid return -1.
*/
        public int getIndexAtPoint(Point p) {
            AccessibleText at = getNoteLabelAccessibleText();
            // Only meaningful when the note label exposes accessible text
            // and both components live in the same window.
            if (at != null && sameWindowAncestor(pane, noteLabel)) {
                // convert point from the option pane bounds
                // to the note label bounds.
                Point noteLabelPoint = SwingUtilities.convertPoint(pane,
                                                                   p,
                                                                   noteLabel);
                if (noteLabelPoint != null) {
                    return at.getIndexAtPoint(noteLabelPoint);
                }
            }
            return -1;
        }
        /**
         * Determines the bounding box of the character at the given
         * index into the string. The bounds are returned in local
         * coordinates.
         * <p>
         * Note: despite the historical wording of this contract ("an empty
         * rectangle is returned"), this implementation returns {@code null}
         * when the index is invalid or the note label does not expose
         * accessible text.
         *
         * @param i the index into the String
         * @return the character's bounding box, converted into the option
         * pane's coordinate space; {@code null} if it cannot be determined
         */
        public Rectangle getCharacterBounds(int i) {
            AccessibleText at = getNoteLabelAccessibleText();
            if (at != null && sameWindowAncestor(pane, noteLabel)) {
                // return rectangle in the option pane bounds
                Rectangle noteLabelRect = at.getCharacterBounds(i);
                if (noteLabelRect != null) {
                    return SwingUtilities.convertRectangle(noteLabel,
                                                           noteLabelRect,
                                                           pane);
                }
            }
            return null;
        }
/*
* Returns whether source and destination components have the
* same window ancestor
*/
private boolean sameWindowAncestor(Component src, Component dest) {
if (src == null || dest == null) {
return false;
}
return SwingUtilities.getWindowAncestor(src) ==
SwingUtilities.getWindowAncestor(dest);
}
/**
* Returns the number of characters (valid indicies)
*
* @return the number of characters
*/
public int getCharCount() {
AccessibleText at = getNoteLabelAccessibleText();
if (at != null) { // JLabel contains HTML text
return at.getCharCount();
}
return -1;
}
/**
* Returns the zero-based offset of the caret.
*
* Note: That to the right of the caret will have the same index
* value as the offset (the caret is between two characters).
* @return the zero-based offset of the caret.
*/
public int getCaretPosition() {
AccessibleText at = getNoteLabelAccessibleText();
if (at != null) { // JLabel contains HTML text
return at.getCaretPosition();
}
return -1;
}
/**
* Returns the String at a given index.
*
* @param part the CHARACTER, WORD, or SENTENCE to retrieve
* @param index an index within the text
* @return the letter, word, or sentence
*/
public String getAtIndex(int part, int index) {
AccessibleText at = getNoteLabelAccessibleText();
if (at != null) { // JLabel contains HTML text
return at.getAtIndex(part, index);
}
return null;
}
/**
* Returns the String after a given index.
*
* @param part the CHARACTER, WORD, or SENTENCE to retrieve
* @param index an index within the text
* @return the letter, word, or sentence
*/
public String getAfterIndex(int part, int index) {
AccessibleText at = getNoteLabelAccessibleText();
if (at != null) { // JLabel contains HTML text
return at.getAfterIndex(part, index);
}
return null;
}
/**
* Returns the String before a given index.
*
* @param part the CHARACTER, WORD, or SENTENCE to retrieve
* @param index an index within the text
* @return the letter, word, or sentence
*/
public String getBeforeIndex(int part, int index) {
AccessibleText at = getNoteLabelAccessibleText();
if (at != null) { // JLabel contains HTML text
return at.getBeforeIndex(part, index);
}
return null;
}
/**
* Returns the AttributeSet for a given character at a given index
*
* @param i the zero-based index into the text
* @return the AttributeSet of the character
*/
public AttributeSet getCharacterAttribute(int i) {
AccessibleText at = getNoteLabelAccessibleText();
if (at != null) { // JLabel contains HTML text
return at.getCharacterAttribute(i);
}
return null;
}
/**
* Returns the start offset within the selected text.
* If there is no selection, but there is
* a caret, the start and end offsets will be the same.
*
* @return the index into the text of the start of the selection
*/
public int getSelectionStart() {
AccessibleText at = getNoteLabelAccessibleText();
if (at != null) { // JLabel contains HTML text
return at.getSelectionStart();
}
return -1;
}
/**
* Returns the end offset within the selected text.
* If there is no selection, but there is
* a caret, the start and end offsets will be the same.
*
* @return the index into the text of the end of the selection
*/
public int getSelectionEnd() {
AccessibleText at = getNoteLabelAccessibleText();
if (at != null) { // JLabel contains HTML text
return at.getSelectionEnd();
}
return -1;
}
/**
* Returns the portion of the text that is selected.
*
* @return the String portion of the text that is selected
*/
public String getSelectedText() {
AccessibleText at = getNoteLabelAccessibleText();
if (at != null) { // JLabel contains HTML text
return at.getSelectedText();
}
return null;
}
/* ===== End AccessibleText impl ===== */
}
// inner class AccessibleProgressMonitor
}
|
googleapis/google-cloud-java | 37,393 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/ImportTestCasesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/test_case.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
* <pre>
* The request message for
* [TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3beta1.TestCases.ImportTestCases].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest}
*/
public final class ImportTestCasesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest)
ImportTestCasesRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ImportTestCasesRequest.newBuilder() to construct.
  private ImportTestCasesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-argument constructor used by the protobuf runtime; initializes
  // string fields to their empty defaults.
  private ImportTestCasesRequest() {
    parent_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Invoked reflectively by the protobuf runtime to create new instances.
    return new ImportTestCasesRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    // Message descriptor generated from test_case.proto.
    return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds the generated descriptor to this class and its Builder so the
    // protobuf runtime can access fields reflectively.
    return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest.class,
            com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest.Builder.class);
  }
private int sourceCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object source_;
public enum SourceCase
implements
com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
GCS_URI(2),
CONTENT(3),
SOURCE_NOT_SET(0);
private final int value;
private SourceCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static SourceCase valueOf(int value) {
return forNumber(value);
}
public static SourceCase forNumber(int value) {
switch (value) {
case 2:
return GCS_URI;
case 3:
return CONTENT;
case 0:
return SOURCE_NOT_SET;
default:
return null;
}
}
public int getNumber() {
return this.value;
}
};
public SourceCase getSourceCase() {
return SourceCase.forNumber(sourceCase_);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The agent to import test cases to.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The agent to import test cases to.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int GCS_URI_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI
* to import test cases from. The format of this URI must be
* `gs://<bucket-name>/<object-name>`.
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string gcs_uri = 2;</code>
*
* @return Whether the gcsUri field is set.
*/
public boolean hasGcsUri() {
return sourceCase_ == 2;
}
/**
*
*
* <pre>
* The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI
* to import test cases from. The format of this URI must be
* `gs://<bucket-name>/<object-name>`.
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string gcs_uri = 2;</code>
*
* @return The gcsUri.
*/
public java.lang.String getGcsUri() {
java.lang.Object ref = "";
if (sourceCase_ == 2) {
ref = source_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (sourceCase_ == 2) {
source_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI
* to import test cases from. The format of this URI must be
* `gs://<bucket-name>/<object-name>`.
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string gcs_uri = 2;</code>
*
* @return The bytes for gcsUri.
*/
public com.google.protobuf.ByteString getGcsUriBytes() {
java.lang.Object ref = "";
if (sourceCase_ == 2) {
ref = source_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (sourceCase_ == 2) {
source_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CONTENT_FIELD_NUMBER = 3;
/**
*
*
* <pre>
* Uncompressed raw byte content for test cases.
* </pre>
*
* <code>bytes content = 3;</code>
*
* @return Whether the content field is set.
*/
@java.lang.Override
public boolean hasContent() {
return sourceCase_ == 3;
}
/**
*
*
* <pre>
* Uncompressed raw byte content for test cases.
* </pre>
*
* <code>bytes content = 3;</code>
*
* @return The content.
*/
@java.lang.Override
public com.google.protobuf.ByteString getContent() {
if (sourceCase_ == 3) {
return (com.google.protobuf.ByteString) source_;
}
return com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (sourceCase_ == 2) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, source_);
}
if (sourceCase_ == 3) {
output.writeBytes(3, (com.google.protobuf.ByteString) source_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (sourceCase_ == 2) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, source_);
}
if (sourceCase_ == 3) {
size +=
com.google.protobuf.CodedOutputStream.computeBytesSize(
3, (com.google.protobuf.ByteString) source_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest other =
(com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getSourceCase().equals(other.getSourceCase())) return false;
switch (sourceCase_) {
case 2:
if (!getGcsUri().equals(other.getGcsUri())) return false;
break;
case 3:
if (!getContent().equals(other.getContent())) return false;
break;
case 0:
default:
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
switch (sourceCase_) {
case 2:
hash = (37 * hash) + GCS_URI_FIELD_NUMBER;
hash = (53 * hash) + getGcsUri().hashCode();
break;
case 3:
hash = (37 * hash) + CONTENT_FIELD_NUMBER;
hash = (53 * hash) + getContent().hashCode();
break;
case 0:
default:
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3beta1.TestCases.ImportTestCases].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest)
com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest.class,
com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
sourceCase_ = 0;
source_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ImportTestCasesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest
getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest build() {
com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest buildPartial() {
com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest result =
new com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
}
private void buildPartialOneofs(
com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest result) {
result.sourceCase_ = sourceCase_;
result.source_ = this.source_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest) {
return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest other) {
if (other
== com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
switch (other.getSourceCase()) {
case GCS_URI:
{
sourceCase_ = 2;
source_ = other.source_;
onChanged();
break;
}
case CONTENT:
{
setContent(other.getContent());
break;
}
case SOURCE_NOT_SET:
{
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
sourceCase_ = 2;
source_ = s;
break;
} // case 18
case 26:
{
source_ = input.readBytes();
sourceCase_ = 3;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int sourceCase_ = 0;
private java.lang.Object source_;
public SourceCase getSourceCase() {
return SourceCase.forNumber(sourceCase_);
}
public Builder clearSource() {
sourceCase_ = 0;
source_ = null;
onChanged();
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The agent to import test cases to.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The agent to import test cases to.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The agent to import test cases to.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The agent to import test cases to.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The agent to import test cases to.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI
* to import test cases from. The format of this URI must be
* `gs://<bucket-name>/<object-name>`.
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string gcs_uri = 2;</code>
*
* @return Whether the gcsUri field is set.
*/
@java.lang.Override
public boolean hasGcsUri() {
return sourceCase_ == 2;
}
/**
*
*
* <pre>
* The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI
* to import test cases from. The format of this URI must be
* `gs://<bucket-name>/<object-name>`.
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string gcs_uri = 2;</code>
*
* @return The gcsUri.
*/
@java.lang.Override
public java.lang.String getGcsUri() {
java.lang.Object ref = "";
if (sourceCase_ == 2) {
ref = source_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (sourceCase_ == 2) {
source_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI
* to import test cases from. The format of this URI must be
* `gs://<bucket-name>/<object-name>`.
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string gcs_uri = 2;</code>
*
* @return The bytes for gcsUri.
*/
@java.lang.Override
public com.google.protobuf.ByteString getGcsUriBytes() {
java.lang.Object ref = "";
if (sourceCase_ == 2) {
ref = source_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (sourceCase_ == 2) {
source_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI
* to import test cases from. The format of this URI must be
* `gs://<bucket-name>/<object-name>`.
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string gcs_uri = 2;</code>
*
* @param value The gcsUri to set.
* @return This builder for chaining.
*/
public Builder setGcsUri(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
sourceCase_ = 2;
source_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI
* to import test cases from. The format of this URI must be
* `gs://<bucket-name>/<object-name>`.
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string gcs_uri = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearGcsUri() {
if (sourceCase_ == 2) {
sourceCase_ = 0;
source_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI
* to import test cases from. The format of this URI must be
* `gs://<bucket-name>/<object-name>`.
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string gcs_uri = 2;</code>
*
* @param value The bytes for gcsUri to set.
* @return This builder for chaining.
*/
public Builder setGcsUriBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
sourceCase_ = 2;
source_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Uncompressed raw byte content for test cases.
* </pre>
*
* <code>bytes content = 3;</code>
*
* @return Whether the content field is set.
*/
public boolean hasContent() {
return sourceCase_ == 3;
}
/**
*
*
* <pre>
* Uncompressed raw byte content for test cases.
* </pre>
*
* <code>bytes content = 3;</code>
*
* @return The content.
*/
public com.google.protobuf.ByteString getContent() {
if (sourceCase_ == 3) {
return (com.google.protobuf.ByteString) source_;
}
return com.google.protobuf.ByteString.EMPTY;
}
/**
*
*
* <pre>
* Uncompressed raw byte content for test cases.
* </pre>
*
* <code>bytes content = 3;</code>
*
* @param value The content to set.
* @return This builder for chaining.
*/
public Builder setContent(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
sourceCase_ = 3;
source_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Uncompressed raw byte content for test cases.
* </pre>
*
* <code>bytes content = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearContent() {
if (sourceCase_ == 3) {
sourceCase_ = 0;
source_ = null;
onChanged();
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest)
private static final com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest();
}
public static com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ImportTestCasesRequest> PARSER =
new com.google.protobuf.AbstractParser<ImportTestCasesRequest>() {
@java.lang.Override
public ImportTestCasesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ImportTestCasesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ImportTestCasesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ImportTestCasesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,453 | java-gkehub/proto-google-cloud-gkehub-v1beta/src/main/java/com/google/cloud/gkehub/servicemesh/v1beta/MembershipSpec.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/servicemesh/v1beta/servicemesh.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.servicemesh.v1beta;
/**
*
*
* <pre>
* **Service Mesh**: Spec for a single Membership for the servicemesh feature
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.servicemesh.v1beta.MembershipSpec}
*/
public final class MembershipSpec extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.servicemesh.v1beta.MembershipSpec)
MembershipSpecOrBuilder {
private static final long serialVersionUID = 0L;
// Use MembershipSpec.newBuilder() to construct.
private MembershipSpec(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MembershipSpec() {
controlPlane_ = 0;
management_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new MembershipSpec();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.servicemesh.v1beta.ServiceMeshProto
.internal_static_google_cloud_gkehub_servicemesh_v1beta_MembershipSpec_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.servicemesh.v1beta.ServiceMeshProto
.internal_static_google_cloud_gkehub_servicemesh_v1beta_MembershipSpec_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.class,
com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Builder.class);
}
/**
*
*
* <pre>
* Whether to automatically manage Service Mesh control planes.
* </pre>
*
* Protobuf enum {@code
* google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement}
*/
  public enum ControlPlaneManagement implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Unspecified
     * </pre>
     *
     * <code>CONTROL_PLANE_MANAGEMENT_UNSPECIFIED = 0;</code>
     */
    CONTROL_PLANE_MANAGEMENT_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Google should provision a control plane revision and make it available in
     * the cluster. Google will enroll this revision in a release channel and
     * keep it up to date. The control plane revision may be a managed service,
     * or a managed install.
     * </pre>
     *
     * <code>AUTOMATIC = 1;</code>
     */
    AUTOMATIC(1),
    /**
     *
     *
     * <pre>
     * User will manually configure the control plane (e.g. via CLI, or via the
     * ControlPlaneRevision KRM API)
     * </pre>
     *
     * <code>MANUAL = 2;</code>
     */
    MANUAL(2),
    // Sentinel for wire numbers unknown to this version of the generated code;
    // it has no valid number or descriptor of its own.
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * Unspecified
     * </pre>
     *
     * <code>CONTROL_PLANE_MANAGEMENT_UNSPECIFIED = 0;</code>
     */
    public static final int CONTROL_PLANE_MANAGEMENT_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Google should provision a control plane revision and make it available in
     * the cluster. Google will enroll this revision in a release channel and
     * keep it up to date. The control plane revision may be a managed service,
     * or a managed install.
     * </pre>
     *
     * <code>AUTOMATIC = 1;</code>
     */
    public static final int AUTOMATIC_VALUE = 1;
    /**
     *
     *
     * <pre>
     * User will manually configure the control plane (e.g. via CLI, or via the
     * ControlPlaneRevision KRM API)
     * </pre>
     *
     * <code>MANUAL = 2;</code>
     */
    public static final int MANUAL_VALUE = 2;

    // Returns the proto wire number. UNRECOGNIZED deliberately throws because
    // it does not correspond to any defined number.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ControlPlaneManagement valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or {@code null} if the
     *     number is not defined in this version of the enum.
     */
    public static ControlPlaneManagement forNumber(int value) {
      switch (value) {
        case 0:
          return CONTROL_PLANE_MANAGEMENT_UNSPECIFIED;
        case 1:
          return AUTOMATIC;
        case 2:
          return MANUAL;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<ControlPlaneManagement>
        internalGetValueMap() {
      return internalValueMap;
    }

    // Number->constant lookup used by the protobuf runtime during parsing.
    private static final com.google.protobuf.Internal.EnumLiteMap<ControlPlaneManagement>
        internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<ControlPlaneManagement>() {
              public ControlPlaneManagement findValueByNumber(int number) {
                return ControlPlaneManagement.forNumber(number);
              }
            };

    // Reflection support; like getNumber(), UNRECOGNIZED has no descriptor.
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    // This enum is declared first inside MembershipSpec, hence index 0.
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.getDescriptor()
          .getEnumTypes()
          .get(0);
    }

    private static final ControlPlaneManagement[] VALUES = values();

    // Descriptor-based lookup; index -1 marks a value unknown to the descriptor.
    public static ControlPlaneManagement valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    // The proto wire number backing this constant (-1 for UNRECOGNIZED).
    private final int value;

    private ControlPlaneManagement(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement)
  }
/**
*
*
* <pre>
* Whether to automatically manage Service Mesh.
* </pre>
*
* Protobuf enum {@code google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management}
*/
  public enum Management implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Unspecified
     * </pre>
     *
     * <code>MANAGEMENT_UNSPECIFIED = 0;</code>
     */
    MANAGEMENT_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Google should manage my Service Mesh for the cluster.
     * </pre>
     *
     * <code>MANAGEMENT_AUTOMATIC = 1;</code>
     */
    MANAGEMENT_AUTOMATIC(1),
    /**
     *
     *
     * <pre>
     * User will manually configure their service mesh components.
     * </pre>
     *
     * <code>MANAGEMENT_MANUAL = 2;</code>
     */
    MANAGEMENT_MANUAL(2),
    // Sentinel for wire numbers unknown to this version of the generated code;
    // it has no valid number or descriptor of its own.
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * Unspecified
     * </pre>
     *
     * <code>MANAGEMENT_UNSPECIFIED = 0;</code>
     */
    public static final int MANAGEMENT_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Google should manage my Service Mesh for the cluster.
     * </pre>
     *
     * <code>MANAGEMENT_AUTOMATIC = 1;</code>
     */
    public static final int MANAGEMENT_AUTOMATIC_VALUE = 1;
    /**
     *
     *
     * <pre>
     * User will manually configure their service mesh components.
     * </pre>
     *
     * <code>MANAGEMENT_MANUAL = 2;</code>
     */
    public static final int MANAGEMENT_MANUAL_VALUE = 2;

    // Returns the proto wire number. UNRECOGNIZED deliberately throws because
    // it does not correspond to any defined number.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static Management valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or {@code null} if the
     *     number is not defined in this version of the enum.
     */
    public static Management forNumber(int value) {
      switch (value) {
        case 0:
          return MANAGEMENT_UNSPECIFIED;
        case 1:
          return MANAGEMENT_AUTOMATIC;
        case 2:
          return MANAGEMENT_MANUAL;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<Management> internalGetValueMap() {
      return internalValueMap;
    }

    // Number->constant lookup used by the protobuf runtime during parsing.
    private static final com.google.protobuf.Internal.EnumLiteMap<Management> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Management>() {
          public Management findValueByNumber(int number) {
            return Management.forNumber(number);
          }
        };

    // Reflection support; like getNumber(), UNRECOGNIZED has no descriptor.
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    // This enum is declared second inside MembershipSpec, hence index 1.
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.getDescriptor()
          .getEnumTypes()
          .get(1);
    }

    private static final Management[] VALUES = values();

    // Descriptor-based lookup; index -1 marks a value unknown to the descriptor.
    public static Management valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    // The proto wire number backing this constant (-1 for UNRECOGNIZED).
    private final int value;

    private Management(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management)
  }
  public static final int CONTROL_PLANE_FIELD_NUMBER = 1;
  // Stored as the raw wire int (not the enum constant) so unknown enum numbers
  // survive a parse/serialize round-trip.
  private int controlPlane_ = 0;
  /**
   *
   *
   * <pre>
   * Deprecated: use `management` instead
   * Enables automatic control plane management.
   * </pre>
   *
   * <code>
   * .google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement control_plane = 1 [deprecated = true];
   * </code>
   *
   * @deprecated google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.control_plane is deprecated.
   *     See google/cloud/gkehub/servicemesh/v1beta/servicemesh.proto;l=60
   * @return The enum numeric value on the wire for controlPlane.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public int getControlPlaneValue() {
    return controlPlane_;
  }
  /**
   *
   *
   * <pre>
   * Deprecated: use `management` instead
   * Enables automatic control plane management.
   * </pre>
   *
   * <code>
   * .google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement control_plane = 1 [deprecated = true];
   * </code>
   *
   * @deprecated google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.control_plane is deprecated.
   *     See google/cloud/gkehub/servicemesh/v1beta/servicemesh.proto;l=60
   * @return The controlPlane.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement
      getControlPlane() {
    com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement result =
        com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement.forNumber(
            controlPlane_);
    // forNumber returns null for numbers this code version does not know;
    // surface those as UNRECOGNIZED rather than null.
    return result == null
        ? com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement
            .UNRECOGNIZED
        : result;
  }
  public static final int MANAGEMENT_FIELD_NUMBER = 4;
  // Stored as the raw wire int (not the enum constant) so unknown enum numbers
  // survive a parse/serialize round-trip.
  private int management_ = 0;
  /**
   *
   *
   * <pre>
   * Enables automatic Service Mesh management.
   * </pre>
   *
   * <code>.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management management = 4;</code>
   *
   * @return The enum numeric value on the wire for management.
   */
  @java.lang.Override
  public int getManagementValue() {
    return management_;
  }
  /**
   *
   *
   * <pre>
   * Enables automatic Service Mesh management.
   * </pre>
   *
   * <code>.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management management = 4;</code>
   *
   * @return The management.
   */
  @java.lang.Override
  public com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management getManagement() {
    com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management result =
        com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management.forNumber(management_);
    // Unknown wire numbers map to UNRECOGNIZED rather than null.
    return result == null
        ? com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management.UNRECOGNIZED
        : result;
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so it is always initialized; the
  // result is cached after the first call.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes this message. Per proto3 rules, enum fields equal to the default
  // (0 / *_UNSPECIFIED) are omitted from the wire.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (controlPlane_
        != com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement
            .CONTROL_PLANE_MANAGEMENT_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(1, controlPlane_);
    }
    if (management_
        != com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management
            .MANAGEMENT_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(4, management_);
    }
    // Preserve any fields parsed from a newer schema version.
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized byte size, mirroring writeTo's field-skipping
  // logic; the result is memoized in memoizedSize (-1 means not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (controlPlane_
        != com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement
            .CONTROL_PLANE_MANAGEMENT_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, controlPlane_);
    }
    if (management_
        != com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management
            .MANAGEMENT_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, management_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over both enum fields (compared as raw ints) and the
  // unknown field set.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec)) {
      return super.equals(obj);
    }
    com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec other =
        (com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec) obj;
    if (controlPlane_ != other.controlPlane_) return false;
    if (management_ != other.management_) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(): mixes the descriptor, each field number and
  // value, and the unknown fields. Memoized (0 means not yet computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + CONTROL_PLANE_FIELD_NUMBER;
    hash = (53 * hash) + controlPlane_;
    hash = (37 * hash) + MANAGEMENT_FIELD_NUMBER;
    hash = (53 * hash) + management_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parse entry points. All overloads delegate to the shared
  // PARSER (directly for in-memory inputs, via GeneratedMessageV3 IO helpers
  // for streams, which translate IOExceptions appropriately).
  // ---------------------------------------------------------------------------
  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // ---------------------------------------------------------------------------
  // Builder factory methods.
  // ---------------------------------------------------------------------------
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Returns a builder pre-populated with the given prototype's fields.
  public static Builder newBuilder(
      com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // The default instance yields a fresh empty builder; any other instance is
  // merged into the new builder first.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  // Internal variant used by parent builders for nested-message support.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* **Service Mesh**: Spec for a single Membership for the servicemesh feature
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.servicemesh.v1beta.MembershipSpec}
*/
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.gkehub.servicemesh.v1beta.MembershipSpec)
      com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpecOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.gkehub.servicemesh.v1beta.ServiceMeshProto
          .internal_static_google_cloud_gkehub_servicemesh_v1beta_MembershipSpec_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.gkehub.servicemesh.v1beta.ServiceMeshProto
          .internal_static_google_cloud_gkehub_servicemesh_v1beta_MembershipSpec_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.class,
              com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Builder.class);
    }

    // Construct using com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets all fields to defaults and clears the has-been-set bit mask.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      controlPlane_ = 0;
      management_ = 0;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.gkehub.servicemesh.v1beta.ServiceMeshProto
          .internal_static_google_cloud_gkehub_servicemesh_v1beta_MembershipSpec_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec getDefaultInstanceForType() {
      return com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.getDefaultInstance();
    }

    // build() additionally enforces isInitialized(); for this message that
    // check always passes (no required fields).
    @java.lang.Override
    public com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec build() {
      com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec buildPartial() {
      com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec result =
          new com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only fields whose bit in bitField0_ is set, i.e. fields that were
    // explicitly assigned on this builder.
    private void buildPartial0(com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.controlPlane_ = controlPlane_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.management_ = management_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Generic merge: dispatches to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec) {
        return mergeFrom((com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: non-default (non-zero) fields of `other` overwrite this
    // builder's values; unknown fields are merged as well.
    public Builder mergeFrom(com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec other) {
      if (other == com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.getDefaultInstance())
        return this;
      if (other.controlPlane_ != 0) {
        setControlPlaneValue(other.getControlPlaneValue());
      }
      if (other.management_ != 0) {
        setManagementValue(other.getManagementValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Tags: 8 = field 1 (control_plane, varint),
    // 32 = field 4 (management, varint); anything else goes to unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                controlPlane_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 32:
              {
                management_ = input.readEnum();
                bitField0_ |= 0x00000002;
                break;
              } // case 32
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit i set => field i+1 in declaration order has been explicitly assigned.
    private int bitField0_;

    private int controlPlane_ = 0;
    /**
     *
     *
     * <pre>
     * Deprecated: use `management` instead
     * Enables automatic control plane management.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement control_plane = 1 [deprecated = true];
     * </code>
     *
     * @deprecated google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.control_plane is
     *     deprecated. See google/cloud/gkehub/servicemesh/v1beta/servicemesh.proto;l=60
     * @return The enum numeric value on the wire for controlPlane.
     */
    @java.lang.Override
    @java.lang.Deprecated
    public int getControlPlaneValue() {
      return controlPlane_;
    }
    /**
     *
     *
     * <pre>
     * Deprecated: use `management` instead
     * Enables automatic control plane management.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement control_plane = 1 [deprecated = true];
     * </code>
     *
     * @deprecated google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.control_plane is
     *     deprecated. See google/cloud/gkehub/servicemesh/v1beta/servicemesh.proto;l=60
     * @param value The enum numeric value on the wire for controlPlane to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setControlPlaneValue(int value) {
      controlPlane_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Deprecated: use `management` instead
     * Enables automatic control plane management.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement control_plane = 1 [deprecated = true];
     * </code>
     *
     * @deprecated google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.control_plane is
     *     deprecated. See google/cloud/gkehub/servicemesh/v1beta/servicemesh.proto;l=60
     * @return The controlPlane.
     */
    @java.lang.Override
    @java.lang.Deprecated
    public com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement
        getControlPlane() {
      com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement result =
          com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement
              .forNumber(controlPlane_);
      return result == null
          ? com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement
              .UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Deprecated: use `management` instead
     * Enables automatic control plane management.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement control_plane = 1 [deprecated = true];
     * </code>
     *
     * @deprecated google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.control_plane is
     *     deprecated. See google/cloud/gkehub/servicemesh/v1beta/servicemesh.proto;l=60
     * @param value The controlPlane to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setControlPlane(
        com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      controlPlane_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Deprecated: use `management` instead
     * Enables automatic control plane management.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.ControlPlaneManagement control_plane = 1 [deprecated = true];
     * </code>
     *
     * @deprecated google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.control_plane is
     *     deprecated. See google/cloud/gkehub/servicemesh/v1beta/servicemesh.proto;l=60
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder clearControlPlane() {
      bitField0_ = (bitField0_ & ~0x00000001);
      controlPlane_ = 0;
      onChanged();
      return this;
    }

    private int management_ = 0;
    /**
     *
     *
     * <pre>
     * Enables automatic Service Mesh management.
     * </pre>
     *
     * <code>.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management management = 4;
     * </code>
     *
     * @return The enum numeric value on the wire for management.
     */
    @java.lang.Override
    public int getManagementValue() {
      return management_;
    }
    /**
     *
     *
     * <pre>
     * Enables automatic Service Mesh management.
     * </pre>
     *
     * <code>.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management management = 4;
     * </code>
     *
     * @param value The enum numeric value on the wire for management to set.
     * @return This builder for chaining.
     */
    public Builder setManagementValue(int value) {
      management_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Enables automatic Service Mesh management.
     * </pre>
     *
     * <code>.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management management = 4;
     * </code>
     *
     * @return The management.
     */
    @java.lang.Override
    public com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management getManagement() {
      com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management result =
          com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management.forNumber(
              management_);
      return result == null
          ? com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Enables automatic Service Mesh management.
     * </pre>
     *
     * <code>.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management management = 4;
     * </code>
     *
     * @param value The management to set.
     * @return This builder for chaining.
     */
    public Builder setManagement(
        com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000002;
      management_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Enables automatic Service Mesh management.
     * </pre>
     *
     * <code>.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec.Management management = 4;
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearManagement() {
      bitField0_ = (bitField0_ & ~0x00000002);
      management_ = 0;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.gkehub.servicemesh.v1beta.MembershipSpec)
  }
// @@protoc_insertion_point(class_scope:google.cloud.gkehub.servicemesh.v1beta.MembershipSpec)
  // Singleton empty instance; also the identity element for merges and the
  // seed for newBuilder().
  private static final com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec();
  }

  public static com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser: parses via a Builder and, on failure, attaches the
  // partially-built message to the thrown exception for diagnostics.
  private static final com.google.protobuf.Parser<MembershipSpec> PARSER =
      new com.google.protobuf.AbstractParser<MembershipSpec>() {
        @java.lang.Override
        public MembershipSpec parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain IO failures so callers see a protobuf-typed exception.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<MembershipSpec> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<MembershipSpec> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.gkehub.servicemesh.v1beta.MembershipSpec getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,369 | java-telcoautomation/proto-google-cloud-telcoautomation-v1alpha1/src/main/java/com/google/cloud/telcoautomation/v1alpha1/ListBlueprintsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/telcoautomation/v1alpha1/telcoautomation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.telcoautomation.v1alpha1;
/**
 *
 *
 * <pre>
 * Request object for `ListBlueprints`.
 * </pre>
 *
 * Protobuf type {@code google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest}
 */
// NOTE(review): protoc-generated code (see "DO NOT EDIT" banner above). Do not hand-edit;
// regenerate from google/cloud/telcoautomation/v1alpha1/telcoautomation.proto instead.
public final class ListBlueprintsRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest)
    ListBlueprintsRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListBlueprintsRequest.newBuilder() to construct.
  private ListBlueprintsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Initializes the string fields to their proto3 default (empty string).
  private ListBlueprintsRequest() {
    parent_ = "";
    filter_ = "";
    pageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListBlueprintsRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
        .internal_static_google_cloud_telcoautomation_v1alpha1_ListBlueprintsRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
        .internal_static_google_cloud_telcoautomation_v1alpha1_ListBlueprintsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest.class,
            com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; the getters below lazily convert
  // between the two forms and cache the result back into this field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The name of parent orchestration cluster resource.
   * Format should be -
   * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the ByteString form once and cache the String for later calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The name of parent orchestration cluster resource.
   * Format should be -
   * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FILTER_FIELD_NUMBER = 4;

  // Same lazy String/ByteString caching scheme as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";

  /**
   *
   *
   * <pre>
   * Optional. Filtering only supports equality on blueprint state.
   * It should be in the form: "state = DRAFT". `OR` operator can be used to
   * get response for multiple states. e.g. "state = DRAFT OR state = PROPOSED".
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. Filtering only supports equality on blueprint state.
   * It should be in the form: "state = DRAFT". `OR` operator can be used to
   * get response for multiple states. e.g. "state = DRAFT OR state = PROPOSED".
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;

  /**
   *
   *
   * <pre>
   * Optional. The maximum number of blueprints to return per page.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";

  /**
   *
   *
   * <pre>
   * Optional. The page token, received from a previous ListBlueprints call.
   * It can be provided to retrieve the subsequent page.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. The page token, received from a previous ListBlueprints call.
   * It can be provided to retrieve the subsequent page.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Cached result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required submessages: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are serialized in ascending field-number order:
    // parent(1), page_size(2), page_token(3), filter(4); defaults are skipped.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest other =
        (com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest) obj;

    // Field-by-field comparison, including unknown fields.
    if (!getParent().equals(other.getParent())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parseFrom overloads for the supported input forms.
  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request object for `ListBlueprints`.
   * </pre>
   *
   * Protobuf type {@code google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest)
      com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
          .internal_static_google_cloud_telcoautomation_v1alpha1_ListBlueprintsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
          .internal_static_google_cloud_telcoautomation_v1alpha1_ListBlueprintsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest.class,
              com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest.Builder.class);
    }

    // Construct using com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      filter_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
          .internal_static_google_cloud_telcoautomation_v1alpha1_ListBlueprintsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest
        getDefaultInstanceForType() {
      return com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest build() {
      com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest buildPartial() {
      com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest result =
          new com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bits are set in bitField0_.
    private void buildPartial0(
        com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest) {
        return mergeFrom((com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges only non-default fields from `other`, per protobuf merge semantics.
    public Builder mergeFrom(
        com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest other) {
      if (other
          == com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Wire tags: (field_number << 3) | wire_type.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: 0x1=parent, 0x2=filter, 0x4=page_size, 0x8=page_token.
    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. The name of parent orchestration cluster resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The name of parent orchestration cluster resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The name of parent orchestration cluster resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The name of parent orchestration cluster resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The name of parent orchestration cluster resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";

    /**
     *
     *
     * <pre>
     * Optional. Filtering only supports equality on blueprint state.
     * It should be in the form: "state = DRAFT". `OR` operator can be used to
     * get response for multiple states. e.g. "state = DRAFT OR state = PROPOSED".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Filtering only supports equality on blueprint state.
     * It should be in the form: "state = DRAFT". `OR` operator can be used to
     * get response for multiple states. e.g. "state = DRAFT OR state = PROPOSED".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Filtering only supports equality on blueprint state.
     * It should be in the form: "state = DRAFT". `OR` operator can be used to
     * get response for multiple states. e.g. "state = DRAFT OR state = PROPOSED".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Filtering only supports equality on blueprint state.
     * It should be in the form: "state = DRAFT". `OR` operator can be used to
     * get response for multiple states. e.g. "state = DRAFT OR state = PROPOSED".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Filtering only supports equality on blueprint state.
     * It should be in the form: "state = DRAFT". `OR` operator can be used to
     * get response for multiple states. e.g. "state = DRAFT OR state = PROPOSED".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private int pageSize_;

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of blueprints to return per page.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of blueprints to return per page.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of blueprints to return per page.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";

    /**
     *
     *
     * <pre>
     * Optional. The page token, received from a previous ListBlueprints call.
     * It can be provided to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The page token, received from a previous ListBlueprints call.
     * It can be provided to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The page token, received from a previous ListBlueprints call.
     * It can be provided to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The page token, received from a previous ListBlueprints call.
     * It can be provided to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The page token, received from a previous ListBlueprints call.
     * It can be provided to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest)

  // Singleton default instance shared by all callers of getDefaultInstance().
  private static final com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest();
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListBlueprintsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListBlueprintsRequest>() {
        @java.lang.Override
        public ListBlueprintsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListBlueprintsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListBlueprintsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.telcoautomation.v1alpha1.ListBlueprintsRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,551 | java-retail/proto-google-cloud-retail-v2beta/src/main/java/com/google/cloud/retail/v2beta/BatchUpdateGenerativeQuestionConfigsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2beta/generative_question_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2beta;
/**
*
*
* <pre>
* Aggregated response for UpdateGenerativeQuestionConfig method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse}
*/
public final class BatchUpdateGenerativeQuestionConfigsResponse
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse)
    BatchUpdateGenerativeQuestionConfigsResponseOrBuilder {
  // NOTE(review): protoc-generated class ("DO NOT EDIT" per the file header). Do not hand-edit;
  // change google/cloud/retail/v2beta/generative_question_service.proto and regenerate instead.
  private static final long serialVersionUID = 0L;

  // Use BatchUpdateGenerativeQuestionConfigsResponse.newBuilder() to construct.
  private BatchUpdateGenerativeQuestionConfigsResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for the shared DEFAULT_INSTANCE; starts with an empty list.
  private BatchUpdateGenerativeQuestionConfigsResponse() {
    generativeQuestionConfigs_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BatchUpdateGenerativeQuestionConfigsResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.retail.v2beta.GenerativeQuestionServiceProto
        .internal_static_google_cloud_retail_v2beta_BatchUpdateGenerativeQuestionConfigsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.retail.v2beta.GenerativeQuestionServiceProto
        .internal_static_google_cloud_retail_v2beta_BatchUpdateGenerativeQuestionConfigsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse.class,
            com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse.Builder
                .class);
  }

  public static final int GENERATIVE_QUESTION_CONFIGS_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.retail.v2beta.GenerativeQuestionConfig>
      generativeQuestionConfigs_;

  /**
   *
   *
   * <pre>
   * Optional. The updates question configs.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.retail.v2beta.GenerativeQuestionConfig>
      getGenerativeQuestionConfigsList() {
    return generativeQuestionConfigs_;
  }

  /**
   *
   *
   * <pre>
   * Optional. The updates question configs.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder>
      getGenerativeQuestionConfigsOrBuilderList() {
    return generativeQuestionConfigs_;
  }

  /**
   *
   *
   * <pre>
   * Optional. The updates question configs.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public int getGenerativeQuestionConfigsCount() {
    return generativeQuestionConfigs_.size();
  }

  /**
   *
   *
   * <pre>
   * Optional. The updates question configs.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.retail.v2beta.GenerativeQuestionConfig getGenerativeQuestionConfigs(
      int index) {
    return generativeQuestionConfigs_.get(index);
  }

  /**
   *
   *
   * <pre>
   * Optional. The updates question configs.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder
      getGenerativeQuestionConfigsOrBuilder(int index) {
    return generativeQuestionConfigs_.get(index);
  }

  // Memoized initialization state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < generativeQuestionConfigs_.size(); i++) {
      output.writeMessage(1, generativeQuestionConfigs_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized after the first computation (memoizedSize == -1 means "not cached yet").
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < generativeQuestionConfigs_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, generativeQuestionConfigs_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse other =
        (com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse) obj;
    if (!getGenerativeQuestionConfigsList().equals(other.getGenerativeQuestionConfigsList()))
      return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protobuf hash scheme: mix the descriptor, each populated field (tagged by its
    // field number), and the unknown field set; result is memoized.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getGenerativeQuestionConfigsCount() > 0) {
      hash = (37 * hash) + GENERATIVE_QUESTION_CONFIGS_FIELD_NUMBER;
      hash = (53 * hash) + getGenerativeQuestionConfigsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Aggregated response for UpdateGenerativeQuestionConfig method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse)
      com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.retail.v2beta.GenerativeQuestionServiceProto
          .internal_static_google_cloud_retail_v2beta_BatchUpdateGenerativeQuestionConfigsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.retail.v2beta.GenerativeQuestionServiceProto
          .internal_static_google_cloud_retail_v2beta_BatchUpdateGenerativeQuestionConfigsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse.class,
              com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse.Builder
                  .class);
    }

    // Construct using
    // com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (generativeQuestionConfigsBuilder_ == null) {
        generativeQuestionConfigs_ = java.util.Collections.emptyList();
      } else {
        generativeQuestionConfigs_ = null;
        generativeQuestionConfigsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.retail.v2beta.GenerativeQuestionServiceProto
          .internal_static_google_cloud_retail_v2beta_BatchUpdateGenerativeQuestionConfigsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
        getDefaultInstanceForType() {
      return com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse build() {
      com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
        buildPartial() {
      com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse result =
          new com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Freezes the repeated field into the result: wraps the builder-owned list as unmodifiable
    // (clearing the "mutable" bit) or delegates to the nested field builder if one exists.
    private void buildPartialRepeatedFields(
        com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse result) {
      if (generativeQuestionConfigsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          generativeQuestionConfigs_ =
              java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.generativeQuestionConfigs_ = generativeQuestionConfigs_;
      } else {
        result.generativeQuestionConfigs_ = generativeQuestionConfigsBuilder_.build();
      }
    }

    // Intentionally empty apart from the local: this message has no singular fields to copy.
    private void buildPartial0(
        com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse result) {
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse) {
        return mergeFrom(
            (com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse other) {
      if (other
          == com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
              .getDefaultInstance()) return this;
      if (generativeQuestionConfigsBuilder_ == null) {
        if (!other.generativeQuestionConfigs_.isEmpty()) {
          if (generativeQuestionConfigs_.isEmpty()) {
            // Our list is empty: share the other message's (immutable) list directly.
            generativeQuestionConfigs_ = other.generativeQuestionConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureGenerativeQuestionConfigsIsMutable();
            generativeQuestionConfigs_.addAll(other.generativeQuestionConfigs_);
          }
          onChanged();
        }
      } else {
        if (!other.generativeQuestionConfigs_.isEmpty()) {
          if (generativeQuestionConfigsBuilder_.isEmpty()) {
            generativeQuestionConfigsBuilder_.dispose();
            generativeQuestionConfigsBuilder_ = null;
            generativeQuestionConfigs_ = other.generativeQuestionConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            generativeQuestionConfigsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getGenerativeQuestionConfigsFieldBuilder()
                    : null;
          } else {
            generativeQuestionConfigsBuilder_.addAllMessages(other.generativeQuestionConfigs_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              // tag 10 = field 1, length-delimited (a GenerativeQuestionConfig submessage).
              {
                com.google.cloud.retail.v2beta.GenerativeQuestionConfig m =
                    input.readMessage(
                        com.google.cloud.retail.v2beta.GenerativeQuestionConfig.parser(),
                        extensionRegistry);
                if (generativeQuestionConfigsBuilder_ == null) {
                  ensureGenerativeQuestionConfigsIsMutable();
                  generativeQuestionConfigs_.add(m);
                } else {
                  generativeQuestionConfigsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.cloud.retail.v2beta.GenerativeQuestionConfig>
        generativeQuestionConfigs_ = java.util.Collections.emptyList();

    // Copy-on-write guard: bit 0x00000001 of bitField0_ tracks whether the list is a private,
    // mutable copy; if not, replace the (possibly shared/immutable) list before mutating it.
    private void ensureGenerativeQuestionConfigsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        generativeQuestionConfigs_ =
            new java.util.ArrayList<com.google.cloud.retail.v2beta.GenerativeQuestionConfig>(
                generativeQuestionConfigs_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.retail.v2beta.GenerativeQuestionConfig,
            com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder,
            com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder>
        generativeQuestionConfigsBuilder_;

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public java.util.List<com.google.cloud.retail.v2beta.GenerativeQuestionConfig>
        getGenerativeQuestionConfigsList() {
      if (generativeQuestionConfigsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
      } else {
        return generativeQuestionConfigsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public int getGenerativeQuestionConfigsCount() {
      if (generativeQuestionConfigsBuilder_ == null) {
        return generativeQuestionConfigs_.size();
      } else {
        return generativeQuestionConfigsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.retail.v2beta.GenerativeQuestionConfig getGenerativeQuestionConfigs(
        int index) {
      if (generativeQuestionConfigsBuilder_ == null) {
        return generativeQuestionConfigs_.get(index);
      } else {
        return generativeQuestionConfigsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setGenerativeQuestionConfigs(
        int index, com.google.cloud.retail.v2beta.GenerativeQuestionConfig value) {
      if (generativeQuestionConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.set(index, value);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setGenerativeQuestionConfigs(
        int index,
        com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder builderForValue) {
      if (generativeQuestionConfigsBuilder_ == null) {
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.set(index, builderForValue.build());
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder addGenerativeQuestionConfigs(
        com.google.cloud.retail.v2beta.GenerativeQuestionConfig value) {
      if (generativeQuestionConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.add(value);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder addGenerativeQuestionConfigs(
        int index, com.google.cloud.retail.v2beta.GenerativeQuestionConfig value) {
      if (generativeQuestionConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.add(index, value);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder addGenerativeQuestionConfigs(
        com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder builderForValue) {
      if (generativeQuestionConfigsBuilder_ == null) {
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.add(builderForValue.build());
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder addGenerativeQuestionConfigs(
        int index,
        com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder builderForValue) {
      if (generativeQuestionConfigsBuilder_ == null) {
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.add(index, builderForValue.build());
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder addAllGenerativeQuestionConfigs(
        java.lang.Iterable<? extends com.google.cloud.retail.v2beta.GenerativeQuestionConfig>
            values) {
      if (generativeQuestionConfigsBuilder_ == null) {
        ensureGenerativeQuestionConfigsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, generativeQuestionConfigs_);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearGenerativeQuestionConfigs() {
      if (generativeQuestionConfigsBuilder_ == null) {
        generativeQuestionConfigs_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder removeGenerativeQuestionConfigs(int index) {
      if (generativeQuestionConfigsBuilder_ == null) {
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.remove(index);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder
        getGenerativeQuestionConfigsBuilder(int index) {
      return getGenerativeQuestionConfigsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder
        getGenerativeQuestionConfigsOrBuilder(int index) {
      if (generativeQuestionConfigsBuilder_ == null) {
        return generativeQuestionConfigs_.get(index);
      } else {
        return generativeQuestionConfigsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public java.util.List<
            ? extends com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder>
        getGenerativeQuestionConfigsOrBuilderList() {
      if (generativeQuestionConfigsBuilder_ != null) {
        return generativeQuestionConfigsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder
        addGenerativeQuestionConfigsBuilder() {
      return getGenerativeQuestionConfigsFieldBuilder()
          .addBuilder(com.google.cloud.retail.v2beta.GenerativeQuestionConfig.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder
        addGenerativeQuestionConfigsBuilder(int index) {
      return getGenerativeQuestionConfigsFieldBuilder()
          .addBuilder(
              index, com.google.cloud.retail.v2beta.GenerativeQuestionConfig.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * Optional. The updates question configs.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public java.util.List<com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder>
        getGenerativeQuestionConfigsBuilderList() {
      return getGenerativeQuestionConfigsFieldBuilder().getBuilderList();
    }

    // Lazily switches the field from plain-list storage to a RepeatedFieldBuilderV3; after the
    // switch the list reference is nulled and the builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.retail.v2beta.GenerativeQuestionConfig,
            com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder,
            com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder>
        getGenerativeQuestionConfigsFieldBuilder() {
      if (generativeQuestionConfigsBuilder_ == null) {
        generativeQuestionConfigsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.retail.v2beta.GenerativeQuestionConfig,
                com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder,
                com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder>(
                generativeQuestionConfigs_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        generativeQuestionConfigs_ = null;
      }
      return generativeQuestionConfigsBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse)
  private static final com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse();
  }

  public static com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<BatchUpdateGenerativeQuestionConfigsResponse>
      PARSER =
          new com.google.protobuf.AbstractParser<BatchUpdateGenerativeQuestionConfigsResponse>() {
            @java.lang.Override
            public BatchUpdateGenerativeQuestionConfigsResponse parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };

  public static com.google.protobuf.Parser<BatchUpdateGenerativeQuestionConfigsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<BatchUpdateGenerativeQuestionConfigsResponse>
      getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.retail.v2beta.BatchUpdateGenerativeQuestionConfigsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
openjdk/apidiff | 37,651 | src/share/classes/jdk/codetools/apidiff/model/SerializedFormFactory.java | /*
* Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.codetools.apidiff.model;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.ModuleElement;
import javax.lang.model.element.Name;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.ArrayType;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.NoType;
import javax.lang.model.type.PrimitiveType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.type.TypeVariable;
import javax.lang.model.type.TypeVisitor;
import javax.lang.model.util.ElementFilter;
import javax.lang.model.util.Elements;
import javax.lang.model.util.SimpleTypeVisitor14;
import javax.lang.model.util.Types;
import com.sun.source.doctree.DocCommentTree;
import com.sun.source.doctree.DocTree;
import com.sun.source.doctree.ReferenceTree;
import com.sun.source.doctree.SerialFieldTree;
import com.sun.source.doctree.SerialTree;
import com.sun.source.tree.BlockTree;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.VariableTree;
import com.sun.source.util.DocTreePath;
import com.sun.source.util.DocTreePathScanner;
import com.sun.source.util.DocTrees;
import com.sun.source.util.TreePath;
/**
* A factory to create the {@link SerializedForm} object for a
* type element if appropriate.
*
* A serialized form is only created for a type element if
* it is {@code Serializable} but not an enum, and if it
* is marked with {@code @serial include}, or the enclosing
* package is not marked with {@code @serial exclude} and
* the type is {@code public} or {@code protected}.
*/
public class SerializedFormFactory {
// Cache of packages already checked for an `@serial exclude` tag, so each
// package's doc comment is scanned at most once (see isIncluded).
private final Map<PackageElement, Boolean> excludedPackages;

// Compiler-provided utility objects.
private final Elements elements;
private final Types types;
private final DocTrees trees;

// Well-known serialization-related types, resolved once (from java.base) in the constructor.
private final TypeMirror serializable;
private final TypeMirror externalizable;
private final TypeMirror objectInput;
private final TypeMirror objectInputStream;
private final TypeMirror objectOutput;
private final TypeMirror objectOutputStream;
private final TypeMirror objectStreamField;

// Pre-computed names of the special serialization methods and fields.
private final Name readExternal;
private final Name writeExternal;
private final Name readObject;
private final Name readObjectNoData;
private final Name writeObject;
private final Name readResolve;
private final Name writeReplace;
private final Name serialPersistentFields;
private final Name serialVersionUID;

// The exact modifier set required for a valid serialPersistentFields declaration.
// This is a constant, identical for all instances, so it is declared static final.
private static final Set<Modifier> privateStaticFinal =
        Set.of(Modifier.PRIVATE, Modifier.STATIC, Modifier.FINAL);
/**
 * Creates an instance of {@code SerializedFormFactory} using the utility objects for
 * {@code Elements}, {@code Types} and {@code Trees} available from an API.
 *
 * Delegates to {@link #SerializedFormFactory(Elements, Types, DocTrees)}.
 *
 * @param api the API whose utility objects should be used
 */
public SerializedFormFactory(API api) {
    this(api.getElements(), api.getTypes(), api.getTrees());
}
/**
 * Creates an instance of {@code SerializedFormFactory} using the given utility objects.
 *
 * @param elements the {@code Elements} utility class to be used
 * @param types the {@code Types} utility class to be used
 * @param trees the {@code DocTrees} utility class to be used
 */
public SerializedFormFactory(Elements elements, Types types, DocTrees trees) {
    this.elements = elements;
    this.types = types;
    this.trees = trees;
    excludedPackages = new HashMap<>();

    // Resolve the serialization-related types once, as seen from the java.base module.
    ModuleElement javaBase = elements.getModuleElement("java.base");
    serializable = getType(javaBase, "java.io.Serializable");
    externalizable = getType(javaBase, "java.io.Externalizable");
    objectInput = getType(javaBase, "java.io.ObjectInput");
    objectInputStream = getType(javaBase, "java.io.ObjectInputStream");
    objectOutput = getType(javaBase, "java.io.ObjectOutput");
    objectOutputStream = getType(javaBase, "java.io.ObjectOutputStream");
    objectStreamField = getType(javaBase, "java.io.ObjectStreamField");

    // Pre-compute the names of the special serialization members, so they can
    // be compared against element names without repeated lookups.
    readExternal = elements.getName("readExternal");
    writeExternal = elements.getName("writeExternal");
    readObject = elements.getName("readObject");
    readObjectNoData = elements.getName("readObjectNoData");
    writeObject = elements.getName("writeObject");
    readResolve = elements.getName("readResolve");
    writeReplace = elements.getName("writeReplace");
    serialPersistentFields = elements.getName("serialPersistentFields");
    serialVersionUID = elements.getName("serialVersionUID");
}
/**
 * Returns the instance of {@code SerializedFormDocs} containing the information
 * related to a given type element, or {@code null} if no such information is available.
 *
 * This implementation always returns {@code null}; subclasses may override this
 * method to supply documentation for the serialized form.
 *
 * @param te the type element
 *
 * @return the instance of {@code SerializedFormDocs} containing the information,
 *     or {@code null}
 */
public SerializedFormDocs getSerializedFormDocs(TypeElement te) {
    return null;
}
/**
 * Returns the type of an element with a given canonical name, as seen from the given module.
 *
 * @param me the module
 * @param name the canonical name
 *
 * @return the type of the element
 */
private TypeMirror getType(ModuleElement me, String name) {
    TypeElement typeElement = elements.getTypeElement(me, name);
    return typeElement.asType();
}
/**
 * Returns the {@code SerializedForm} object for a type element, or null if it does not have one.
 *
 * @param te the type element
 *
 * @return the {@code SerializedForm} object, or {@code null}
 */
public SerializedForm get(TypeElement te) {
    if (!isIncluded(te)) {
        return null;
    }
    long suid = getSerialVersionUID(te);
    List<SerializedForm.Field> fields;
    List<ExecutableElement> methods;
    if (types.isAssignable(te.asType(), externalizable)) {
        // Externalizable classes write their own data; there are no default fields.
        fields = List.of();
        methods = getExternalizableMethods(te);
    } else {
        fields = getSerializableFields(te);
        methods = getSerializableMethods(te);
    }
    SerializedFormDocs docs = getSerializedFormDocs(te);
    return new SerializedForm(suid, fields, methods, docs);
}
//<editor-fold desc="Inclusion">
/**
 * Determines if a type element has a specific serialized form.
 *
 * A type element has a specific serialized form if
 * it is {@code Serializable} but not an enum, and if it
 * is marked with {@code @serial include}, or if the enclosing
 * package is not marked with {@code @serial exclude} and
 * the type is {@code public} or {@code protected}.
 *
 * @param te the type element
 *
 * @return {@code true} if and only if the type element has a specific
 *      serialized form
 */
private boolean isIncluded(TypeElement te) {
    if (te.getKind() == ElementKind.ENUM) {
        return false;
    }
    if (!types.isAssignable(te.asType(), serializable)) {
        return false;
    }
    Optional<SerialTree> serial = getSerialTrees(te);
    if (matches(serial, "include")) {
        return true;
    }
    if (matches(serial, "exclude")) {
        return false;
    }
    // Cache the package-level `@serial exclude` lookup, per package.
    PackageElement pkg = elements.getPackageOf(te);
    boolean packageExcluded =
            excludedPackages.computeIfAbsent(pkg, p -> matches(getSerialTrees(p), "exclude"));
    if (packageExcluded) {
        return false;
    }
    Set<Modifier> mods = te.getModifiers();
    return mods.contains(Modifier.PUBLIC) || mods.contains(Modifier.PROTECTED);
}
/**
 * Returns whether an optional {@code SerialTree} object matches the given kind.
 * The kind is typically "include" or "exclude".
 *
 * @param optSerial the optional {@code SerialTree}
 * @param kind the kind
 *
 * @return {@code true} if and only if a match is found
 */
private boolean matches(Optional<SerialTree> optSerial, String kind) {
    return optSerial
            .map(s -> s.getDescription().toString())
            .filter(kind::equals)
            .isPresent();
}
/**
 * Returns the first {@code {@serial ...}} tag, if any, in the doc comment for an element.
 *
 * @param e the element
 *
 * @return the tag, if present
 */
private Optional<SerialTree> getSerialTrees(Element e) {
    DocCommentTree dct = trees.getDocCommentTree(e);
    if (dct == null) {
        return Optional.empty();
    }
    for (DocTree tag : dct.getBlockTags()) {
        if (tag.getKind() == DocTree.Kind.SERIAL) {
            return Optional.of((SerialTree) tag);
        }
    }
    return Optional.empty();
}
//</editor-fold>
//<editor-fold desc="Serial Version UID">
/**
 * Returns the {@code serialVersionUID} for a type element.
 * If the type element declares a static final long field of that name with
 * a constant value, that value is returned; otherwise, the default value
 * is computed as specified by {@code java.io.ObjectStreamClass}.
 *
 * @param te the type element
 *
 * @return the serial version UID
 */
private long getSerialVersionUID(TypeElement te) {
    VariableElement ve = te.getEnclosedElements().stream()
            .filter(e -> e.getKind() == ElementKind.FIELD
                    // compare names by content: the javax.lang.model API does not
                    // guarantee that equal names share the same Name instance,
                    // so identity (==) comparison is unreliable
                    && e.getSimpleName().contentEquals(serialVersionUID))
            .map(e -> (VariableElement) e)
            .findFirst()
            .orElse(null);
    if (ve != null
            && ve.getModifiers().contains(Modifier.STATIC)
            && ve.getModifiers().contains(Modifier.FINAL)
            && types.isSameType(ve.asType(), types.getPrimitiveType(TypeKind.LONG))) {
        // getConstantValue() is null for a non-constant initializer, in which
        // case we fall through to the computed default
        Object o = ve.getConstantValue();
        if (o instanceof Long) {
            return (Long) o;
        }
    }
    return computeDefaultSUID(te);
}
/**
 * Computes the default serial version UID value for the given class.
 *
 * This code is translated from the corresponding code in {@code java.io.ObjectStreamClass},
 * converting it from using runtime reflection to compile-time reflection.
 * The order and encoding of the items written to the stream is fixed by the
 * Java Object Serialization Specification and must not be changed.
 *
 * @param te the type element
 *
 * @return the computed default serial version UID
 */
private long computeDefaultSUID(TypeElement te) {
    try {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        DataOutputStream dout = new DataOutputStream(bout);
        // item 1: the class name
        dout.writeUTF(te.getQualifiedName().toString());
        // item 2: the class modifiers, restricted to the specified subset
        int classMods = IntModifier.getModifiers(te) &
                (IntModifier.PUBLIC | IntModifier.FINAL |
                 IntModifier.INTERFACE | IntModifier.ABSTRACT);
        /*
         * compensate for javac bug in which ABSTRACT bit was set for an
         * interface only if the interface declared methods
         */
        List<ExecutableElement> methods = ElementFilter.methodsIn(te.getEnclosedElements());
        if ((classMods & IntModifier.INTERFACE) != 0) {
            classMods = (methods.size() > 0) ?
                    (classMods | IntModifier.ABSTRACT) :
                    (classMods & ~IntModifier.ABSTRACT);
        }
        dout.writeInt(classMods);
        if (te.asType().getKind() != TypeKind.ARRAY) {
            /*
             * compensate for change in 1.2FCS in which
             * Class.getInterfaces() was modified to return Cloneable and
             * Serializable for array classes.
             */
            // item 3: the names of the superinterfaces, in sorted order
            List<? extends TypeMirror> interfaces = te.getInterfaces();
            List<String> ifaceNames = interfaces.stream()
                    .map(SerializedFormFactory::getInterfaceName)
                    .sorted()
                    .collect(Collectors.toList());
            for (String n : ifaceNames) {
                dout.writeUTF(n);
            }
        }
        // item 4: fields that are non-private or non-(static|transient),
        // sorted by field name
        List<? extends VariableElement> fields = ElementFilter.fieldsIn(te.getEnclosedElements());
        List<MemberSignature> fieldSigs = fields.stream()
                .map(MemberSignature::new)
                .sorted(Comparator.comparing(ms -> ms.name))
                .collect(Collectors.toList());
        for (MemberSignature sig : fieldSigs) {
            int mods = IntModifier.getModifiers(sig.member) &
                    (IntModifier.PUBLIC | IntModifier.PRIVATE | IntModifier.PROTECTED |
                     IntModifier.STATIC | IntModifier.FINAL | IntModifier.VOLATILE |
                     IntModifier.TRANSIENT);
            if (((mods & IntModifier.PRIVATE) == 0) ||
                ((mods & (IntModifier.STATIC | IntModifier.TRANSIENT)) == 0))
            {
                dout.writeUTF(sig.name);
                dout.writeInt(mods);
                dout.writeUTF(sig.signature);
            }
        }
        // item 5: the static initializer, if there is (or will be) one
        if (hasStaticInitializer(te)) {
            dout.writeUTF("<clinit>");
            dout.writeInt(IntModifier.STATIC);
            dout.writeUTF("()V");
        }
        // item 6: non-private constructors, sorted by signature
        List<? extends ExecutableElement> cons = ElementFilter.constructorsIn(te.getEnclosedElements());
        List<MemberSignature> consSigs = cons.stream()
                .map(MemberSignature::new)
                .sorted(Comparator.comparing(ms -> ms.signature))
                .collect(Collectors.toList());
        for (MemberSignature sig : consSigs) {
            int mods = IntModifier.getModifiers(sig.member) &
                    (IntModifier.PUBLIC | IntModifier.PRIVATE | IntModifier.PROTECTED |
                     IntModifier.STATIC | IntModifier.FINAL |
                     IntModifier.SYNCHRONIZED | IntModifier.NATIVE |
                     IntModifier.ABSTRACT | IntModifier.STRICT);
            if ((mods & IntModifier.PRIVATE) == 0) {
                dout.writeUTF("<init>");
                dout.writeInt(mods);
                dout.writeUTF(sig.signature.replace('/', '.'));
            }
        }
        // item 7: non-private methods, sorted by name, then signature
        List<MemberSignature> methSigs = methods.stream()
                .map(MemberSignature::new)
                .sorted(Comparator.comparing((MemberSignature ms) -> ms.name)
                        .thenComparing(ms -> ms.signature))
                .collect(Collectors.toList());
        for (MemberSignature sig : methSigs) {
            int mods = IntModifier.getModifiers(sig.member) &
                    (IntModifier.PUBLIC | IntModifier.PRIVATE | IntModifier.PROTECTED |
                     IntModifier.STATIC | IntModifier.FINAL |
                     IntModifier.SYNCHRONIZED | IntModifier.NATIVE |
                     IntModifier.ABSTRACT | IntModifier.STRICT);
            if ((mods & IntModifier.PRIVATE) == 0) {
                dout.writeUTF(sig.name);
                dout.writeInt(mods);
                dout.writeUTF(sig.signature.replace('/', '.'));
            }
        }
        dout.flush();
        // The UID is the first 8 bytes of the SHA-1 hash of the stream,
        // assembled least-significant-byte-first, as in ObjectStreamClass.
        MessageDigest md = MessageDigest.getInstance("SHA");
        byte[] hashBytes = md.digest(bout.toByteArray());
        long hash = 0;
        for (int i = Math.min(hashBytes.length, 8) - 1; i >= 0; i--) {
            hash = (hash << 8) | (hashBytes[i] & 0xFF);
        }
        return hash;
    } catch (IOException ex) {
        // ByteArrayOutputStream cannot actually fail; treat as internal error
        throw new InternalError(ex);
    } catch (NoSuchAlgorithmException ex) {
        // SHA-1 is required to be present in every JRE
        throw new InternalError(ex.getMessage());
    }
}
/**
 * Returns the fully qualified name for a type mirror representing an interface,
 * such as found in the superinterfaces of a class.
 *
 * @param t the type mirror
 *
 * @return the name
 */
private static String getInterfaceName(TypeMirror t) {
    TypeElement iface = (TypeElement) ((DeclaredType) t).asElement();
    return iface.getQualifiedName().toString();
}
/**
 * Returns whether a type element has, or will have, a static initializer.
 * A type has a static initializer if it has an executable member named {@code <clinit>}.
 * This may arise due to explicit presence of {@code static { ... }} in source code,
 * or to hold the initialization of static fields with a non-constant value.
 *
 * @param te the type element
 *
 * @return if the type element has or will have a static initializer
 */
private boolean hasStaticInitializer(TypeElement te) {
    boolean hasExplicit = te.getEnclosedElements().stream()
            .anyMatch(e -> e.getKind() == ElementKind.STATIC_INIT);
    if (hasExplicit) {
        return true;
    }
    // If the source is available, scan the AST for the element, looking for
    // either 'static { ... }' or a static field with a non-constant initializer.
    TreePath path = trees.getPath(te);
    if (path == null || !(path.getLeaf() instanceof ClassTree)) {
        return false;
    }
    ClassTree classTree = (ClassTree) path.getLeaf();
    for (Tree member : classTree.getMembers()) {
        if (member.getKind() == Tree.Kind.BLOCK) {
            if (((BlockTree) member).isStatic()) {
                // found an explicit static initializer block
                return true;
            }
        } else if (member.getKind() == Tree.Kind.VARIABLE) {
            VariableTree varTree = (VariableTree) member;
            if (varTree.getModifiers().getFlags().contains(Modifier.STATIC)
                    && varTree.getInitializer() != null) {
                Element e = trees.getElement(new TreePath(path, varTree));
                if (e != null
                        && e.getKind() == ElementKind.FIELD
                        && ((VariableElement) e).getConstantValue() == null) {
                    // found a field with an initializer that is not a constant
                    // expression, and so will require an implicit static initializer
                    return true;
                }
            }
        }
    }
    return false;
}
/**
 * A wrapper around runtime modifiers.
 * These are distinct from {@link javax.lang.model.element.Modifier},
 * and while they are similar, there is no direct one-to-one correspondence.
 * For example, {@code javax.lang.model} models interfaces differently,
 * and runtime reflection has no explicit value equivalent for DEFAULT.
 *
 * Note, the spec for computing the default serialVersionUID is defined
 * in terms of the runtime kind of modifiers.
 */
private static class IntModifier {
    static final int ABSTRACT = java.lang.reflect.Modifier.ABSTRACT;
    static final int FINAL = java.lang.reflect.Modifier.FINAL;
    static final int INTERFACE = java.lang.reflect.Modifier.INTERFACE;
    static final int NATIVE = java.lang.reflect.Modifier.NATIVE;
    static final int PRIVATE = java.lang.reflect.Modifier.PRIVATE;
    static final int PROTECTED = java.lang.reflect.Modifier.PROTECTED;
    static final int PUBLIC = java.lang.reflect.Modifier.PUBLIC;
    static final int STATIC = java.lang.reflect.Modifier.STATIC;
    static final int STRICT = java.lang.reflect.Modifier.STRICT;
    static final int SYNCHRONIZED = java.lang.reflect.Modifier.SYNCHRONIZED;
    static final int TRANSIENT = java.lang.reflect.Modifier.TRANSIENT;
    static final int VOLATILE = java.lang.reflect.Modifier.VOLATILE;

    /**
     * Returns the runtime-style modifier bits for an element's
     * language-model modifiers.
     */
    static int getModifiers(Element e) {
        int mods = 0;
        for (Modifier m : e.getModifiers()) {
            switch (m) {
                case ABSTRACT -> mods |= ABSTRACT;
                case DEFAULT -> { /* no runtime equivalent */ }
                case FINAL -> mods |= FINAL;
                case NATIVE -> mods |= NATIVE;
                case PRIVATE -> mods |= PRIVATE;
                case PROTECTED -> mods |= PROTECTED;
                case PUBLIC -> mods |= PUBLIC;
                case STATIC -> mods |= STATIC;
                case STRICTFP -> mods |= STRICT;
                case SYNCHRONIZED -> mods |= SYNCHRONIZED;
                case TRANSIENT -> mods |= TRANSIENT;
                case VOLATILE -> mods |= VOLATILE;
                default -> { /* other modifiers have no runtime bit */ }
            }
        }
        if (e.getKind().isInterface()) {
            mods |= INTERFACE;
        }
        return mods;
    }
}
/**
 * A simple container for a field or executable member of a type element,
 * providing the information that will be used to compute the default serialVersionUID.
 */
private static class MemberSignature {
    /** The member's simple name. */
    final String name;
    /** The underlying element. */
    final Element member;
    /** The JVM-style descriptor for the member's type. */
    final String signature;

    MemberSignature(Element e) {
        name = e.getSimpleName().toString();
        member = e;
        signature = descriptorVisitor.visit(e.asType(), new StringBuilder()).toString();
    }
}
/**
 * A visitor to compute the signature (descriptor) for members of a type element.
 * The output follows the JVM field/method descriptor grammar (JVMS 4.3.2, 4.3.3).
 */
private static final TypeVisitor<StringBuilder,StringBuilder> descriptorVisitor = new SimpleTypeVisitor14<>() {
    @Override
    public StringBuilder defaultAction(TypeMirror t, StringBuilder sb) {
        // any type kind not handled below is unexpected for a member descriptor
        throw new Error(t.getKind() + ": " + t.toString());
    }

    @Override
    public StringBuilder visitArray(ArrayType t, StringBuilder sb) {
        sb.append("[");
        return t.getComponentType().accept(this, sb);
    }

    @Override
    public StringBuilder visitDeclared(DeclaredType t, StringBuilder sb) {
        // class/interface types: L<binary-name-with-slashes>;
        return sb.append("L")
                .append(((TypeElement) t.asElement()).getQualifiedName().toString().replace(".", "/"))
                .append(";");
    }

    @Override
    public StringBuilder visitExecutable(ExecutableType t, StringBuilder sb) {
        sb.append('(');
        for (TypeMirror p : t.getParameterTypes()) {
            p.accept(this, sb);
        }
        sb.append(')');
        return t.getReturnType().accept(this, sb);
    }

    @Override
    public StringBuilder visitTypeVariable(TypeVariable t, StringBuilder sb) {
        return sb.append("Ljava/lang/Object;"); // TODO: use bounds? types.erasure(t).accept(this, sb) ?
    }

    @Override
    public StringBuilder visitNoType(NoType t, StringBuilder sb) {
        if (t.getKind() != TypeKind.VOID) {
            throw new IllegalArgumentException((t.toString()));
        }
        return sb.append('V');
    }

    @Override
    public StringBuilder visitPrimitive(PrimitiveType t, StringBuilder sb) {
        char ch = switch (t.getKind()) {
            case BYTE -> 'B';
            case CHAR -> 'C';
            case DOUBLE -> 'D';
            case FLOAT -> 'F';
            case INT -> 'I';
            // JVMS 4.3.2: the descriptor for long is 'J', not 'L'
            // ('L' introduces a class-type descriptor)
            case LONG -> 'J';
            case SHORT -> 'S';
            case BOOLEAN -> 'Z';
            default -> throw new IllegalArgumentException(t.toString());
        };
        return sb.append(ch);
    }
};
//</editor-fold>
//<editor-fold desc="Serialized Methods">
/**
 * Returns the list of methods related to the serialization in a type element
 * that is externalizable.
 *
 * The list includes: {@code readExternal}, {@code writeExternal}, {@code readResolve}
 * and {@code writeReplace}.
 *
 * @param te the type element
 *
 * @return the list
 */
private List<ExecutableElement> getExternalizableMethods(TypeElement te) {
    Predicate<ExecutableElement> isExternalizableMethod = ee ->
            isMethod(ee, readExternal, objectInput)
                    || isMethod(ee, writeExternal, objectOutput)
                    || isMethod(ee, readResolve)
                    || isMethod(ee, writeReplace);
    return getMethods(te, isExternalizableMethod);
}
/**
 * Returns the list of methods related to the serialization in a type element
 * that is serializable (but not externalizable).
 *
 * The list includes: {@code readObject}, {@code readObjectNoData}, {@code writeObject},
 * {@code readResolve} and {@code writeReplace}.
 *
 * @param te the type element
 *
 * @return the list
 */
private List<ExecutableElement> getSerializableMethods(TypeElement te) {
    Predicate<ExecutableElement> isSerializableMethod = ee ->
            isMethod(ee, readObject, objectInputStream)
                    || isMethod(ee, readObjectNoData)
                    || isMethod(ee, writeObject, objectOutputStream)
                    || isMethod(ee, readResolve)
                    || isMethod(ee, writeReplace);
    return getMethods(te, isSerializableMethod);
}
/**
 * Returns the list of methods in a type element that match a given predicate,
 * keeping only the most specific override of each method name, sorted by name.
 *
 * @param te the type element
 * @param filter the predicate
 *
 * @return the list
 */
private List<ExecutableElement> getMethods(TypeElement te, Predicate<ExecutableElement> filter) {
    Map<Name, ExecutableElement> byName = new HashMap<>();
    for (Element member : elements.getAllMembers(te)) {
        if (member.getKind() != ElementKind.METHOD) {
            continue;
        }
        ExecutableElement method = (ExecutableElement) member;
        if (!filter.test(method)) {
            continue;
        }
        // prefer the overriding (most specific) declaration
        ExecutableElement existing = byName.get(method.getSimpleName());
        if (existing == null || elements.overrides(method, existing, te)) {
            byName.put(method.getSimpleName(), method);
        }
    }
    List<ExecutableElement> result = new ArrayList<>(byName.values());
    result.sort(Comparator.comparing(e -> e.getSimpleName().toString()));
    return result;
}
/**
 * Returns whether an executable element has a given name and no parameters.
 *
 * @param ee the element
 * @param name the name
 *
 * @return true if the element has the given name and no parameters
 */
private boolean isMethod(ExecutableElement ee, Name name) {
    // compare names by content: the javax.lang.model API does not guarantee
    // that equal names share the same Name instance, so == is unreliable
    return ee.getSimpleName().contentEquals(name)
            && ee.getParameters().isEmpty();
}
/**
 * Returns whether an executable element has a given name and a single parameter
 * of a given type.
 *
 * @param ee the element
 * @param name the name
 * @param param the parameter type
 *
 * @return true if the element has the given name and exactly one parameter
 *     of the given type
 */
private boolean isMethod(ExecutableElement ee, Name name, TypeMirror param) {
    // compare names by content: the javax.lang.model API does not guarantee
    // that equal names share the same Name instance, so == is unreliable
    return ee.getSimpleName().contentEquals(name)
            && ee.getParameters().size() == 1
            && types.isSameType(ee.getParameters().get(0).asType(), param);
}
//</editor-fold>
//<editor-fold desc="Serialized Fields">
/**
 * Returns the list of fields related to the serialization in a type element
 * that is serializable (but not externalizable).
 *
 * The list contains the default set of fields to be serialized.
 * This set is determined from the {@code @serialField} tags on the
 * {@code serialPersistentFields} field (if defined), or the list of
 * non-static non-transient fields declared in the type element.
 *
 * NOTE(review): earlier documentation claimed the list also contains the
 * {@code serialVersionUID} and {@code serialPersistentFields} fields, but the
 * code below skips static fields and only adds documented or instance fields —
 * confirm which behavior is intended.
 *
 * @param te the type element
 *
 * @return the list of fields in the serialized form
 */
private List<SerializedForm.Field> getSerializableFields(TypeElement te) {
    List<SerializedForm.Field> list = new ArrayList<>();
    // look for a valid serialPersistentFields declaration
    VariableElement spf = te.getEnclosedElements().stream()
            .filter(e -> e.getKind() == ElementKind.FIELD)
            .map(e -> (VariableElement) e)
            .filter(this::isSerialPersistentFields)
            .findFirst()
            .orElse(null);
    if (spf != null) {
        // the serialized fields are described by the @serialField tags
        // in the doc comment of serialPersistentFields
        DocCommentTree dct = trees.getDocCommentTree(spf);
        if (dct != null) {
            DocTreePathScanner<Void,List<SerializedForm.Field>> scanner = new DocTreePathScanner<>() {
                @Override
                public Void visitSerialField(SerialFieldTree tree, List<SerializedForm.Field> list) {
                    list.add(new DocumentedField(te, getCurrentPath()));
                    return null;
                }
            };
            scanner.scan(new DocTreePath(trees.getPath(te), dct), list);
        }
    } else {
        // default: every non-static, non-transient field declared in the type
        for (VariableElement ve : ElementFilter.fieldsIn(te.getEnclosedElements())) {
            Set<Modifier> modifiers = ve.getModifiers();
            if (modifiers.contains(Modifier.STATIC) || modifiers.contains(Modifier.TRANSIENT)) {
                continue;
            }
            list.add(new VariableElementField(ve));
        }
    }
    return list;
}
/**
 * Returns whether a field is a valid declaration of {@code serialPersistentFields},
 * i.e. a {@code private static final ObjectStreamField[]} with that name.
 *
 * @param ve the field
 *
 * @return {@code true} if and only if this is a valid declaration of {@code serialPersistentFields}
 */
private boolean isSerialPersistentFields(VariableElement ve) {
    // compare names by content: the javax.lang.model API does not guarantee
    // that equal names share the same Name instance, so == is unreliable
    return ve.getSimpleName().contentEquals(serialPersistentFields)
            && ve.getModifiers().equals(privateStaticFinal)
            && types.isSameType(ve.asType(), types.getArrayType(objectStreamField));
}
/**
 * Details for a field in a serialized form, that is derived from information
 * in {@code @serialField} tags on the {@code serialPersistentFields} field.
 */
private class DocumentedField implements SerializedForm.Field {
    // the type element declaring the serialized field
    private final TypeElement enclosingTypeElement;
    // the field name, taken from the @serialField tag
    private final Name name;
    // the resolved type, or a NONE type if the signature cannot be resolved
    private final TypeMirror type;
    // the description text from the @serialField tag
    private final List<? extends DocTree> description;
    // the type exactly as written in the @serialField tag
    private final String signature;

    /**
     * Creates an instance from the {@code @serialField} tag at a given path.
     *
     * @param te the enclosing type element
     * @param p the path for the tag
     *
     * @throws IllegalArgumentException if the path does not identify a
     *     {@code @serialField} tag
     */
    DocumentedField(TypeElement te, DocTreePath p) {
        enclosingTypeElement = te;
        DocTree t = p.getLeaf();
        if (t.getKind() != DocTree.Kind.SERIAL_FIELD) {
            throw new IllegalArgumentException(t.getKind().toString());
        }
        SerialFieldTree sft = (SerialFieldTree) t;
        name = sft.getName().getName();
        type = getType(p, sft.getType());
        description = sft.getDescription();
        signature = sft.getType().toString();
    }

    @Override
    public TypeElement getEnclosingTypeElement() {
        return enclosingTypeElement;
    }

    /**
     * Returns the type for the signature found in a {@code @serialField} tag,
     * or a type of kind {@code NONE} if the type cannot be resolved.
     *
     * Note: it would be better if it was possible to use a type of kind ERROR
     * instead of NONE, but that cannot be done with the current API.
     *
     * javac does not directly support array signatures, so count and remove
     * the trailing '[]' characters, look up the base type, and then convert
     * to the appropriate number of levels of array.
     *
     * @param serialFieldPath the path for {@code serialField} tag
     * @param refTree the reference tree within the {@code serialField} tag
     *
     * @return the type
     */
    private TypeMirror getType(DocTreePath serialFieldPath, ReferenceTree refTree) {
        String sig = refTree.getSignature();
        // count and strip trailing "[]" pairs to find the base signature
        int dims = 0;
        int index = sig.length();
        while (index > 2) {
            if (sig.charAt(index - 2) == '[' && sig.charAt(index - 1) == ']') {
                dims++;
                index -= 2;
            } else {
                break;
            }
        }
        String baseSig = sig.substring(0, index);
        TypeMirror t;
        switch (baseSig) {
            // primitive names are handled directly; they cannot be looked up as elements
            case "boolean" -> t = types.getPrimitiveType(TypeKind.BOOLEAN);
            case "byte" -> t = types.getPrimitiveType(TypeKind.BYTE);
            case "char" -> t = types.getPrimitiveType(TypeKind.CHAR);
            case "double" -> t = types.getPrimitiveType(TypeKind.DOUBLE);
            case "float" -> t = types.getPrimitiveType(TypeKind.FLOAT);
            case "int" -> t = types.getPrimitiveType(TypeKind.INT);
            case "long" -> t = types.getPrimitiveType(TypeKind.LONG);
            case "short" -> t = types.getPrimitiveType(TypeKind.SHORT);
            case "void" -> t = types.getPrimitiveType(TypeKind.VOID);
            default -> {
                // resolve the base reference in the context of the @serialField tag
                DocTreePath refPath = new DocTreePath(serialFieldPath,
                        dims == 0 ? refTree : trees.getDocTreeFactory().newReferenceTree(baseSig));
                Element e = trees.getElement(refPath);
                if (e == null) {
                    // ideally, we would be able to use an instance of an ERROR type,
                    // but that is not available in the API, so use NONE as a marker value instead.
                    return types.getNoType(TypeKind.NONE);
                }
                t = e.asType();
            }
        }
        // re-apply the array dimensions that were stripped above
        while (dims > 0) {
            t = types.getArrayType(t);
            dims--;
        }
        return t;
    }

    @Override
    public Name getName() {
        return name;
    }

    @Override
    public TypeMirror getType() {
        return type;
    }

    @Override
    public List<? extends DocTree> getDocComment() {
        return description;
    }

    @Override
    public String getSignature() {
        return signature;
    }
}
/**
 * Details for a field in a serialized form, that is derived from a field
 * declared in the type element.
 */
private class VariableElementField implements SerializedForm.Field {
    /** The underlying field element; set once and never modified. */
    private final VariableElement ve;

    VariableElementField(VariableElement ve) {
        this.ve = ve;
    }

    @Override
    public TypeElement getEnclosingTypeElement() {
        return (TypeElement) ve.getEnclosingElement();
    }

    @Override
    public Name getName() {
        return ve.getSimpleName();
    }

    @Override
    public TypeMirror getType() {
        return ve.asType();
    }

    @Override
    public List<? extends DocTree> getDocComment() {
        // may return null when the field has no doc comment
        DocCommentTree dct = trees.getDocCommentTree(ve);
        return dct == null ? null : List.of(dct);
    }

    @Override
    public String getSignature() {
        return ve.asType().toString();
    }
}
//</editor-fold>
}
|
apache/hive | 37,630 | accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableInputFormat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.accumulo.mr;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.accumulo.core.util.Pair;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.accumulo.AccumuloConnectionParameters;
import org.apache.hadoop.hive.accumulo.AccumuloHiveConstants;
import org.apache.hadoop.hive.accumulo.AccumuloHiveRow;
import org.apache.hadoop.hive.accumulo.HiveAccumuloHelper;
import org.apache.hadoop.hive.accumulo.columns.ColumnEncoding;
import org.apache.hadoop.hive.accumulo.columns.ColumnMapper;
import org.apache.hadoop.hive.accumulo.columns.ColumnMapping;
import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloColumnMapping;
import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloRowIdColumnMapping;
import org.apache.hadoop.hive.accumulo.predicate.AccumuloPredicateHandler;
import org.apache.hadoop.hive.accumulo.predicate.PrimitiveComparisonFilter;
import org.apache.hadoop.hive.accumulo.predicate.compare.DoubleCompare;
import org.apache.hadoop.hive.accumulo.predicate.compare.Equal;
import org.apache.hadoop.hive.accumulo.predicate.compare.GreaterThan;
import org.apache.hadoop.hive.accumulo.predicate.compare.GreaterThanOrEqual;
import org.apache.hadoop.hive.accumulo.predicate.compare.IntCompare;
import org.apache.hadoop.hive.accumulo.predicate.compare.LessThan;
import org.apache.hadoop.hive.accumulo.predicate.compare.LongCompare;
import org.apache.hadoop.hive.accumulo.predicate.compare.StringCompare;
import org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters;
import org.apache.hadoop.hive.accumulo.serde.TooManyAccumuloColumnsException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.mockito.Mockito;
import com.google.common.collect.Sets;
/**
 * Tests for {@code HiveAccumuloTableInputFormat}: reading rows through the Hadoop
 * {@code RecordReader} API, server-side predicate filtering via
 * {@code PrimitiveComparisonFilter} scan iterators, and Mockito-based verification of
 * how the underlying AccumuloInputFormat is configured.
 *
 * <p>Each test runs against an Accumulo {@code MockInstance} named after the current
 * test method, so no real Accumulo/ZooKeeper cluster is required and tests are
 * isolated from one another.
 */
public class TestHiveAccumuloTableInputFormat {

  // Credentials and table name shared by every test.
  public static final String USER = "user";
  public static final String PASS = "password";
  public static final String TEST_TABLE = "table1";

  // Single column family holding the four test columns below.
  public static final Text COLUMN_FAMILY = new Text("cf");
  private static final Text NAME = new Text("name");     // string value
  private static final Text SID = new Text("sid");       // 4-byte int value
  private static final Text DEGREES = new Text("dgrs");  // 8-byte double value
  private static final Text MILLIS = new Text("mills");  // 8-byte long value

  private Instance mockInstance;
  private Connector con;
  private HiveAccumuloTableInputFormat inputformat;
  private JobConf conf;
  private List<String> columnNames;
  private List<TypeInfo> columnTypes;

  // Exposes the running test's method name; reused as the MockInstance name so each
  // test gets its own in-memory Accumulo instance.
  @Rule
  public TestName test = new TestName();

  /**
   * Creates the mock instance, configures the JobConf the input format will read, and
   * seeds TEST_TABLE with three rows (r1..r3), each carrying name/sid/dgrs/mills cells.
   */
  @Before
  public void createMockKeyValues() throws Exception {
    // Make a MockInstance here, by setting the instance name to be the same as this mock instance
    // we can "trick" the InputFormat into using a MockInstance
    mockInstance = new MockInstance(test.getMethodName());
    inputformat = new HiveAccumuloTableInputFormat();
    conf = new JobConf();
    conf.set(AccumuloSerDeParameters.TABLE_NAME, TEST_TABLE);
    conf.set(AccumuloSerDeParameters.USE_MOCK_INSTANCE, "true");
    conf.set(AccumuloSerDeParameters.INSTANCE_NAME, test.getMethodName());
    conf.set(AccumuloSerDeParameters.USER_NAME, USER);
    conf.set(AccumuloSerDeParameters.USER_PASS, PASS);
    conf.set(AccumuloSerDeParameters.ZOOKEEPERS, "localhost:2181"); // not used for mock, but
    // required by input format.
    columnNames = Arrays.asList("name", "sid", "dgrs", "mills");
    columnTypes = Arrays.<TypeInfo> asList(TypeInfoFactory.stringTypeInfo,
        TypeInfoFactory.intTypeInfo, TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.longTypeInfo);
    conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:name,cf:sid,cf:dgrs,cf:mills");
    conf.set(serdeConstants.LIST_COLUMNS, "name,sid,dgrs,mills");
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,int,double,bigint");
    con = mockInstance.getConnector(USER, new PasswordToken(PASS.getBytes()));
    con.tableOperations().create(TEST_TABLE);
    con.securityOperations().changeUserAuthorizations(USER, new Authorizations("blah"));
    BatchWriterConfig writerConf = new BatchWriterConfig();
    BatchWriter writer = con.createBatchWriter(TEST_TABLE, writerConf);
    // Row r1: brian / 1 / 44.5 / 555
    Mutation m1 = new Mutation(new Text("r1"));
    m1.put(COLUMN_FAMILY, NAME, new Value("brian".getBytes()));
    m1.put(COLUMN_FAMILY, SID, new Value(parseIntBytes("1")));
    m1.put(COLUMN_FAMILY, DEGREES, new Value(parseDoubleBytes("44.5")));
    m1.put(COLUMN_FAMILY, MILLIS, new Value(parseLongBytes("555")));
    // Row r2: mark / 2 / 55.5 / 666
    Mutation m2 = new Mutation(new Text("r2"));
    m2.put(COLUMN_FAMILY, NAME, new Value("mark".getBytes()));
    m2.put(COLUMN_FAMILY, SID, new Value(parseIntBytes("2")));
    m2.put(COLUMN_FAMILY, DEGREES, new Value(parseDoubleBytes("55.5")));
    m2.put(COLUMN_FAMILY, MILLIS, new Value(parseLongBytes("666")));
    // Row r3: dennis / 3 / 65.5 / 777
    Mutation m3 = new Mutation(new Text("r3"));
    m3.put(COLUMN_FAMILY, NAME, new Value("dennis".getBytes()));
    m3.put(COLUMN_FAMILY, SID, new Value(parseIntBytes("3")));
    m3.put(COLUMN_FAMILY, DEGREES, new Value(parseDoubleBytes("65.5")));
    m3.put(COLUMN_FAMILY, MILLIS, new Value(parseLongBytes("777")));
    writer.addMutation(m1);
    writer.addMutation(m2);
    writer.addMutation(m3);
    writer.close();
  }

  /** Serializes {@code s} as a 4-byte big-endian int (DataOutputStream format). */
  private byte[] parseIntBytes(String s) throws IOException {
    int val = Integer.parseInt(s);
    ByteArrayOutputStream baos = new ByteArrayOutputStream(4);
    DataOutputStream out = new DataOutputStream(baos);
    out.writeInt(val);
    out.close();
    return baos.toByteArray();
  }

  /** Serializes {@code s} as an 8-byte big-endian long (DataOutputStream format). */
  private byte[] parseLongBytes(String s) throws IOException {
    long val = Long.parseLong(s);
    ByteArrayOutputStream baos = new ByteArrayOutputStream(8);
    DataOutputStream out = new DataOutputStream(baos);
    out.writeLong(val);
    out.close();
    return baos.toByteArray();
  }

  /** Serializes {@code s} as an 8-byte IEEE-754 double (DataOutputStream format). */
  private byte[] parseDoubleBytes(String s) throws IOException {
    double val = Double.parseDouble(s);
    ByteArrayOutputStream baos = new ByteArrayOutputStream(8);
    DataOutputStream out = new DataOutputStream(baos);
    out.writeDouble(val);
    out.close();
    return baos.toByteArray();
  }

  /**
   * Reads the first row through the RecordReader and checks every cell of r1 round-trips
   * byte-for-byte into the AccumuloHiveRow.
   */
  @Test
  public void testHiveAccumuloRecord() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    // NOTE(review): assertEquals args are (actual, expected) here and below — reversed
    // relative to the JUnit convention; only failure messages are affected.
    assertEquals(splits.length, 1);
    RecordReader<Text,AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text rowId = new Text("r1");
    AccumuloHiveRow row = new AccumuloHiveRow();
    // Pre-populate row with the expected cells; reader.next(...) presumably resets and
    // refills it from the scan — TODO confirm against AccumuloHiveRow/next semantics.
    row.add(COLUMN_FAMILY.toString(), NAME.toString(), "brian".getBytes());
    row.add(COLUMN_FAMILY.toString(), SID.toString(), parseIntBytes("1"));
    row.add(COLUMN_FAMILY.toString(), DEGREES.toString(), parseDoubleBytes("44.5"));
    row.add(COLUMN_FAMILY.toString(), MILLIS.toString(), parseLongBytes("555"));
    assertTrue(reader.next(rowId, row));
    assertEquals(rowId.toString(), row.getRowId());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals("brian".getBytes(), row.getValue(COLUMN_FAMILY, NAME));
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, SID));
    assertArrayEquals(parseIntBytes("1"), row.getValue(COLUMN_FAMILY, SID));
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, DEGREES));
    assertArrayEquals(parseDoubleBytes("44.5"), row.getValue(COLUMN_FAMILY, DEGREES));
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, MILLIS));
    assertArrayEquals(parseLongBytes("555"), row.getValue(COLUMN_FAMILY, MILLIS));
  }

  /**
   * Iterates all three seeded rows in order (r1, r2, r3), checking the name cell of each,
   * and verifies the reader is exhausted afterwards.
   */
  @Test
  public void testGetOnlyName() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text,AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text rowId = new Text("r1");
    AccumuloHiveRow row = new AccumuloHiveRow();
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "brian".getBytes());
    rowId = new Text("r2");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "mark".getBytes());
    rowId = new Text("r3");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "dennis".getBytes());
    assertFalse(reader.next(rowId, row));
  }

  /**
   * Scans directly (no input format) with two stacked PrimitiveComparisonFilter
   * iterators: dgrs &gt;= 55.6 AND mills &lt; 778. Only r3 (dennis: 65.5 / 777)
   * satisfies both.
   */
  @Test
  public void testDegreesAndMillis() throws Exception {
    Connector con = mockInstance.getConnector(USER, new PasswordToken(PASS.getBytes()));
    Scanner scan = con.createScanner(TEST_TABLE, new Authorizations("blah"));
    IteratorSetting is = new IteratorSetting(1, PrimitiveComparisonFilter.FILTER_PREFIX + 1,
        PrimitiveComparisonFilter.class);
    // The constant is Base64-encoded because iterator options are string-valued.
    is.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, DoubleCompare.class.getName());
    is.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, GreaterThanOrEqual.class.getName());
    is.addOption(PrimitiveComparisonFilter.CONST_VAL, Base64.getEncoder().encodeToString(parseDoubleBytes("55.6")));
    is.addOption(PrimitiveComparisonFilter.COLUMN, "cf:dgrs");
    scan.addScanIterator(is);
    IteratorSetting is2 = new IteratorSetting(2, PrimitiveComparisonFilter.FILTER_PREFIX + 2,
        PrimitiveComparisonFilter.class);
    is2.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, LongCompare.class.getName());
    is2.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, LessThan.class.getName());
    is2.addOption(PrimitiveComparisonFilter.CONST_VAL, Base64.getEncoder().encodeToString(parseLongBytes("778")));
    is2.addOption(PrimitiveComparisonFilter.COLUMN, "cf:mills");
    scan.addScanIterator(is2);
    boolean foundDennis = false;
    int totalCount = 0;
    for (Map.Entry<Key,Value> kv : scan) {
      boolean foundName = false;
      boolean foundSid = false;
      boolean foundDegrees = false;
      boolean foundMillis = false;
      // The filter emits whole-row-encoded entries; decodeRow unpacks one scan entry
      // back into its per-cell key/values.
      SortedMap<Key,Value> items = PrimitiveComparisonFilter.decodeRow(kv.getKey(), kv.getValue());
      for (Map.Entry<Key,Value> item : items.entrySet()) {
        // NOTE(review): decodeRow is applied a second time here, whereas
        // testGreaterThan1Sid decodes only once. Presumably the stacked filters
        // double-encode the row — confirm; if not, the inner decode is redundant.
        SortedMap<Key,Value> nestedItems = PrimitiveComparisonFilter.decodeRow(item.getKey(),
            item.getValue());
        for (Map.Entry<Key,Value> nested : nestedItems.entrySet()) {
          if (nested.getKey().getRow().toString().equals("r3")) {
            foundDennis = true;
          }
          if (nested.getKey().getColumnQualifier().equals(NAME)) {
            foundName = true;
          } else if (nested.getKey().getColumnQualifier().equals(SID)) {
            foundSid = true;
          } else if (nested.getKey().getColumnQualifier().equals(DEGREES)) {
            foundDegrees = true;
          } else if (nested.getKey().getColumnQualifier().equals(MILLIS)) {
            foundMillis = true;
          }
        }
      }
      totalCount++;
      // All four qualifiers must be present in the single surviving row.
      assertTrue(foundDegrees & foundMillis & foundName & foundSid);
    }
    assertTrue(foundDennis);
    assertEquals(totalCount, 1);
  }

  /**
   * Scans with a single filter sid &gt; 1; expects exactly the two rows r2 (mark) and
   * r3 (dennis), each carrying all four qualifiers.
   */
  @Test
  public void testGreaterThan1Sid() throws Exception {
    Connector con = mockInstance.getConnector(USER, new PasswordToken(PASS.getBytes()));
    Scanner scan = con.createScanner(TEST_TABLE, new Authorizations("blah"));
    IteratorSetting is = new IteratorSetting(1, PrimitiveComparisonFilter.FILTER_PREFIX + 1,
        PrimitiveComparisonFilter.class);
    is.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, IntCompare.class.getName());
    is.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, GreaterThan.class.getName());
    is.addOption(PrimitiveComparisonFilter.CONST_VAL, Base64.getEncoder().encodeToString(parseIntBytes("1")));
    is.addOption(PrimitiveComparisonFilter.COLUMN, "cf:sid");
    scan.addScanIterator(is);
    boolean foundMark = false;
    boolean foundDennis = false;
    int totalCount = 0;
    for (Map.Entry<Key,Value> kv : scan) {
      boolean foundName = false;
      boolean foundSid = false;
      boolean foundDegrees = false;
      boolean foundMillis = false;
      SortedMap<Key,Value> items = PrimitiveComparisonFilter.decodeRow(kv.getKey(), kv.getValue());
      for (Map.Entry<Key,Value> item : items.entrySet()) {
        if (item.getKey().getRow().toString().equals("r2")) {
          foundMark = true;
        } else if (item.getKey().getRow().toString().equals("r3")) {
          foundDennis = true;
        }
        if (item.getKey().getColumnQualifier().equals(NAME)) {
          foundName = true;
        } else if (item.getKey().getColumnQualifier().equals(SID)) {
          foundSid = true;
        } else if (item.getKey().getColumnQualifier().equals(DEGREES)) {
          foundDegrees = true;
        } else if (item.getKey().getColumnQualifier().equals(MILLIS)) {
          foundMillis = true;
        }
      }
      totalCount++;
      assertTrue(foundDegrees & foundMillis & foundName & foundSid);
    }
    assertTrue(foundDennis & foundMark);
    assertEquals(totalCount, 2);
  }

  /**
   * Scans with a string-equality filter name == "brian"; only r1 survives, and every
   * decoded cell of r1 must match the seeded values exactly.
   */
  @Test
  public void testNameEqualBrian() throws Exception {
    Connector con = mockInstance.getConnector(USER, new PasswordToken(PASS.getBytes()));
    Scanner scan = con.createScanner(TEST_TABLE, new Authorizations("blah"));
    IteratorSetting is = new IteratorSetting(1, PrimitiveComparisonFilter.FILTER_PREFIX + 1,
        PrimitiveComparisonFilter.class);
    is.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, StringCompare.class.getName());
    is.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, Equal.class.getName());
    is.addOption(PrimitiveComparisonFilter.CONST_VAL, Base64.getEncoder().encodeToString("brian".getBytes()));
    is.addOption(PrimitiveComparisonFilter.COLUMN, "cf:name");
    scan.addScanIterator(is);
    boolean foundName = false;
    boolean foundSid = false;
    boolean foundDegrees = false;
    boolean foundMillis = false;
    for (Map.Entry<Key,Value> kv : scan) {
      SortedMap<Key,Value> items = PrimitiveComparisonFilter.decodeRow(kv.getKey(), kv.getValue());
      for (Map.Entry<Key,Value> item : items.entrySet()) {
        assertEquals(item.getKey().getRow().toString(), "r1");
        if (item.getKey().getColumnQualifier().equals(NAME)) {
          foundName = true;
          assertArrayEquals(item.getValue().get(), "brian".getBytes());
        } else if (item.getKey().getColumnQualifier().equals(SID)) {
          foundSid = true;
          assertArrayEquals(item.getValue().get(), parseIntBytes("1"));
        } else if (item.getKey().getColumnQualifier().equals(DEGREES)) {
          foundDegrees = true;
          assertArrayEquals(item.getValue().get(), parseDoubleBytes("44.5"));
        } else if (item.getKey().getColumnQualifier().equals(MILLIS)) {
          foundMillis = true;
          assertArrayEquals(item.getValue().get(), parseLongBytes("555"));
        }
      }
    }
    assertTrue(foundDegrees & foundMillis & foundName & foundSid);
  }

  /**
   * Remaps the table to a column (cf:f1) that holds no data; the reader must report
   * no records.
   */
  @Test
  public void testGetNone() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:f1");
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text,AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text rowId = new Text("r1");
    AccumuloHiveRow row = new AccumuloHiveRow();
    row.setRowId("r1");
    assertFalse(reader.next(rowId, row));
  }

  /**
   * Regression test: when the predicate handler produces iterators after splits were
   * computed, getRecordReader must copy those iterators onto the split itself.
   */
  @Test
  public void testIteratorNotInSplitsCompensation() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(1, splits.length);
    InputSplit split = splits[0];
    IteratorSetting is = new IteratorSetting(1, PrimitiveComparisonFilter.FILTER_PREFIX + 1,
        PrimitiveComparisonFilter.class);
    is.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, StringCompare.class.getName());
    is.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, Equal.class.getName());
    is.addOption(PrimitiveComparisonFilter.CONST_VAL, Base64.getEncoder().encodeToString(new byte[] {'0'}));
    is.addOption(PrimitiveComparisonFilter.COLUMN, "cf:cq");
    // Mock out the predicate handler because it's just easier
    AccumuloPredicateHandler predicateHandler = Mockito.mock(AccumuloPredicateHandler.class);
    Mockito.when(
        predicateHandler.getIterators(Mockito.any(JobConf.class), Mockito.any(ColumnMapper.class)))
        .thenReturn(Arrays.asList(is));
    // Set it on our inputformat
    inputformat.predicateHandler = predicateHandler;
    inputformat.getRecordReader(split, conf, null);
    // The code should account for the bug and update the iterators on the split
    List<IteratorSetting> settingsOnSplit = ((HiveAccumuloSplit) split).getSplit().getIterators();
    assertEquals(1, settingsOnSplit.size());
    assertEquals(is, settingsOnSplit.get(0));
  }

  /**
   * getPairCollection must turn column mappings into cf/cq Text pairs, skipping the
   * rowid mapping and keeping explicit qualifiers.
   */
  @Test
  public void testColumnMappingsToPairs() {
    List<ColumnMapping> mappings = new ArrayList<ColumnMapping>();
    Set<Pair<Text,Text>> columns = new HashSet<Pair<Text,Text>>();
    // Row ID
    mappings.add(new HiveAccumuloRowIdColumnMapping(AccumuloHiveConstants.ROWID,
        ColumnEncoding.STRING, "row", TypeInfoFactory.stringTypeInfo.toString()));
    // Some cf:cq
    mappings.add(new HiveAccumuloColumnMapping("person", "name", ColumnEncoding.STRING, "col1",
        TypeInfoFactory.stringTypeInfo.toString()));
    mappings.add(new HiveAccumuloColumnMapping("person", "age", ColumnEncoding.STRING, "col2",
        TypeInfoFactory.stringTypeInfo.toString()));
    mappings.add(new HiveAccumuloColumnMapping("person", "height", ColumnEncoding.STRING, "col3",
        TypeInfoFactory.stringTypeInfo.toString()));
    // Bare cf
    mappings.add(new HiveAccumuloColumnMapping("city", "name", ColumnEncoding.STRING, "col4",
        TypeInfoFactory.stringTypeInfo.toString()));
    columns.add(new Pair<Text,Text>(new Text("person"), new Text("name")));
    columns.add(new Pair<Text,Text>(new Text("person"), new Text("age")));
    columns.add(new Pair<Text,Text>(new Text("person"), new Text("height")));
    // Null qualifier would mean all qualifiers in that family, want an empty qualifier
    columns.add(new Pair<Text,Text>(new Text("city"), new Text("name")));
    assertEquals(columns, inputformat.getPairCollection(mappings));
  }

  /**
   * Verifies configure() wires a MockInstance-backed job: mock instance, connector
   * info, table name, scan authorizations, iterators, ranges, and fetched columns.
   */
  @Test
  public void testConfigureMockAccumuloInputFormat() throws Exception {
    AccumuloConnectionParameters accumuloParams = new AccumuloConnectionParameters(conf);
    ColumnMapper columnMapper = new ColumnMapper(conf.get(AccumuloSerDeParameters.COLUMN_MAPPINGS),
        conf.get(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE), columnNames, columnTypes);
    Set<Pair<Text,Text>> cfCqPairs = inputformat
        .getPairCollection(columnMapper.getColumnMappings());
    List<IteratorSetting> iterators = Collections.emptyList();
    Set<Range> ranges = Collections.singleton(new Range());
    HiveAccumuloTableInputFormat mockInputFormat = Mockito.mock(HiveAccumuloTableInputFormat.class);
    HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
    // Stub out a mocked Helper instance
    Mockito.when(mockInputFormat.getHelper()).thenReturn(helper);
    // Call out to the real configure method
    Mockito.doCallRealMethod().when(mockInputFormat)
        .configure(conf, mockInstance, con, accumuloParams, columnMapper, iterators, ranges);
    // Also compute the correct cf:cq pairs so we can assert the right argument was passed
    Mockito.doCallRealMethod().when(mockInputFormat)
        .getPairCollection(columnMapper.getColumnMappings());
    mockInputFormat.configure(conf, mockInstance, con, accumuloParams, columnMapper, iterators,
        ranges);
    // Verify that the correct methods are invoked on AccumuloInputFormat
    Mockito.verify(helper).setInputFormatMockInstance(conf, mockInstance.getInstanceName());
    Mockito.verify(helper).setInputFormatConnectorInfo(conf, USER, new PasswordToken(PASS));
    Mockito.verify(mockInputFormat).setInputTableName(conf, TEST_TABLE);
    Mockito.verify(mockInputFormat).setScanAuthorizations(conf,
        con.securityOperations().getUserAuthorizations(USER));
    Mockito.verify(mockInputFormat).addIterators(conf, iterators);
    Mockito.verify(mockInputFormat).setRanges(conf, ranges);
    Mockito.verify(mockInputFormat).fetchColumns(conf, cfCqPairs);
  }

  /**
   * Same as the mock-instance test but against a (mocked) ZooKeeperInstance: verifies
   * the ZK instance name/quorum are pushed into the job configuration.
   */
  @Test
  public void testConfigureAccumuloInputFormat() throws Exception {
    AccumuloConnectionParameters accumuloParams = new AccumuloConnectionParameters(conf);
    ColumnMapper columnMapper = new ColumnMapper(conf.get(AccumuloSerDeParameters.COLUMN_MAPPINGS),
        conf.get(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE), columnNames, columnTypes);
    Set<Pair<Text,Text>> cfCqPairs = inputformat
        .getPairCollection(columnMapper.getColumnMappings());
    List<IteratorSetting> iterators = Collections.emptyList();
    Set<Range> ranges = Collections.singleton(new Range());
    String instanceName = "realInstance";
    String zookeepers = "host1:2181,host2:2181,host3:2181";
    ZooKeeperInstance zkInstance = Mockito.mock(ZooKeeperInstance.class);
    HiveAccumuloTableInputFormat mockInputFormat = Mockito.mock(HiveAccumuloTableInputFormat.class);
    HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
    // Stub out the ZKI mock
    Mockito.when(zkInstance.getInstanceName()).thenReturn(instanceName);
    Mockito.when(zkInstance.getZooKeepers()).thenReturn(zookeepers);
    // Stub out a mocked Helper instance
    Mockito.when(mockInputFormat.getHelper()).thenReturn(helper);
    // Call out to the real configure method
    Mockito.doCallRealMethod().when(mockInputFormat)
        .configure(conf, zkInstance, con, accumuloParams, columnMapper, iterators, ranges);
    // Also compute the correct cf:cq pairs so we can assert the right argument was passed
    Mockito.doCallRealMethod().when(mockInputFormat)
        .getPairCollection(columnMapper.getColumnMappings());
    mockInputFormat.configure(conf, zkInstance, con, accumuloParams, columnMapper, iterators,
        ranges);
    // Verify that the correct methods are invoked on AccumuloInputFormat
    Mockito.verify(helper).setInputFormatZooKeeperInstance(conf, instanceName, zookeepers, false);
    Mockito.verify(helper).setInputFormatConnectorInfo(conf, USER, new PasswordToken(PASS));
    Mockito.verify(mockInputFormat).setInputTableName(conf, TEST_TABLE);
    Mockito.verify(mockInputFormat).setScanAuthorizations(conf,
        con.securityOperations().getUserAuthorizations(USER));
    Mockito.verify(mockInputFormat).addIterators(conf, iterators);
    Mockito.verify(mockInputFormat).setRanges(conf, ranges);
    Mockito.verify(mockInputFormat).fetchColumns(conf, cfCqPairs);
  }

  /**
   * When AUTHORIZATIONS_KEY is set in the job, configure() must use those explicit
   * authorizations instead of asking Accumulo for the user's authorizations.
   */
  @Test
  public void testConfigureAccumuloInputFormatWithAuthorizations() throws Exception {
    AccumuloConnectionParameters accumuloParams = new AccumuloConnectionParameters(conf);
    conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo,bar");
    ColumnMapper columnMapper = new ColumnMapper(conf.get(AccumuloSerDeParameters.COLUMN_MAPPINGS),
        conf.get(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE), columnNames, columnTypes);
    Set<Pair<Text,Text>> cfCqPairs = inputformat
        .getPairCollection(columnMapper.getColumnMappings());
    List<IteratorSetting> iterators = Collections.emptyList();
    Set<Range> ranges = Collections.singleton(new Range());
    String instanceName = "realInstance";
    String zookeepers = "host1:2181,host2:2181,host3:2181";
    ZooKeeperInstance zkInstance = Mockito.mock(ZooKeeperInstance.class);
    HiveAccumuloTableInputFormat mockInputFormat = Mockito.mock(HiveAccumuloTableInputFormat.class);
    HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
    // Stub out the ZKI mock
    Mockito.when(zkInstance.getInstanceName()).thenReturn(instanceName);
    Mockito.when(zkInstance.getZooKeepers()).thenReturn(zookeepers);
    // Stub out a mocked Helper instance
    Mockito.when(mockInputFormat.getHelper()).thenReturn(helper);
    // Call out to the real configure method
    Mockito.doCallRealMethod().when(mockInputFormat)
        .configure(conf, zkInstance, con, accumuloParams, columnMapper, iterators, ranges);
    // Also compute the correct cf:cq pairs so we can assert the right argument was passed
    Mockito.doCallRealMethod().when(mockInputFormat)
        .getPairCollection(columnMapper.getColumnMappings());
    mockInputFormat.configure(conf, zkInstance, con, accumuloParams, columnMapper, iterators,
        ranges);
    // Verify that the correct methods are invoked on AccumuloInputFormat
    Mockito.verify(helper).setInputFormatZooKeeperInstance(conf, instanceName, zookeepers, false);
    Mockito.verify(helper).setInputFormatConnectorInfo(conf, USER, new PasswordToken(PASS));
    Mockito.verify(mockInputFormat).setInputTableName(conf, TEST_TABLE);
    Mockito.verify(mockInputFormat).setScanAuthorizations(conf, new Authorizations("foo,bar"));
    Mockito.verify(mockInputFormat).addIterators(conf, iterators);
    Mockito.verify(mockInputFormat).setRanges(conf, ranges);
    Mockito.verify(mockInputFormat).fetchColumns(conf, cfCqPairs);
  }

  /**
   * configure() must forward a non-empty iterator list (two predicate filters here)
   * to addIterators unchanged.
   */
  @Test
  public void testConfigureAccumuloInputFormatWithIterators() throws Exception {
    AccumuloConnectionParameters accumuloParams = new AccumuloConnectionParameters(conf);
    ColumnMapper columnMapper = new ColumnMapper(conf.get(AccumuloSerDeParameters.COLUMN_MAPPINGS),
        conf.get(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE), columnNames, columnTypes);
    Set<Pair<Text,Text>> cfCqPairs = inputformat
        .getPairCollection(columnMapper.getColumnMappings());
    List<IteratorSetting> iterators = new ArrayList<IteratorSetting>();
    Set<Range> ranges = Collections.singleton(new Range());
    String instanceName = "realInstance";
    String zookeepers = "host1:2181,host2:2181,host3:2181";
    // NOTE(review): both settings share priority 50 and the default iterator name;
    // fine for pass-through verification, but they could not both be attached to a
    // real scanner — confirm if this list is ever applied directly.
    IteratorSetting cfg = new IteratorSetting(50, PrimitiveComparisonFilter.class);
    cfg.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, StringCompare.class.getName());
    cfg.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, Equal.class.getName());
    cfg.addOption(PrimitiveComparisonFilter.CONST_VAL, "dave");
    cfg.addOption(PrimitiveComparisonFilter.COLUMN, "person:name");
    iterators.add(cfg);
    cfg = new IteratorSetting(50, PrimitiveComparisonFilter.class);
    cfg.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, IntCompare.class.getName());
    cfg.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, Equal.class.getName());
    cfg.addOption(PrimitiveComparisonFilter.CONST_VAL, "50");
    cfg.addOption(PrimitiveComparisonFilter.COLUMN, "person:age");
    iterators.add(cfg);
    ZooKeeperInstance zkInstance = Mockito.mock(ZooKeeperInstance.class);
    HiveAccumuloTableInputFormat mockInputFormat = Mockito.mock(HiveAccumuloTableInputFormat.class);
    HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
    // Stub out the ZKI mock
    Mockito.when(zkInstance.getInstanceName()).thenReturn(instanceName);
    Mockito.when(zkInstance.getZooKeepers()).thenReturn(zookeepers);
    // Stub out a mocked Helper instance
    Mockito.when(mockInputFormat.getHelper()).thenReturn(helper);
    // Call out to the real configure method
    Mockito.doCallRealMethod().when(mockInputFormat)
        .configure(conf, zkInstance, con, accumuloParams, columnMapper, iterators, ranges);
    // Also compute the correct cf:cq pairs so we can assert the right argument was passed
    Mockito.doCallRealMethod().when(mockInputFormat)
        .getPairCollection(columnMapper.getColumnMappings());
    mockInputFormat.configure(conf, zkInstance, con, accumuloParams, columnMapper, iterators,
        ranges);
    // Verify that the correct methods are invoked on AccumuloInputFormat
    Mockito.verify(helper).setInputFormatZooKeeperInstance(conf, instanceName, zookeepers, false);
    Mockito.verify(helper).setInputFormatConnectorInfo(conf, USER, new PasswordToken(PASS));
    Mockito.verify(mockInputFormat).setInputTableName(conf, TEST_TABLE);
    Mockito.verify(mockInputFormat).setScanAuthorizations(conf,
        con.securityOperations().getUserAuthorizations(USER));
    Mockito.verify(mockInputFormat).addIterators(conf, iterators);
    Mockito.verify(mockInputFormat).setRanges(conf, ranges);
    Mockito.verify(mockInputFormat).fetchColumns(conf, cfCqPairs);
  }

  /**
   * When getPairCollection yields no cf/cq pairs, configure() must skip fetchColumns
   * entirely (hence no verify for it at the end).
   */
  @Test
  public void testConfigureAccumuloInputFormatWithEmptyColumns() throws Exception {
    AccumuloConnectionParameters accumuloParams = new AccumuloConnectionParameters(conf);
    ColumnMapper columnMapper = new ColumnMapper(conf.get(AccumuloSerDeParameters.COLUMN_MAPPINGS),
        conf.get(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE), columnNames, columnTypes);
    HashSet<Pair<Text,Text>> cfCqPairs = Sets.newHashSet();
    List<IteratorSetting> iterators = new ArrayList<IteratorSetting>();
    Set<Range> ranges = Collections.singleton(new Range());
    String instanceName = "realInstance";
    String zookeepers = "host1:2181,host2:2181,host3:2181";
    IteratorSetting cfg = new IteratorSetting(50, PrimitiveComparisonFilter.class);
    cfg.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, StringCompare.class.getName());
    cfg.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, Equal.class.getName());
    cfg.addOption(PrimitiveComparisonFilter.CONST_VAL, "dave");
    cfg.addOption(PrimitiveComparisonFilter.COLUMN, "person:name");
    iterators.add(cfg);
    cfg = new IteratorSetting(50, PrimitiveComparisonFilter.class);
    cfg.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, IntCompare.class.getName());
    cfg.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, Equal.class.getName());
    cfg.addOption(PrimitiveComparisonFilter.CONST_VAL, "50");
    cfg.addOption(PrimitiveComparisonFilter.COLUMN, "person:age");
    iterators.add(cfg);
    ZooKeeperInstance zkInstance = Mockito.mock(ZooKeeperInstance.class);
    HiveAccumuloTableInputFormat mockInputFormat = Mockito.mock(HiveAccumuloTableInputFormat.class);
    HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
    // Stub out the ZKI mock
    Mockito.when(zkInstance.getInstanceName()).thenReturn(instanceName);
    Mockito.when(zkInstance.getZooKeepers()).thenReturn(zookeepers);
    // Force an empty pair collection so the no-columns branch is taken.
    // NOTE(review): this stub is later overridden by the doCallRealMethod below —
    // presumably the real method also yields pairs the test ignores; confirm ordering.
    Mockito.when(mockInputFormat.getPairCollection(columnMapper.getColumnMappings())).thenReturn(
        cfCqPairs);
    // Stub out a mocked Helper instance
    Mockito.when(mockInputFormat.getHelper()).thenReturn(helper);
    // Call out to the real configure method
    Mockito.doCallRealMethod().when(mockInputFormat)
        .configure(conf, zkInstance, con, accumuloParams, columnMapper, iterators, ranges);
    // Also compute the correct cf:cq pairs so we can assert the right argument was passed
    Mockito.doCallRealMethod().when(mockInputFormat)
        .getPairCollection(columnMapper.getColumnMappings());
    mockInputFormat.configure(conf, zkInstance, con, accumuloParams, columnMapper, iterators,
        ranges);
    // Verify that the correct methods are invoked on AccumuloInputFormat
    Mockito.verify(helper).setInputFormatZooKeeperInstance(conf, instanceName, zookeepers, false);
    Mockito.verify(helper).setInputFormatConnectorInfo(conf, USER, new PasswordToken(PASS));
    Mockito.verify(mockInputFormat).setInputTableName(conf, TEST_TABLE);
    Mockito.verify(mockInputFormat).setScanAuthorizations(conf,
        con.securityOperations().getUserAuthorizations(USER));
    Mockito.verify(mockInputFormat).addIterators(conf, iterators);
    Mockito.verify(mockInputFormat).setRanges(conf, ranges);
    // fetchColumns is not called because we had no columns to fetch
  }

  /**
   * Writes an r4 row protected by column visibility "foo", grants the user the "foo"
   * authorization, sets AUTHORIZATIONS_KEY, and verifies all four rows (including the
   * protected r4) are readable.
   */
  @Test
  public void testGetProtectedField() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    BatchWriterConfig writerConf = new BatchWriterConfig();
    BatchWriter writer = con.createBatchWriter(TEST_TABLE, writerConf);
    Authorizations origAuths = con.securityOperations().getUserAuthorizations(USER);
    con.securityOperations().changeUserAuthorizations(USER,
        new Authorizations(origAuths.toString() + ",foo"));
    Mutation m = new Mutation("r4");
    m.put(COLUMN_FAMILY, NAME, new ColumnVisibility("foo"), new Value("frank".getBytes()));
    m.put(COLUMN_FAMILY, SID, new ColumnVisibility("foo"), new Value(parseIntBytes("4")));
    m.put(COLUMN_FAMILY, DEGREES, new ColumnVisibility("foo"), new Value(parseDoubleBytes("60.6")));
    m.put(COLUMN_FAMILY, MILLIS, new ColumnVisibility("foo"), new Value(parseLongBytes("777")));
    writer.addMutation(m);
    writer.close();
    conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo");
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text,AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text rowId = new Text("r1");
    AccumuloHiveRow row = new AccumuloHiveRow();
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "brian".getBytes());
    rowId = new Text("r2");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "mark".getBytes());
    rowId = new Text("r3");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "dennis".getBytes());
    rowId = new Text("r4");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "frank".getBytes());
    assertFalse(reader.next(rowId, row));
  }

  /**
   * A wildcard mapping "cf:*" (Hive map column) must collapse to a single pair whose
   * qualifier is null, meaning "fetch every qualifier in the family".
   */
  @Test
  public void testMapColumnPairs() throws TooManyAccumuloColumnsException {
    ColumnMapper columnMapper = new ColumnMapper(":rowID,cf:*",
        conf.get(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE), Arrays.asList("row", "col"),
        Arrays.<TypeInfo> asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.getMapTypeInfo(
            TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo)));
    Set<Pair<Text,Text>> pairs = inputformat.getPairCollection(columnMapper.getColumnMappings());
    Assert.assertEquals(1, pairs.size());
    Pair<Text,Text> cfCq = pairs.iterator().next();
    Assert.assertEquals("cf", cfCq.getFirst().toString());
    Assert.assertNull(cfCq.getSecond());
  }
}
/* Begin concatenated file: graalpython/com.oracle.graal.python/src/com/oracle/graal/python/builtins/modules/ast/Validator.java (repo: oracle/graalpython) */
/*
* Copyright (c) 2022, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.graal.python.builtins.modules.ast;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_ASSIGN;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_ASYNCFOR;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_ASYNCFUNCTIONDEF;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_ASYNCWITH;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_CLASSDEF;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_DELETE;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_EXCEPTHANDLER;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_FOR;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_FUNCTIONDEF;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_GLOBAL;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_IF;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_IMPORT;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_IMPORTFROM;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_MATCH;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_NONLOCAL;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_TRY;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_TRYSTAR;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_WHILE;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_C_WITH;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_F_BODY;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_F_CASES;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_F_ITEMS;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_F_NAMES;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_F_TARGETS;
import static com.oracle.graal.python.builtins.modules.ast.AstState.T_T_MATCH_CASE;
import static com.oracle.graal.python.nodes.ErrorMessages.AST_NODE_COLUMN_RANGE_FOR_LINE_RANGE_IS_NOT_VALID;
import static com.oracle.graal.python.nodes.ErrorMessages.AST_NODE_LINE_RANGE_IS_NOT_VALID;
import static com.oracle.graal.python.nodes.ErrorMessages.LINE_COLUMN_IS_NOT_A_VALID_RANGE;
import static com.oracle.graal.python.nodes.ErrorMessages.NAMEDEXPR_TARGET_MUST_BE_A_NAME;
import static com.oracle.graal.python.nodes.ErrorMessages.TYPEALIAS_WITH_NON_NAME_NAME;
import static com.oracle.graal.python.pegparser.sst.ExprContextTy.Del;
import static com.oracle.graal.python.pegparser.sst.ExprContextTy.Load;
import static com.oracle.graal.python.pegparser.sst.ExprContextTy.Store;
import com.oracle.graal.python.builtins.PythonBuiltinClassType;
import com.oracle.graal.python.nodes.ErrorMessages;
import com.oracle.graal.python.nodes.PRaiseNode;
import com.oracle.graal.python.pegparser.sst.AliasTy;
import com.oracle.graal.python.pegparser.sst.ArgTy;
import com.oracle.graal.python.pegparser.sst.ArgumentsTy;
import com.oracle.graal.python.pegparser.sst.ComprehensionTy;
import com.oracle.graal.python.pegparser.sst.ConstantValue;
import com.oracle.graal.python.pegparser.sst.ConstantValue.Kind;
import com.oracle.graal.python.pegparser.sst.ExceptHandlerTy;
import com.oracle.graal.python.pegparser.sst.ExprContextTy;
import com.oracle.graal.python.pegparser.sst.ExprTy;
import com.oracle.graal.python.pegparser.sst.ExprTy.UnaryOp;
import com.oracle.graal.python.pegparser.sst.KeywordTy;
import com.oracle.graal.python.pegparser.sst.MatchCaseTy;
import com.oracle.graal.python.pegparser.sst.ModTy;
import com.oracle.graal.python.pegparser.sst.OperatorTy;
import com.oracle.graal.python.pegparser.sst.PatternTy;
import com.oracle.graal.python.pegparser.sst.SSTNode;
import com.oracle.graal.python.pegparser.sst.SSTreeVisitor;
import com.oracle.graal.python.pegparser.sst.StmtTy;
import com.oracle.graal.python.pegparser.sst.StmtTy.TypeAlias;
import com.oracle.graal.python.pegparser.sst.TypeIgnoreTy;
import com.oracle.graal.python.pegparser.sst.TypeParamTy;
import com.oracle.graal.python.pegparser.sst.TypeParamTy.ParamSpec;
import com.oracle.graal.python.pegparser.sst.TypeParamTy.TypeVar;
import com.oracle.graal.python.pegparser.sst.TypeParamTy.TypeVarTuple;
import com.oracle.graal.python.pegparser.sst.UnaryOpTy;
import com.oracle.graal.python.pegparser.sst.WithItemTy;
import com.oracle.graal.python.pegparser.tokenizer.SourceRange;
import com.oracle.graal.python.runtime.exception.PException;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.strings.TruffleString;
final class Validator implements SSTreeVisitor<Void> {
private static final String[] FORBIDDEN_NAMES = {"None", "True", "False"};
private final Node node;
private Validator(Node node) {
this.node = node;
}
/*-
// ModTy validation
*/
// Equivalent of _PyAST_Validate - entry point of the validation
// Equivalent of _PyAST_Validate - entry point of the validation
// Walks the whole module tree and raises a Python ValueError/TypeError on the
// first invalid construct; `node` is only used as the raising location.
static void validateMod(Node node, ModTy mod) {
    // TODO recursion checks
    mod.accept(new Validator(node));
}
@Override
public Void visit(ModTy.Module node) {
validateStmts(node.body);
return null;
}
@Override
public Void visit(ModTy.Interactive node) {
validateStmts(node.body);
return null;
}
@Override
public Void visit(ModTy.Expression node) {
validateExpr(node.body, Load);
return null;
}
@Override
public Void visit(ModTy.FunctionType node) {
validateExprs(node.argTypes, Load, false);
validateExpr(node.returns, Load);
return null;
}
/*-
// StmtTy validation
*/
// Equivalent of validate_stmt
private void validateStmt(StmtTy stmt) {
validatePositions(stmt);
// TODO recursion checks
stmt.accept(this);
}
@Override
public Void visit(StmtTy.FunctionDef node) {
validateBody(node.body, T_C_FUNCTIONDEF);
validateTypeParams(node.typeParams);
visit(node.args);
validateExprs(node.decoratorList, Load, false);
if (node.returns != null) {
validateExpr(node.returns, Load);
}
return null;
}
@Override
public Void visit(StmtTy.ClassDef node) {
validateBody(node.body, T_C_CLASSDEF);
validateTypeParams(node.typeParams);
validateExprs(node.bases, Load, false);
validateKeywords(node.keywords);
validateExprs(node.decoratorList, Load, false);
return null;
}
@Override
public Void visit(StmtTy.Return node) {
if (node.value != null) {
validateExpr(node.value, Load);
}
return null;
}
@Override
public Void visit(StmtTy.Delete node) {
validateAssignList(node.targets, Del);
return null;
}
@Override
public Void visit(StmtTy.Assign node) {
validateAssignList(node.targets, Store);
validateExpr(node.value, Load);
return null;
}
@Override
public Void visit(StmtTy.AugAssign node) {
validateExpr(node.target, Store);
validateExpr(node.value, Load);
return null;
}
@Override
public Void visit(StmtTy.AnnAssign node) {
if (!(node.target instanceof ExprTy.Name) && node.isSimple) {
throw raiseTypeError(ErrorMessages.ANN_ASSIGN_WITH_SIMPLE_NON_NAME_TARGET);
}
validateExpr(node.target, Store);
if (node.value != null) {
validateExpr(node.value, Load);
}
validateExpr(node.annotation, Load);
return null;
}
@Override
public Void visit(StmtTy.For node) {
validateExpr(node.target, Store);
validateExpr(node.iter, Load);
validateBody(node.body, T_C_FOR);
validateStmts(node.orElse);
return null;
}
@Override
public Void visit(StmtTy.AsyncFor node) {
validateExpr(node.target, Store);
validateExpr(node.iter, Load);
validateBody(node.body, T_C_ASYNCFOR);
validateStmts(node.orElse);
return null;
}
@Override
public Void visit(StmtTy.While node) {
validateExpr(node.test, Load);
validateBody(node.body, T_C_WHILE);
validateStmts(node.orElse);
return null;
}
@Override
public Void visit(StmtTy.If node) {
validateExpr(node.test, Load);
validateBody(node.body, T_C_IF);
validateStmts(node.orElse);
return null;
}
@Override
public Void visit(StmtTy.With node) {
validateNonEmptySeq(node.items, T_F_ITEMS, T_C_WITH);
for (WithItemTy item : node.items) {
visit(item);
}
validateBody(node.body, T_C_WITH);
return null;
}
@Override
public Void visit(StmtTy.AsyncWith node) {
validateNonEmptySeq(node.items, T_F_ITEMS, T_C_ASYNCWITH);
for (WithItemTy item : node.items) {
visit(item);
}
validateBody(node.body, T_C_ASYNCWITH);
return null;
}
@Override
public Void visit(StmtTy.Match node) {
validateExpr(node.subject, Load);
validateNonEmptySeq(node.cases, T_F_CASES, T_C_MATCH);
for (MatchCaseTy matchCase : node.cases) {
visit(matchCase);
}
return null;
}
@Override
public Void visit(StmtTy.Raise node) {
    // A bare `raise` (no exception) may not carry a `from` cause.
    if (node.exc == null) {
        if (node.cause != null) {
            throw raiseValueError(ErrorMessages.RAISE_WITH_CAUSE_BUT_NO_EXCEPTION);
        }
        return null;
    }
    validateExpr(node.exc, Load);
    if (node.cause != null) {
        validateExpr(node.cause, Load);
    }
    return null;
}
@Override
public Void visit(StmtTy.Try node) {
validateBody(node.body, T_C_TRY);
if (seqLen(node.handlers) == 0 && seqLen(node.finalBody) == 0) {
throw raiseValueError(ErrorMessages.TRY_HAS_NEITHER_EXCEPT_HANDLERS_NOR_FINALBODY);
}
if (seqLen(node.handlers) == 0 && seqLen(node.orElse) != 0) {
throw raiseValueError(ErrorMessages.TRY_HAS_ORELSE_BUT_NO_EXCEPT_HANDLERS);
}
if (node.handlers != null) {
for (ExceptHandlerTy handler : node.handlers) {
validatePositions(handler);
handler.accept(this);
}
}
if (seqLen(node.finalBody) != 0) {
validateStmts(node.finalBody);
}
if (seqLen(node.orElse) != 0) {
validateStmts(node.orElse);
}
return null;
}
@Override
public Void visit(StmtTy.TryStar node) {
validateBody(node.body, T_C_TRYSTAR);
if (seqLen(node.handlers) == 0 && seqLen(node.finalBody) == 0) {
throw raiseValueError(ErrorMessages.TRYSTAR_HAS_NEITHER_EXCEPT_HANDLERS_NOR_FINALBODY);
}
if (seqLen(node.handlers) == 0 && seqLen(node.orElse) != 0) {
throw raiseValueError(ErrorMessages.TRYSTAR_HAS_ORELSE_BUT_NO_EXCEPT_HANDLERS);
}
if (node.handlers != null) {
for (ExceptHandlerTy handler : node.handlers) {
handler.accept(this);
}
}
if (seqLen(node.finalBody) != 0) {
validateStmts(node.finalBody);
}
if (seqLen(node.orElse) != 0) {
validateStmts(node.orElse);
}
return null;
}
@Override
public Void visit(StmtTy.Assert node) {
validateExpr(node.test, Load);
if (node.msg != null) {
validateExpr(node.msg, Load);
}
return null;
}
@Override
public Void visit(StmtTy.Import node) {
validateNonEmptySeq(node.names, T_F_NAMES, T_C_IMPORT);
return null;
}
@Override
public Void visit(StmtTy.ImportFrom node) {
if (node.level < 0) {
throw raiseValueError(ErrorMessages.NEGATIVE_IMPORT_FROM_LEVEL);
}
validateNonEmptySeq(node.names, T_F_NAMES, T_C_IMPORTFROM);
return null;
}
@Override
public Void visit(StmtTy.Global node) {
validateNonEmptySeq(node.names, T_F_NAMES, T_C_GLOBAL);
return null;
}
@Override
public Void visit(StmtTy.Nonlocal node) {
validateNonEmptySeq(node.names, T_F_NAMES, T_C_NONLOCAL);
return null;
}
@Override
public Void visit(StmtTy.Expr node) {
validateExpr(node.value, Load);
return null;
}
@Override
public Void visit(StmtTy.AsyncFunctionDef node) {
validateBody(node.body, T_C_ASYNCFUNCTIONDEF);
validateTypeParams(node.typeParams);
visit(node.args);
validateExprs(node.decoratorList, Load, false);
if (node.returns != null) {
validateExpr(node.returns, Load);
}
return null;
}
@Override
public Void visit(StmtTy.Pass node) {
return null;
}
@Override
public Void visit(StmtTy.Break node) {
return null;
}
@Override
public Void visit(StmtTy.Continue node) {
return null;
}
/*-
// ExprTy validation
*/
// Equivalent of validate_expr
// Equivalent of validate_expr
// Validates one expression in the given Load/Store/Del context. The context is
// handed to the ExprTy visitor through the `expectedContext` field: each visitor
// must consume it exactly once via one of the checkContext() overloads before it
// recurses into child expressions; the asserts enforce that protocol.
private void validateExpr(ExprTy expr, ExprContextTy context) {
    assert context != null;
    validatePositions(expr);
    // TODO recursion checks
    // CPython uses two switch(exp->kind) statements. We combine them in a single visitor, but
    // each ExprTy visitor must call one of the checkContext() methods exactly
    // once, and it must do so before any recursive calls to validateExpr.
    assert expectedContext == null : "recursive validateExpr() before a call to checkContext()";
    expectedContext = context;
    expr.accept(this);
    assert expectedContext == null : "checkContext() not called by " + expr.getClass().getSimpleName() + " visitor";
}
private ExprContextTy expectedContext;
// used by visitors of ExprTy nodes that have a 'context' field
// used by visitors of ExprTy nodes that have a 'context' field
// Consumes the pending expected context (set by validateExpr) and verifies that
// the node's stored context matches it, e.g. an assignment target must carry
// Store context. Clearing the field marks the context as consumed.
private void checkContext(ExprContextTy actualContext) {
    assert expectedContext != null : "checkContext() called more than once";
    if (actualContext != expectedContext) {
        throw raiseValueError(ErrorMessages.EXPRESSION_MUST_HAVE_S_CONTEXT_BUT_HAS_S_INSTEAD, expectedContext, actualContext);
    }
    expectedContext = null;
}
// used by visitors of ExprTy nodes that don't have any 'context' field
// used by visitors of ExprTy nodes that don't have any 'context' field
// Such nodes are only valid as Load expressions; any other expected context
// means the node appeared as an assignment or deletion target.
private void checkContext() {
    assert expectedContext != null : "checkContext() called more than once";
    if (expectedContext != Load) {
        throw raiseValueError(ErrorMessages.EXPRESSION_WHICH_CANT_BE_ASSIGNED_TO_IN_S_CONTEXT, expectedContext);
    }
    expectedContext = null;
}
@Override
public Void visit(ExprTy.BoolOp node) {
checkContext();
if (seqLen(node.values) < 2) {
throw raiseValueError(ErrorMessages.BOOL_OP_WITH_LESS_THAN_2_VALUES);
}
validateExprs(node.values, Load, false);
return null;
}
@Override
public Void visit(ExprTy.BinOp node) {
checkContext();
validateExpr(node.left, Load);
validateExpr(node.right, Load);
return null;
}
@Override
public Void visit(ExprTy.UnaryOp node) {
checkContext();
validateExpr(node.operand, Load);
return null;
}
@Override
public Void visit(ExprTy.Lambda node) {
checkContext();
visit(node.args);
validateExpr(node.body, Load);
return null;
}
@Override
public Void visit(ExprTy.IfExp node) {
checkContext();
validateExpr(node.test, Load);
validateExpr(node.body, Load);
validateExpr(node.orElse, Load);
return null;
}
@Override
public Void visit(ExprTy.Dict node) {
checkContext();
if (seqLen(node.keys) != seqLen(node.values)) {
throw raiseValueError(ErrorMessages.DICT_DOESNT_HAVE_THE_SAME_NUMBER_OF_KEYS_AS_VALUES);
}
validateExprs(node.keys, Load, true);
validateExprs(node.values, Load, false);
return null;
}
@Override
public Void visit(ExprTy.Set node) {
checkContext();
validateExprs(node.elements, Load, false);
return null;
}
@Override
public Void visit(ExprTy.ListComp node) {
checkContext();
validateComprehension(node.generators);
validateExpr(node.element, Load);
return null;
}
@Override
public Void visit(ExprTy.SetComp node) {
checkContext();
validateComprehension(node.generators);
validateExpr(node.element, Load);
return null;
}
@Override
public Void visit(ExprTy.GeneratorExp node) {
checkContext();
validateComprehension(node.generators);
validateExpr(node.element, Load);
return null;
}
@Override
public Void visit(ExprTy.DictComp node) {
checkContext();
validateComprehension(node.generators);
validateExpr(node.key, Load);
validateExpr(node.value, Load);
return null;
}
@Override
public Void visit(ExprTy.Yield node) {
checkContext();
if (node.value != null) {
validateExpr(node.value, Load);
}
return null;
}
@Override
public Void visit(ExprTy.YieldFrom node) {
checkContext();
validateExpr(node.value, Load);
return null;
}
@Override
public Void visit(ExprTy.Await node) {
checkContext();
validateExpr(node.value, Load);
return null;
}
@Override
public Void visit(ExprTy.Compare node) {
checkContext();
if (seqLen(node.comparators) == 0) {
throw raiseValueError(ErrorMessages.COMPARE_WITH_NO_COMPARATORS);
}
if (seqLen(node.comparators) != seqLen(node.ops)) {
throw raiseValueError(ErrorMessages.COMPARE_HAS_A_DIFFERENT_NUMBER_OF_COMPARATORS_AND_OPERANDS);
}
validateExprs(node.comparators, Load, false);
validateExpr(node.left, Load);
return null;
}
@Override
public Void visit(ExprTy.Call node) {
checkContext();
validateExpr(node.func, Load);
validateExprs(node.args, Load, false);
validateKeywords(node.keywords);
return null;
}
@Override
public Void visit(ExprTy.Constant node) {
checkContext();
validateConstant(node.value);
return null;
}
@Override
public Void visit(ExprTy.JoinedStr node) {
checkContext();
validateExprs(node.values, Load, false);
return null;
}
@Override
public Void visit(ExprTy.FormattedValue node) {
checkContext();
validateExpr(node.value, Load);
if (node.formatSpec != null) {
validateExpr(node.formatSpec, Load);
}
return null;
}
@Override
public Void visit(ExprTy.Attribute node) {
checkContext(node.context);
validateExpr(node.value, Load);
return null;
}
@Override
public Void visit(ExprTy.Subscript node) {
checkContext(node.context);
validateExpr(node.slice, Load);
validateExpr(node.value, Load);
return null;
}
@Override
public Void visit(ExprTy.Starred node) {
checkContext(node.context);
validateExpr(node.value, node.context);
return null;
}
@Override
public Void visit(ExprTy.Slice node) {
checkContext();
if (node.lower != null) {
validateExpr(node.lower, Load);
}
if (node.upper != null) {
validateExpr(node.upper, Load);
}
if (node.step != null) {
validateExpr(node.step, Load);
}
return null;
}
@Override
public Void visit(ExprTy.List node) {
checkContext(node.context);
validateExprs(node.elements, node.context, false);
return null;
}
@Override
public Void visit(ExprTy.Tuple node) {
checkContext(node.context);
validateExprs(node.elements, node.context, false);
return null;
}
@Override
public Void visit(ExprTy.NamedExpr node) {
checkContext();
if (!(node.target instanceof ExprTy.Name)) {
throw raiseTypeError(NAMEDEXPR_TARGET_MUST_BE_A_NAME);
}
validateExpr(node.value, Load);
return null;
}
@Override
public Void visit(ExprTy.Name node) {
validateName(node.id);
checkContext(node.context);
return null;
}
/*-
// PatternTy validation
*/
boolean isStarPatternOk;
// Equivalent of validate_pattern
private void validatePattern(PatternTy pattern, boolean starOk) {
validatePositions(pattern);
boolean prevStarOk = isStarPatternOk;
isStarPatternOk = starOk;
pattern.accept(this);
isStarPatternOk = prevStarOk;
}
// Equivalent of ensure_literal_number
// Equivalent of ensure_literal_number
// True when the constant is a numeric literal of an allowed flavor: DOUBLE/LONG
// count as real, COMPLEX as imaginary.
private static boolean ensureLiteralNumber(ExprTy.Constant expr, boolean allowReal, boolean allowImaginary) {
    Kind kind = expr.value.kind;
    if (allowReal && (kind == Kind.DOUBLE || kind == Kind.LONG)) {
        return true;
    }
    return allowImaginary && kind == Kind.COMPLEX;
}
// Equivalent of ensure_literal_negative
// Equivalent of ensure_literal_negative
// Matches a unary minus applied directly to a numeric constant, i.e. `-<number>`.
// Real literals are always accepted; imaginary ones only when allowImaginary.
private static boolean ensureLiteralNegative(UnaryOp expr, boolean allowImaginary) {
    if (expr.op != UnaryOpTy.USub) {
        return false;
    }
    if (!(expr.operand instanceof ExprTy.Constant)) {
        return false;
    }
    return ensureLiteralNumber((ExprTy.Constant) expr.operand, true, allowImaginary);
}
// Equivalent of ensure_literal_complex
// Equivalent of ensure_literal_complex
// Matches a complex literal of the shape `real ± imag`: the left operand must be
// a real constant (optionally negated), the right an imaginary constant.
private static boolean ensureLiteralComplex(ExprTy.BinOp expr) {
    if (expr.op != OperatorTy.Add && expr.op != OperatorTy.Sub) {
        return false;
    }
    ExprTy left = expr.left;
    boolean leftOk;
    if (left instanceof ExprTy.Constant) {
        leftOk = ensureLiteralNumber((ExprTy.Constant) left, true, false);
    } else if (left instanceof ExprTy.UnaryOp) {
        leftOk = ensureLiteralNegative((ExprTy.UnaryOp) left, false);
    } else {
        leftOk = false;
    }
    if (!leftOk) {
        return false;
    }
    return expr.right instanceof ExprTy.Constant
            && ensureLiteralNumber((ExprTy.Constant) expr.right, false, true);
}
// Equivalent of validate_pattern_match_value
// Equivalent of validate_pattern_match_value
// A MatchValue pattern may only contain literals (numbers, strings, bytes,
// including negated/complex number expressions) or attribute lookups; anything
// else is rejected with a ValueError.
private void validatePatternMatchValue(ExprTy expr) {
    validateExpr(expr, Load);
    if (expr instanceof ExprTy.Constant) {
        ExprTy.Constant constantExpr = (ExprTy.Constant) expr;
        switch (constantExpr.value.kind) {
            case LONG:
            case DOUBLE:
            case BYTES:
            case COMPLEX:
            case CODEPOINTS:
                // Literal kinds allowed inside a value pattern.
                return;
        }
        // Any other constant kind (None, booleans, ...) falls through to here.
        throw raiseValueError(ErrorMessages.UNEXPECTED_CONSTANT_INSIDE_OF_A_LITERAL_PATTERN);
    }
    if (expr instanceof ExprTy.Attribute) {
        // Attribute lookups (e.g. Color.RED) are allowed.
        return;
    }
    if (expr instanceof ExprTy.UnaryOp) {
        // `-<number>` including imaginary literals.
        if (ensureLiteralNegative((ExprTy.UnaryOp) expr, true)) {
            return;
        }
    }
    if (expr instanceof ExprTy.BinOp) {
        // Complex literals of the shape `real ± imag`.
        if (ensureLiteralComplex((ExprTy.BinOp) expr)) {
            return;
        }
    }
    if (expr instanceof ExprTy.JoinedStr) {
        return;
    }
    throw raiseValueError(ErrorMessages.PATTERNS_MAY_ONLY_MATCH_LITERALS_AND_ATTRIBUTE_LOOKUPS);
}
private void validateCapture(String name) {
if (name.equals("_")) {
throw raiseValueError(ErrorMessages.CANT_CAPTURE_NAME_UNDERSCORE_IN_PATTERNS);
}
validateName(name);
}
@Override
public Void visit(PatternTy.MatchValue node) {
validatePatternMatchValue(node.value);
return null;
}
@Override
public Void visit(PatternTy.MatchSingleton node) {
if (node.value.kind != Kind.NONE && node.value.kind != Kind.BOOLEAN) {
throw raiseValueError(ErrorMessages.MATCH_SINGLETON_CAN_ONLY_CONTAIN_TRUE_FALSE_AND_NONE);
}
return null;
}
@Override
public Void visit(PatternTy.MatchSequence node) {
validatePatterns(node.patterns, true);
return null;
}
@Override
public Void visit(PatternTy.MatchMapping node) {
if (seqLen(node.keys) != seqLen(node.patterns)) {
throw raiseValueError(ErrorMessages.MATCH_MAPPING_DOESNT_HAVE_THE_SAME_NUMBER_OF_KEYS_AS_PATTERNS);
}
if (node.rest != null) {
validateCapture(node.rest);
}
if (node.keys != null) {
for (ExprTy key : node.keys) {
if (key instanceof ExprTy.Constant) {
ConstantValue literal = ((ExprTy.Constant) key).value;
if (literal.kind == Kind.NONE || literal.kind == Kind.BOOLEAN) {
continue;
}
}
validatePatternMatchValue(key);
}
}
validatePatterns(node.patterns, false);
return null;
}
@Override
public Void visit(PatternTy.MatchClass node) {
if (seqLen(node.kwdAttrs) != seqLen(node.kwdPatterns)) {
throw raiseValueError(ErrorMessages.MATCH_CLASS_DOESNT_HAVE_THE_SAME_NUMBER_OF_KEYWORD_ATTRIBUTES_AS_PATTERNS);
}
validateExpr(node.cls, Load);
ExprTy cls = node.cls;
while (cls instanceof ExprTy.Attribute) {
cls = ((ExprTy.Attribute) cls).value;
}
if (!(cls instanceof ExprTy.Name)) {
throw raiseValueError(ErrorMessages.MATCH_CLASS_CLS_FIELD_CAN_ONLY_CONTAIN_NAME_OR_ATTRIBUTE_NODES);
}
if (node.kwdAttrs != null) {
for (String identifier : node.kwdAttrs) {
validateName(identifier);
}
}
validatePatterns(node.patterns, false);
validatePatterns(node.kwdPatterns, false);
return null;
}
@Override
public Void visit(PatternTy.MatchStar node) {
if (!isStarPatternOk) {
throw raiseValueError(ErrorMessages.CANT_USE_MATCH_STAR_HERE);
}
if (node.name != null) {
validateCapture(node.name);
}
return null;
}
@Override
public Void visit(PatternTy.MatchAs node) {
if (node.name != null) {
validateCapture(node.name);
}
if (node.pattern != null) {
if (node.name == null) {
throw raiseValueError(ErrorMessages.MATCH_AS_MUST_SPECIFY_A_TARGET_NAME_IF_A_PATTERN_IS_GIVEN);
}
validatePattern(node.pattern, false);
}
return null;
}
@Override
public Void visit(PatternTy.MatchOr node) {
if (seqLen(node.patterns) < 2) {
throw raiseValueError(ErrorMessages.MATCH_OR_REQUIRES_AT_LEAST_2_PATTERNS);
}
validatePatterns(node.patterns, false);
return null;
}
@Override
public Void visit(MatchCaseTy node) {
validatePattern(node.pattern, false);
if (node.guard != null) {
validateExpr(node.guard, Load);
}
validateBody(node.body, T_T_MATCH_CASE);
return null;
}
/*-
// Other visitors
*/
@Override
public Void visit(ComprehensionTy node) {
validateExpr(node.target, Store);
validateExpr(node.iter, Load);
validateExprs(node.ifs, Load, false);
return null;
}
@Override
public Void visit(ExceptHandlerTy.ExceptHandler node) {
if (node.type != null) {
validateExpr(node.type, Load);
}
validateBody(node.body, T_C_EXCEPTHANDLER);
return null;
}
// Equivalent of validate_arguments
@Override
public Void visit(ArgumentsTy node) {
validateArgs(node.posOnlyArgs);
validateArgs(node.args);
if (node.varArg != null && node.varArg.annotation != null) {
validateExpr(node.varArg.annotation, Load);
}
validateArgs(node.kwOnlyArgs);
if (node.kwArg != null && node.kwArg.annotation != null) {
validateExpr(node.kwArg.annotation, Load);
}
if (seqLen(node.defaults) > seqLen(node.posOnlyArgs) + seqLen(node.args)) {
throw raiseValueError(ErrorMessages.MORE_POSITIONAL_DEFAULTS_THAN_ARGS_ON_ARGUMENTS);
}
if (seqLen(node.kwDefaults) != seqLen(node.kwOnlyArgs)) {
throw raiseValueError(ErrorMessages.LENGTH_OF_KWONLYARGS_IS_NOT_THE_SAME_AS_KW_DEFAULTS_ON_ARGUMENTS);
}
validateExprs(node.defaults, Load, false);
validateExprs(node.kwDefaults, Load, true);
return null;
}
@Override
public Void visit(ArgTy node) {
if (node.annotation != null) {
validateExpr(node.annotation, Load);
}
return null;
}
@Override
public Void visit(KeywordTy node) {
validateExpr(node.value, Load);
return null;
}
@Override
public Void visit(AliasTy node) {
return null;
}
@Override
public Void visit(WithItemTy node) {
validateExpr(node.contextExpr, Load);
if (node.optionalVars != null) {
validateExpr(node.optionalVars, Store);
}
return null;
}
@Override
public Void visit(TypeIgnoreTy.TypeIgnore node) {
return null;
}
@Override
public Void visit(TypeAlias node) {
    // Validates a `type X = ...` statement: the alias name must be a plain Name.
    if (!(node.name instanceof ExprTy.Name)) {
        // `throw` added for consistency with every other raiseTypeError call
        // site in this class; raiseTypeError always throws internally, so
        // behavior is unchanged, but this makes the control flow explicit.
        throw raiseTypeError(TYPEALIAS_WITH_NON_NAME_NAME);
    }
    validateExpr(node.name, Store);
    validateTypeParams(node.typeParams);
    validateExpr(node.value, Load);
    return null;
}
@Override
public Void visit(TypeVar node) {
validateName(node.name);
if (node.bound != null) {
validateExpr(node.bound, Load);
}
return null;
}
@Override
public Void visit(ParamSpec node) {
validateName(node.name);
return null;
}
@Override
public Void visit(TypeVarTuple node) {
validateName(node.name);
return null;
}
/*-
// Validation of sequences
*/
// Equivalent of validate_stmts
// Equivalent of validate_stmts
// Validates each statement of a (possibly null) sequence; null elements are
// rejected, matching CPython's "None disallowed" rule.
private void validateStmts(StmtTy[] stmts) {
    if (stmts == null) {
        return;
    }
    for (StmtTy stmt : stmts) {
        if (stmt == null) {
            throw raiseValueError(ErrorMessages.NONE_DISALLOWED_IN_STATEMENT_LIST);
        }
        validateStmt(stmt);
    }
}
// Equivalent of validate_exprs
// Equivalent of validate_exprs
// Validates each expression of a (possibly null) sequence in the given context.
// Null elements are permitted only when allowNull is set (e.g. dict keys of
// `**`-expansions).
private void validateExprs(ExprTy[] exprs, ExprContextTy exprContext, boolean allowNull) {
    if (exprs == null) {
        return;
    }
    for (ExprTy expr : exprs) {
        if (expr == null) {
            if (!allowNull) {
                throw raiseValueError(ErrorMessages.NONE_DISALLOWED_IN_EXPRESSION_LIST);
            }
        } else {
            validateExpr(expr, exprContext);
        }
    }
}
// Equivalent of validate_args
private void validateArgs(ArgTy[] args) {
if (args == null) {
return;
}
for (ArgTy arg : args) {
validatePositions(arg);
visit(arg);
}
}
// Equivalent of validate_keywords
private void validateKeywords(KeywordTy[] keywords) {
if (keywords == null) {
return;
}
for (KeywordTy kw : keywords) {
visit(kw);
}
}
// Equivalent of validate_patterns
private void validatePatterns(PatternTy[] patterns, boolean starOk) {
if (patterns == null) {
return;
}
for (PatternTy pattern : patterns) {
validatePattern(pattern, starOk);
}
}
// Equivalent of validate_body
private void validateBody(StmtTy[] body, TruffleString owner) {
validateNonEmptySeq(body, T_F_BODY, owner);
validateStmts(body);
}
// Equivalent of validate_assignlist
private void validateAssignList(ExprTy[] targets, ExprContextTy ctx) {
validateNonEmptySeq(targets, T_F_TARGETS, ctx == Del ? T_C_DELETE : T_C_ASSIGN);
validateExprs(targets, ctx, false);
}
// Equivalent of _validate_nonempty_seq
// Equivalent of _validate_nonempty_seq
// Raises "empty <what> on <owner>" when the sequence is null or empty.
private void validateNonEmptySeq(Object[] seq, TruffleString what, TruffleString owner) {
    if (seq == null || seq.length == 0) {
        throw raiseValueError(ErrorMessages.EMPTY_S_ON_S, what, owner);
    }
}
// Equivalent of validate_comprehension
private void validateComprehension(ComprehensionTy[] generators) {
if (seqLen(generators) == 0) {
throw raiseValueError(ErrorMessages.COMPREHENSION_WITH_NO_GENERATORS);
}
for (ComprehensionTy comp : generators) {
visit(comp);
}
}
// Equivalent of validate_type_params
private void validateTypeParams(TypeParamTy[] typeParams) {
if (typeParams == null) {
return;
}
for (TypeParamTy typeParam : typeParams) {
validatePositions(typeParam);
typeParam.accept(this);
}
}
/*-
// Helpers
*/
// Equivalent of asdl_seq_LEN
private static int seqLen(Object[] seq) {
return seq == null ? 0 : seq.length;
}
// Equivalent of VALIDATE_POSITIONS
// Equivalent of VALIDATE_POSITIONS
// Checks that the node's source range is internally consistent: the end must
// not precede the start, and a missing (< 0) start line/column must be paired
// with an identical end value.
private void validatePositions(SSTNode node) {
    SourceRange sr = node.getSourceRange();
    if (sr.startLine > sr.endLine) {
        throw raiseValueError(AST_NODE_LINE_RANGE_IS_NOT_VALID, sr.startLine, sr.endLine);
    }
    if ((sr.startLine < 0 && sr.endLine != sr.startLine) || (sr.startColumn < 0 && sr.endColumn != sr.startColumn)) {
        throw raiseValueError(AST_NODE_COLUMN_RANGE_FOR_LINE_RANGE_IS_NOT_VALID, sr.startColumn, sr.endColumn, sr.startLine, sr.endLine);
    }
    // Columns are only comparable when the range is confined to a single line.
    if (sr.startLine == sr.endLine && sr.startColumn > sr.endColumn) {
        throw raiseValueError(LINE_COLUMN_IS_NOT_A_VALID_RANGE, sr.startLine, sr.startColumn, sr.endColumn);
    }
}
// Equivalent of validate_name
// Equivalent of validate_name
// Rejects identifiers that the parser must treat as constants ("None", "True",
// "False").
private void validateName(String id) {
    for (int i = 0; i < FORBIDDEN_NAMES.length; i++) {
        String forbidden = FORBIDDEN_NAMES[i];
        if (forbidden.equals(id)) {
            throw raiseValueError(ErrorMessages.IDENTIFIER_FIELD_CANT_REPRESENT_S_CONSTANT, forbidden);
        }
    }
}
// Equivalent of validate_constant
private void validateConstant(@SuppressWarnings("unused") ConstantValue value) {
// Already done in Obj2SstBase#obj2ConstantValue()
}
// Always throws; the PException return type exists only so call sites can write
// `throw raiseValueError(...)` and keep the compiler's flow analysis satisfied.
private PException raiseValueError(TruffleString format, Object... args) {
    throw PRaiseNode.raiseStatic(node, PythonBuiltinClassType.ValueError, format, args);
}
// Always throws; the PException return type exists only so call sites can write
// `throw raiseTypeError(...)` and keep the compiler's flow analysis satisfied.
private PException raiseTypeError(TruffleString format, Object... args) {
    throw PRaiseNode.raiseStatic(node, PythonBuiltinClassType.TypeError, format, args);
}
}
|
google/closure-compiler | 37,643 | src/com/google/javascript/jscomp/RewriteAsyncIteration.java | /*
* Copyright 2014 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.javascript.jscomp.AstFactory.type;
import com.google.javascript.jscomp.colors.StandardColors;
import com.google.javascript.jscomp.parsing.parser.FeatureSet;
import com.google.javascript.jscomp.parsing.parser.FeatureSet.Feature;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.StaticScope;
import com.google.javascript.rhino.Token;
import java.util.ArrayDeque;
import java.util.LinkedHashSet;
import java.util.Set;
import org.jspecify.annotations.Nullable;
/**
* Converts async generator functions into a function returning a new $jscomp.AsyncGenWrapper around
* the original block and awaits/yields converted to yields of ActionRecords.
*
* <pre>{@code
* async function* foo() {
* let res = await myPromise;
* yield res + 1;
* }
* }</pre>
*
* <p>becomes (prefixes trimmed for clarity)
*
* <pre>{@code
* function foo() {
* return new $jscomp.AsyncGeneratorWrapper((function*(){
* let res = yield new $ActionRecord($ActionEnum.AWAIT_VALUE, myPromise);
* yield new $ActionRecord($ActionEnum.YIELD_VALUE, res + 1);
* })());
* }
* }</pre>
*/
public final class RewriteAsyncIteration implements NodeTraversal.Callback, CompilerPass {
private static final FeatureSet transpiledFeatures =
FeatureSet.BARE_MINIMUM.with(Feature.ASYNC_GENERATORS, Feature.FOR_AWAIT_OF);
static final DiagnosticType CANNOT_CONVERT_ASYNCGEN =
DiagnosticType.error("JSC_CANNOT_CONVERT_ASYNCGEN", "Cannot convert async generator. {0}");
private static final String ACTION_RECORD_NAME = "$jscomp.AsyncGeneratorWrapper$ActionRecord";
private static final String ACTION_ENUM_AWAIT =
"$jscomp.AsyncGeneratorWrapper$ActionEnum.AWAIT_VALUE";
private static final String ACTION_ENUM_YIELD =
"$jscomp.AsyncGeneratorWrapper$ActionEnum.YIELD_VALUE";
private static final String ACTION_ENUM_YIELD_STAR =
"$jscomp.AsyncGeneratorWrapper$ActionEnum.YIELD_STAR";
// Variables with these names get created when rewriting for-await-of loops
private static final String FOR_AWAIT_ITERATOR_TEMP_NAME = "$jscomp$forAwait$tempIterator";
private static final String FOR_AWAIT_RESULT_TEMP_NAME = "$jscomp$forAwait$tempResult";
private static final String FOR_AWAIT_ERROR_RESULT_TEMP_NAME = "$jscomp$forAwait$errResult";
private static final String FOR_AWAIT_CATCH_PARAM_TEMP_NAME = "$jscomp$forAwait$catchErrParam";
private static final String FOR_AWAIT_RETURN_FN_TEMP_NAME = "$jscomp$forAwait$retFn";
private int nextForAwaitId = 0;
private final AbstractCompiler compiler;
private final ArrayDeque<LexicalContext> contextStack;
private static final String THIS_VAR_NAME = "$jscomp$asyncIter$this$";
private static final String ARGUMENTS_VAR_NAME = "$jscomp$asyncIter$arguments";
private static final String SUPER_PROP_GETTER_PREFIX = "$jscomp$asyncIter$super$get$";
private final AstFactory astFactory;
private final StaticScope namespace;
  /** Tracks a function and its context of this/arguments/super, if such a context exists. */
  private static final class LexicalContext {
    // Node that creates the context
    private final Node contextRoot;
    // The current function, or null if root scope where we are not in a function.
    private final @Nullable Node function;
    // The context of the most recent definition of this/super/arguments
    private final @Nullable ThisSuperArgsContext thisSuperArgsContext;
    // Represents the global/root scope. Should only exist on the bottom of the contextStack.
    private LexicalContext(Node contextRoot) {
      this.contextRoot = checkNotNull(contextRoot);
      this.function = null;
      this.thisSuperArgsContext =
          null; // no need for global context to have a this/super/args context
    }
    /**
     * Represents the context of a function or its parameter list.
     *
     * @param parent enclosing context
     * @param contextRoot FUNCTION or PARAM_LIST node
     * @param function same as contextRoot or the FUNCTION containing the PARAM_LIST
     */
    private LexicalContext(
        LexicalContext parent, Node contextRoot, Node function, AbstractCompiler compiler) {
      checkNotNull(parent);
      checkNotNull(contextRoot);
      checkArgument(contextRoot == function || contextRoot.isParamList(), contextRoot);
      checkNotNull(function);
      checkArgument(function.isFunction(), function);
      this.contextRoot = contextRoot;
      this.function = function;
      if (function.isArrowFunction()) {
        // Use the parent context to inherit this, arguments, and super for an arrow function or its
        // parameter list.
        this.thisSuperArgsContext = parent.thisSuperArgsContext;
      } else if (contextRoot.isFunction()) {
        // Non-arrow function gets its own context defining `this`, `arguments`, and `super`.
        // The per-input unique id keeps generated alias names from colliding across functions.
        String newUniqueId =
            compiler
                .getUniqueIdSupplier()
                .getUniqueId(compiler.getInput(NodeUtil.getInputId(contextRoot)));
        this.thisSuperArgsContext = new ThisSuperArgsContext(this, newUniqueId);
      } else {
        // contextRoot is a parameter list.
        // Never alias `this`, `arguments`, or `super` for normal function parameter lists.
        // They are implicitly defined there.
        this.thisSuperArgsContext = null;
      }
    }
    static LexicalContext newGlobalContext(Node contextRoot) {
      return new LexicalContext(contextRoot);
    }
    static LexicalContext newContextForFunction(
        LexicalContext parent, Node function, AbstractCompiler compiler) {
      // Functions need their own context because:
      // - async generator functions must be transpiled
      // - non-async generator functions must NOT be transpiled
      // - arrow functions inside of async generator functions need to have
      //   `this`, `arguments`, and `super` references aliased, including in their
      //   parameter lists
      return new LexicalContext(parent, function, function, compiler);
    }
    static LexicalContext newContextForParamList(
        LexicalContext parent, Node paramList, AbstractCompiler compiler) {
      // Parameter lists need their own context because `this`, `arguments`, and `super` must NOT be
      // aliased for non-arrow function parameter lists, even for async generator functions.
      return new LexicalContext(parent, paramList, parent.function, compiler);
    }
    // NOTE: dereferences thisSuperArgsContext without a null check; callers must guard (see
    // mustReplaceThisSuperArgs, which short-circuits on null before calling this).
    Node getFunctionDeclaringThisArgsSuper() {
      return thisSuperArgsContext.ctx.function;
    }
    /** Is it necessary to replace `this`, `super`, and `arguments` with aliases in this context? */
    boolean mustReplaceThisSuperArgs() {
      return thisSuperArgsContext != null
          && getFunctionDeclaringThisArgsSuper().isAsyncGeneratorFunction();
    }
  }
  /**
   * Tracks how this/arguments/super were used in the function so declarations of replacement
   * variables can be prepended
   */
  private static final class ThisSuperArgsContext {
    /** The LexicalContext representing the function that declared this/super/args */
    private final LexicalContext ctx;
    // GETPROP nodes for replaced `super.x` references (detached by replaceSuper); each one later
    // yields a `const $jscomp$asyncIter$super$get$x = () => super.x;` declaration.
    private final Set<Node> usedSuperProperties = new LinkedHashSet<>();
    // THIS node to bind to the `$jscomp$asyncIter$this$<uniqueId>` alias, or null if unused.
    @Nullable Node thisNodeToAdd = null;
    // Whether `arguments` was referenced, requiring an alias declaration to be prepended.
    private boolean usedArguments = false;
    // unique id to append to names in this context. This is used to ensure that names
    // in different contexts don't collide (e.g. 2 functions don't get the same `let
    // $jscomp$async$this` name declared in their bodies)
    private final String uniqueId;
    ThisSuperArgsContext(LexicalContext ctx, String uniqueId) {
      this.ctx = ctx;
      this.uniqueId = uniqueId;
    }
  }
private RewriteAsyncIteration(
AbstractCompiler compiler, AstFactory astFactory, StaticScope namespace) {
this.compiler = checkNotNull(compiler);
this.astFactory = checkNotNull(astFactory);
this.namespace = checkNotNull(namespace);
this.contextStack = new ArrayDeque<>();
}
static RewriteAsyncIteration create(AbstractCompiler compiler) {
AstFactory astFactory = compiler.createAstFactory();
StaticScope namespace = compiler.getTranspilationNamespace();
return new RewriteAsyncIteration(compiler, astFactory, namespace);
}
  @Override
  public void process(Node externs, Node root) {
    // Seed the stack with the global/root context so contextStack.element() is always valid
    // while the traversal below runs.
    checkState(contextStack.isEmpty());
    contextStack.push(LexicalContext.newGlobalContext(root));
    TranspilationPasses.processTranspile(compiler, root, transpiledFeatures, this);
    TranspilationPasses.maybeMarkFeaturesAsTranspiledAway(compiler, root, transpiledFeatures);
    // Traversal must have popped every function/param-list context it pushed, leaving only
    // the global context (whose function is null).
    checkState(contextStack.element().function == null);
    contextStack.remove();
    checkState(contextStack.isEmpty());
  }
@Override
public boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
if (n.isFunction()) {
contextStack.push(
LexicalContext.newContextForFunction(contextStack.element(), n, this.compiler));
} else if (n.isParamList()) {
contextStack.push(
LexicalContext.newContextForParamList(contextStack.element(), n, this.compiler));
}
return true;
}
  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    // Post-order visit: children have already been rewritten, so each case below only needs to
    // handle `n` itself.
    LexicalContext ctx = contextStack.element();
    switch (n.getToken()) {
        // Async Generators (and popping contexts)
      case PARAM_LIST:
        // Done handling parameter list, so pop its context
        checkState(n.equals(ctx.contextRoot), n);
        contextStack.pop();
        break;
      case FUNCTION:
        checkState(n.equals(ctx.contextRoot));
        if (n.isAsyncGeneratorFunction()) {
          convertAsyncGenerator(n);
          prependTempVarDeclarations(ctx, t);
        }
        // Done handling function, so pop its context
        contextStack.pop();
        break;
      case AWAIT:
        checkNotNull(ctx.function);
        if (ctx.function.isAsyncGeneratorFunction()) {
          convertAwaitOfAsyncGenerator(ctx, n);
        }
        break;
      case YIELD: // Includes yield*
        checkNotNull(ctx.function);
        if (ctx.function.isAsyncGeneratorFunction()) {
          convertYieldOfAsyncGenerator(ctx, n);
        }
        break;
      case RETURN:
        checkNotNull(ctx.function);
        if (ctx.function.isAsyncGeneratorFunction()) {
          convertReturnOfAsyncGenerator(ctx, n);
        }
        break;
        // For-Await-Of loops
      case FOR_AWAIT_OF:
        // for-await-of is only legal inside async functions.
        checkNotNull(ctx.function);
        checkState(ctx.function.isAsyncFunction());
        replaceForAwaitOf(ctx, n);
        NodeUtil.addFeatureToScript(t.getCurrentScript(), Feature.CONST_DECLARATIONS, compiler);
        break;
        // Maintaining references to this/arguments/super
      case THIS:
        if (ctx.mustReplaceThisSuperArgs()) {
          replaceThis(ctx, n);
        }
        break;
      case NAME:
        if (ctx.mustReplaceThisSuperArgs() && n.matchesName("arguments")) {
          replaceArguments(ctx, n);
        }
        break;
      case SUPER:
        if (ctx.mustReplaceThisSuperArgs()) {
          replaceSuper(ctx, n, parent);
        }
        break;
      default:
        break;
    }
  }
  /**
   * Moves the body of an async generator function into a nested generator function and removes the
   * async and generator props from the original function.
   *
   * <pre>{@code
   * async function* foo() {
   *   bar();
   * }
   * }</pre>
   *
   * <p>becomes
   *
   * <pre>{@code
   * function foo() {
   *   return new $jscomp.AsyncGeneratorWrapper((function*(){
   *     bar();
   *   })())
   * }
   * }</pre>
   *
   * @param originalFunction the original AsyncGeneratorFunction Node to be converted.
   */
  private void convertAsyncGenerator(Node originalFunction) {
    checkNotNull(originalFunction);
    checkState(originalFunction.isAsyncGeneratorFunction());
    Node asyncGeneratorWrapperRef =
        astFactory.createQName(this.namespace, "$jscomp.AsyncGeneratorWrapper");
    Node innerFunction = astFactory.createEmptyAsyncGeneratorWrapperArgument(null);
    // Move the original body into the freshly created inner generator function.
    Node innerBlock = originalFunction.getLastChild();
    innerBlock.detach();
    innerFunction.getLastChild().replaceWith(innerBlock);
    // Body should be:
    // return new $jscomp.AsyncGeneratorWrapper((new function with original block here)());
    Node outerBlock =
        astFactory.createBlock(
            astFactory.createReturn(
                astFactory.createNewNode(
                    asyncGeneratorWrapperRef,
                    astFactory.createCall(innerFunction, type(StandardColors.GENERATOR_ID)))));
    originalFunction.addChildToBack(outerBlock);
    // The outer function is no longer async nor a generator; it just returns the wrapper.
    originalFunction.setIsAsyncFunction(false);
    originalFunction.setIsGeneratorFunction(false);
    originalFunction.srcrefTreeIfMissing(originalFunction);
    // Both the inner and original functions should be marked as changed.
    compiler.reportChangeToChangeScope(originalFunction);
    compiler.reportChangeToChangeScope(innerFunction);
  }
  /**
   * Converts an await into a yield of an ActionRecord to perform "AWAIT".
   *
   * <pre>{@code await myPromise}</pre>
   *
   * <p>becomes
   *
   * <pre>{@code yield new ActionRecord(ActionEnum.AWAIT_VALUE, myPromise)}</pre>
   *
   * @param awaitNode the original await Node to be converted
   */
  private void convertAwaitOfAsyncGenerator(LexicalContext ctx, Node awaitNode) {
    checkNotNull(awaitNode);
    checkState(awaitNode.isAwait());
    checkState(ctx != null && ctx.function != null);
    checkState(ctx.function.isAsyncGeneratorFunction());
    Node expression = awaitNode.removeFirstChild();
    checkNotNull(expression, "await needs an expression");
    Node newActionRecord =
        astFactory.createNewNode(
            astFactory.createQName(this.namespace, ACTION_RECORD_NAME),
            astFactory.createQName(this.namespace, ACTION_ENUM_AWAIT),
            expression);
    newActionRecord.srcrefTreeIfMissing(awaitNode);
    awaitNode.addChildToFront(newActionRecord);
    // Reuse the AWAIT node itself as the YIELD so position and source info are preserved.
    awaitNode.setToken(Token.YIELD);
  }
  /**
   * Converts a yield into a yield of an ActionRecord to perform "YIELD" or "YIELD_STAR".
   *
   * <pre>{@code
   * yield;
   * yield first;
   * yield* second;
   * }</pre>
   *
   * <p>becomes
   *
   * <pre>{@code
   * yield new ActionRecord(ActionEnum.YIELD_VALUE, undefined);
   * yield new ActionRecord(ActionEnum.YIELD_VALUE, first);
   * yield new ActionRecord(ActionEnum.YIELD_STAR, second);
   * }</pre>
   *
   * @param yieldNode the Node to be converted
   */
  private void convertYieldOfAsyncGenerator(LexicalContext ctx, Node yieldNode) {
    checkNotNull(yieldNode);
    checkState(yieldNode.isYield());
    checkState(ctx != null && ctx.function != null);
    checkState(ctx.function.isAsyncGeneratorFunction());
    Node expression = yieldNode.removeFirstChild();
    Node newActionRecord =
        astFactory.createNewNode(astFactory.createQName(this.namespace, ACTION_RECORD_NAME));
    if (yieldNode.isYieldAll()) {
      checkNotNull(expression);
      // yield* expression becomes new ActionRecord(YIELD_STAR, expression)
      newActionRecord.addChildToBack(
          astFactory.createQName(this.namespace, ACTION_ENUM_YIELD_STAR));
      newActionRecord.addChildToBack(expression);
    } else {
      if (expression == null) {
        // A bare `yield;` yields undefined.
        expression = NodeUtil.newUndefinedNode(null);
      }
      // yield expression becomes new ActionRecord(YIELD, expression)
      newActionRecord.addChildToBack(astFactory.createQName(this.namespace, ACTION_ENUM_YIELD));
      newActionRecord.addChildToBack(expression);
    }
    newActionRecord.srcrefTreeIfMissing(yieldNode);
    yieldNode.addChildToFront(newActionRecord);
    // The delegation (yield*) semantics now live in the ActionRecord, so clear the yield-all bit.
    yieldNode.putBooleanProp(Node.YIELD_ALL, false);
  }
/**
* Converts a return into a return of an ActionRecord.
*
* <pre>{@code
* return;
* return value;
* }</pre>
*
* <p>becomes
*
* <pre>{@code
* return new ActionRecord(ActionEnum.YIELD_VALUE, undefined);
* return new ActionRecord(ActionEnum.YIELD_VALUE, value);
* }</pre>
*
* @param returnNode the Node to be converted
*/
private void convertReturnOfAsyncGenerator(LexicalContext ctx, Node returnNode) {
checkNotNull(returnNode);
checkState(returnNode.isReturn());
checkState(ctx != null && ctx.function != null);
checkState(ctx.function.isAsyncGeneratorFunction());
Node expression = returnNode.removeFirstChild();
Node newActionRecord =
astFactory.createNewNode(astFactory.createQName(this.namespace, ACTION_RECORD_NAME));
if (expression == null) {
expression = NodeUtil.newUndefinedNode(null);
}
// return expression becomes new ActionRecord(YIELD, expression)
newActionRecord.addChildToBack(astFactory.createQName(this.namespace, ACTION_ENUM_YIELD));
newActionRecord.addChildToBack(expression);
newActionRecord.srcrefTreeIfMissing(returnNode);
returnNode.addChildToFront(newActionRecord);
}
  /**
   * Rewrites for await of loop.
   *
   * <pre>{@code
   * for await (lhs of rhs) { block(); }
   * }</pre>
   *
   * <p>...becomes...
   *
   * <pre>{@code
   * var errorRes, retFn, tmpRes;
   * try {
   *   for (var tmpIterator = makeAsyncIterator(rhs);;) {
   *     tmpRes = await tmpIterator.next();
   *     if (tmpRes.done) {
   *       break;
   *     }
   *     lhs = $tmpRes.value;
   *     {
   *       block(); // Wrapped in a block in case block re-declares lhs variable.
   *     }
   *   }
   * } catch(e) {
   *   errorRes = { error: e };
   * } finally {
   *   try {
   *     if (tmpRes && !tmpRes.done && (retFn = _tmpIterator.return)) await retFn.call(tmpIterator);
   *   }
   *   finally { if (errorRes) throw errorRes.error; }
   * }
   *
   * }</pre>
   */
  private void replaceForAwaitOf(LexicalContext ctx, Node forAwaitOf) {
    // Unique suffix so nested or sequential for-await-of loops get distinct temporaries.
    int forAwaitId = nextForAwaitId++;
    String iteratorTempName = FOR_AWAIT_ITERATOR_TEMP_NAME + forAwaitId;
    String resultTempName = FOR_AWAIT_RESULT_TEMP_NAME + forAwaitId;
    String errorResultTempName = FOR_AWAIT_ERROR_RESULT_TEMP_NAME + forAwaitId;
    String catchErrorParamTempName = FOR_AWAIT_CATCH_PARAM_TEMP_NAME + forAwaitId;
    String returnFuncTempName = FOR_AWAIT_RETURN_FN_TEMP_NAME + forAwaitId;
    checkState(forAwaitOf.hasParent(), "Cannot replace parentless for-await-of");
    final Node forAwaitOfParent = forAwaitOf.getParent();
    final Node replacementPoint;
    if (forAwaitOfParent.isLabel()) {
      // If the forAwaitOf is a label's statement child, then the label must move with the for upon
      // rewriting.
      checkState(forAwaitOf.isSecondChildOf(forAwaitOfParent), forAwaitOfParent);
      replacementPoint = forAwaitOfParent;
    } else {
      replacementPoint = forAwaitOf;
    }
    Node lhs = forAwaitOf.removeFirstChild();
    Node rhs = forAwaitOf.removeFirstChild();
    Node originalBody = forAwaitOf.removeFirstChild();
    // Generate `var tmpIterator = makeAsyncIterator(rhs);`
    Node initializer =
        astFactory
            .createSingleVarNameDeclaration(
                iteratorTempName, astFactory.createJSCompMakeAsyncIteratorCall(rhs, this.namespace))
            .srcrefTreeIfMissing(rhs);
    // IIterableResult<VALUE> - it's a structural type so optimizations treat it as Object
    AstFactory.Type iterableResultType = type(StandardColors.TOP_OBJECT);
    // Create code `if (tmpRes.done) {break;}`
    Node breakIfDone =
        astFactory.createIf(
            astFactory.createGetProp(
                astFactory.createName(resultTempName, iterableResultType),
                "done",
                type(StandardColors.BOOLEAN)),
            astFactory.createBlock(astFactory.createBreak()));
    // Assignment statement to be moved from lhs into body of new for-loop
    Node lhsAssignment;
    final AstFactory.Type resultType;
    if (lhs.isValidAssignmentTarget()) {
      // In case of "for await (x of _)" just assign into the lhs.
      // Generate `lhs = $tmpRes.value;`
      resultType = type(lhs);
      lhsAssignment =
          astFactory.exprResult(
              astFactory.createAssign(
                  lhs,
                  astFactory.createGetProp(
                      astFactory.createName(resultTempName, iterableResultType),
                      "value",
                      resultType)));
    } else if (NodeUtil.isNameDeclaration(lhs)) {
      final Node declarationTarget = lhs.getFirstChild();
      if (declarationTarget.isName()) {
        // `for await (let x of _)`
        // Add a child to the `NAME` node to create `let x = res.value`
        resultType = type(declarationTarget);
        declarationTarget.addChildToBack(
            astFactory.createGetProp(
                astFactory.createName(resultTempName, iterableResultType), "value", resultType));
      } else {
        // Generate `for await (let [x, y] of _)`
        // Add a child to the DESTRUCTURING_LHS node to create `[x, y] = res.value`
        checkState(declarationTarget.isDestructuringLhs(), declarationTarget);
        Node destructuringPattern = declarationTarget.getOnlyChild();
        resultType = type(destructuringPattern);
        declarationTarget.addChildToBack(
            astFactory.createGetProp(
                astFactory.createName(resultTempName, iterableResultType), "value", resultType));
      }
      // The whole declaration (now with an initializer) becomes the in-loop statement.
      lhsAssignment = lhs;
    } else {
      throw new AssertionError("unexpected for-await-of lhs");
    }
    lhsAssignment.srcrefTreeIfMissing(lhs);
    // Generate `var errorRes;`
    Node errorResDecl =
        astFactory
            .createSingleVarNameDeclaration(errorResultTempName)
            .srcrefTreeIfMissing(forAwaitOf);
    // Generate `var tmpRes;`
    Node tempResultDecl =
        astFactory.createSingleVarNameDeclaration(resultTempName).srcrefTreeIfMissing(forAwaitOf);
    // Generate `var returnFunc;`
    Node returnFuncDecl =
        astFactory
            .createSingleVarNameDeclaration(returnFuncTempName)
            .srcrefTreeIfMissing(forAwaitOf);
    // Generate `tmpRes = await tmpIterator.next()` (or the yield-of-AWAIT-record equivalent when
    // inside an async generator; see constructAwaitNextResult).
    Node resultDeclaration =
        astFactory.exprResult(
            astFactory.createAssign(
                resultTempName,
                constructAwaitNextResult(ctx, iteratorTempName, resultType, iterableResultType)));
    Node newForLoop =
        astFactory.createFor(
            astFactory.createEmpty(),
            astFactory.createEmpty(),
            astFactory.createEmpty(),
            astFactory.createBlock(
                resultDeclaration, breakIfDone, lhsAssignment, ensureBlock(originalBody)));
    if (replacementPoint.isLabel()) {
      // Re-attach the original label to the rewritten loop so labeled break/continue still work.
      newForLoop = astFactory.createLabel(replacementPoint.getFirstChild().cloneNode(), newForLoop);
    }
    // Generates code `try { .. newForLoop .. }`
    Node tryNode = createOuterTry(newForLoop);
    initializer.insertBefore(newForLoop);
    // Generate code `catch(e) { errorRes = { error: e }; }`
    Node catchNode = createOuterCatch(catchErrorParamTempName, errorResultTempName);
    // Generate the finally code block.
    Node finallyNode =
        createOuterFinally(
            ctx,
            iterableResultType,
            resultType,
            resultTempName,
            returnFuncTempName,
            iteratorTempName,
            errorResultTempName);
    Node tryCatchFinally = astFactory.createTryCatchFinally(tryNode, catchNode, finallyNode);
    replacementPoint.replaceWith(tryCatchFinally);
    tryCatchFinally.srcrefTreeIfMissing(replacementPoint);
    // Temp declarations must precede the try/catch/finally that uses them.
    errorResDecl.insertBefore(tryCatchFinally);
    tempResultDecl.insertBefore(tryCatchFinally);
    returnFuncDecl.insertBefore(tryCatchFinally);
    compiler.reportChangeToEnclosingScope(tryCatchFinally);
  }
// Generates code `try { .. newForLoop .. }`
private Node createOuterTry(Node newForLoop) {
Node tryNode = astFactory.createBlock();
tryNode.addChildToBack(newForLoop);
return tryNode;
}
// Generates code `catch(e) { errorRes = { error: e }; }`
private Node createOuterCatch(String catchErrorParamTempName, String errorResultTempName) {
// Generate `errorRes = { error: e };`
Node catchBodyStmt =
astFactory.exprResult(
astFactory.createAssign(
errorResultTempName,
astFactory.createObjectLit(
astFactory.createStringKey(
"error", astFactory.createNameWithUnknownType(catchErrorParamTempName)))));
// Generate `{ errorRes = { error: e }; }`
Node wrapperCatchBlockNode = astFactory.createBlock();
wrapperCatchBlockNode.addChildToBack(catchBodyStmt);
// Generate `catch(e) { errorRes = { error: e }; }`
return astFactory.createCatch(
astFactory.createNameWithUnknownType(catchErrorParamTempName), wrapperCatchBlockNode);
}
  /**
   * Generates the outer finally code of the rewriting.
   *
   * <pre>{@code
   * finally {
   *   try {
   *     if (tmpRes && !tmpRes.done && (retFn = _tmpIterator.return)) await retFn.call(tmpIterator);
   *   }
   *   finally { if (errorRes) throw errorRes.error; }
   * }
   * }</pre>
   */
  private Node createOuterFinally(
      LexicalContext ctx,
      AstFactory.Type iterableResultType,
      AstFactory.Type resultType,
      String resultTempName,
      String returnFuncTempName,
      String iteratorTempName,
      String errorResultTempName) {
    Node finallyNode = astFactory.createBlock();
    // Generate `tmpRes`
    Node tmpResNameNode = astFactory.createNameWithUnknownType(resultTempName);
    Node tmpResDoneGetProp =
        astFactory.createGetProp(
            astFactory.createName(resultTempName, iterableResultType),
            "done",
            type(StandardColors.BOOLEAN));
    // Generate `tmpRes && !tmpRes.done`
    Node and = astFactory.createAnd(tmpResNameNode, astFactory.createNot(tmpResDoneGetProp));
    // Generate `(retFn = _tmpIterator.return)`
    Node assign =
        astFactory.createAssign(
            astFactory.createNameWithUnknownType(returnFuncTempName),
            astFactory.createGetProp(
                astFactory.createName(iteratorTempName, resultType),
                "return",
                type(StandardColors.UNKNOWN)));
    // Generate `(tmpRes && !tmpRes.done && (retFn = _tmpIterator.return))`
    Node ifCond = astFactory.createAnd(and, assign);
    Node awaitOrYieldStmt = null;
    if (ctx.function.isAsyncGeneratorFunction()) {
      // We are in an AsyncGenerator and must instead yield an "await" ActionRecord
      awaitOrYieldStmt =
          astFactory.exprResult(
              astFactory.createYield(
                  iterableResultType,
                  astFactory.createNewNode(
                      astFactory.createQName(this.namespace, ACTION_RECORD_NAME),
                      astFactory.createQName(this.namespace, ACTION_ENUM_AWAIT),
                      astFactory.createCall(
                          astFactory.createGetPropWithUnknownType(
                              astFactory.createName(
                                  returnFuncTempName, type(StandardColors.UNKNOWN)),
                              "call"),
                          type(StandardColors.UNKNOWN),
                          astFactory.createName(iteratorTempName, resultType)))));
    } else {
      // Generate `await retFn.call(tmpIterator);`
      awaitOrYieldStmt =
          astFactory.exprResult(
              astFactory.createAwait(
                  iterableResultType,
                  astFactory.createCall(
                      astFactory.createGetPropWithUnknownType(
                          astFactory.createName(returnFuncTempName, type(StandardColors.UNKNOWN)),
                          "call"),
                      type(StandardColors.PROMISE_ID),
                      astFactory.createName(iteratorTempName, resultType))));
    }
    Node ifBody = astFactory.createBlock();
    ifBody.addChildToBack(awaitOrYieldStmt);
    Node ifBlock = astFactory.createIf(ifCond, ifBody);
    Node innerTryBlock = astFactory.createBlock();
    innerTryBlock.addChildToBack(ifBlock);
    // `finally { if (errorRes) throw errorRes.error; }`
    Node innerFinallyBlock = astFactory.createBlock();
    // if (errorRes) throw errorRes.error;
    // Rethrow any error captured by the outer catch only after the iterator has been closed.
    Node secondIfBody = astFactory.createBlock();
    Node throwStmt =
        astFactory.createThrow(
            astFactory.createGetPropWithUnknownType(
                astFactory.createNameWithUnknownType(errorResultTempName), "error"));
    secondIfBody.addChildToBack(throwStmt);
    Node secondIfCond = astFactory.createNameWithUnknownType(errorResultTempName);
    Node secondIfBlock = astFactory.createIf(secondIfCond, secondIfBody);
    innerFinallyBlock.addChildToBack(secondIfBlock);
    Node finallyBody = astFactory.createTryFinally(innerTryBlock, innerFinallyBlock);
    finallyNode.addChildToBack(finallyBody);
    return finallyNode;
  }
private Node ensureBlock(Node possiblyBlock) {
return possiblyBlock.isBlock()
? possiblyBlock
: astFactory.createBlock(possiblyBlock).srcref(possiblyBlock);
}
  /**
   * Builds the expression that fetches and awaits the iterator's next result.
   *
   * <p>Inside an async generator (whose body becomes a plain generator) this is
   * {@code yield new ActionRecord(AWAIT_VALUE, tmpIterator.next())}; inside a plain async
   * function it is {@code await tmpIterator.next()}.
   */
  private Node constructAwaitNextResult(
      LexicalContext ctx,
      String iteratorTempName,
      AstFactory.Type iteratorType,
      AstFactory.Type iterableResultType) {
    checkNotNull(ctx.function);
    Node result;
    Node iteratorTemp = astFactory.createName(iteratorTempName, iteratorType);
    if (ctx.function.isAsyncGeneratorFunction()) {
      // We are in an AsyncGenerator and must instead yield an "await" ActionRecord
      result =
          astFactory.createYield(
              iterableResultType,
              astFactory.createNewNode(
                  astFactory.createQName(this.namespace, ACTION_RECORD_NAME),
                  astFactory.createQName(this.namespace, ACTION_ENUM_AWAIT),
                  astFactory.createCallWithUnknownType(
                      astFactory.createGetPropWithUnknownType(iteratorTemp, "next"))));
    } else {
      // Plain async function: `await tmpIterator.next()`
      result =
          astFactory.createAwait(
              iterableResultType,
              astFactory.createCall(
                  astFactory.createGetPropWithUnknownType(iteratorTemp, "next"),
                  type(StandardColors.PROMISE_ID)));
    }
    return result;
  }
  /**
   * Replaces a `this` reference with its alias name and records that the enclosing non-arrow
   * function needs a `const $jscomp$asyncIter$this$<id> = this;` declaration prepended (see
   * prependTempVarDeclarations).
   */
  private void replaceThis(LexicalContext ctx, Node n) {
    checkArgument(n.isThis());
    checkArgument(ctx != null && ctx.mustReplaceThisSuperArgs());
    checkArgument(ctx.function != null, "Cannot prepend declarations to root scope");
    checkNotNull(ctx.thisSuperArgsContext);
    n.replaceWith(
        astFactory
            .createName(THIS_VAR_NAME + ctx.thisSuperArgsContext.uniqueId, type(n))
            .srcref(n));
    // Remember the THIS node (with its type) so the alias declaration can be generated later.
    ctx.thisSuperArgsContext.thisNodeToAdd = astFactory.createThis(type(n));
    compiler.reportChangeToChangeScope(ctx.function);
  }
private void replaceArguments(LexicalContext ctx, Node n) {
checkArgument(n.isName() && "arguments".equals(n.getString()));
checkArgument(ctx != null && ctx.mustReplaceThisSuperArgs());
checkArgument(ctx.function != null, "Cannot prepend declarations to root scope");
checkNotNull(ctx.thisSuperArgsContext);
n.replaceWith(astFactory.createName(ARGUMENTS_VAR_NAME, type(n)).srcref(n));
ctx.thisSuperArgsContext.usedArguments = true;
compiler.reportChangeToChangeScope(ctx.function);
}
  /**
   * Replaces `super.x` with a call to a generated getter (`$jscomp$asyncIter$super$get$x()`).
   * Reports an error for forms other than a GETPROP (e.g. `super['x']`).
   */
  private void replaceSuper(LexicalContext ctx, Node n, Node parent) {
    if (!parent.isGetProp()) {
      compiler.report(
          JSError.make(
              parent,
              CANNOT_CONVERT_ASYNCGEN,
              "super only allowed with getprop (like super.foo(), not super['foo']())"));
      return;
    }
    checkArgument(n.isSuper());
    checkArgument(ctx != null && ctx.mustReplaceThisSuperArgs());
    checkArgument(ctx.function != null, "Cannot prepend declarations to root scope");
    checkNotNull(ctx.thisSuperArgsContext);
    String propertyName = parent.getString();
    String propertyReplacementNameText = SUPER_PROP_GETTER_PREFIX + propertyName;
    // super.x => $super$get$x()
    Node getPropReplacement =
        astFactory.createCall(
            astFactory.createName(propertyReplacementNameText, type(StandardColors.TOP_OBJECT)),
            type(parent));
    Node grandparent = parent.getParent();
    if (grandparent.isCall() && grandparent.getFirstChild() == parent) {
      // super.x(...) => super.x.call($this, ...)
      // Pass the `this` alias explicitly so the method is invoked with the right receiver.
      getPropReplacement = astFactory.createGetPropWithUnknownType(getPropReplacement, "call");
      ctx.thisSuperArgsContext.thisNodeToAdd =
          astFactory.createThisForEs6ClassMember(ctx.contextRoot.getParent());
      astFactory
          .createName(
              THIS_VAR_NAME + ctx.thisSuperArgsContext.uniqueId,
              type(ctx.thisSuperArgsContext.thisNodeToAdd))
          .srcref(parent)
          .insertAfter(parent);
    }
    getPropReplacement.srcrefTree(parent);
    parent.replaceWith(getPropReplacement);
    // `parent` is now detached from the AST; it is retained only so the getter declarations can
    // later read its property name and type (see createSuperMethodReferenceGetter).
    ctx.thisSuperArgsContext.usedSuperProperties.add(parent);
    compiler.reportChangeToChangeScope(ctx.function);
  }
  /**
   * Prepends this/super/argument replacement variables to the top of the context's block
   *
   * <pre>{@code
   * function() {
   *   return new AsyncGenWrapper(function*() {
   *     // code using replacements for this and super.foo
   *   }())
   * }
   * }</pre>
   *
   * will be converted to
   *
   * <pre>{@code
   * function() {
   *   const $jscomp$asyncIter$this = this;
   *   const $jscomp$asyncIter$super$get$foo = () => super.foo;
   *   return new AsyncGenWrapper(function*() {
   *     // code using replacements for this and super.foo
   *   }())
   * }
   * }</pre>
   */
  private void prependTempVarDeclarations(LexicalContext ctx, NodeTraversal t) {
    checkArgument(ctx != null);
    checkArgument(ctx.function != null, "Cannot prepend declarations to root scope");
    checkNotNull(ctx.thisSuperArgsContext);
    ThisSuperArgsContext thisSuperArgsCtx = ctx.thisSuperArgsContext;
    Node function = ctx.function;
    Node block = function.getLastChild();
    checkNotNull(block, function);
    Node prefixBlock = astFactory.createBlock(); // Temporary block to hold all declarations
    if (thisSuperArgsCtx.thisNodeToAdd != null) {
      // { // prefixBlock
      //   const $jscomp$asyncIter$this = this;
      // }
      prefixBlock.addChildToBack(
          astFactory
              .createSingleConstNameDeclaration(
                  THIS_VAR_NAME + thisSuperArgsCtx.uniqueId, thisSuperArgsCtx.thisNodeToAdd)
              .srcrefTree(block));
    }
    if (thisSuperArgsCtx.usedArguments) {
      // { // prefixBlock
      //   const $jscomp$asyncIter$this = this;
      //   const $jscomp$asyncIter$arguments = arguments;
      // }
      prefixBlock.addChildToBack(
          astFactory
              .createSingleConstNameDeclaration(
                  ARGUMENTS_VAR_NAME, astFactory.createArgumentsReference())
              .srcrefTree(block));
    }
    // One `const $jscomp$asyncIter$super$get$x = ...` declaration per replaced super property.
    for (Node replacedMethodReference : thisSuperArgsCtx.usedSuperProperties) {
      prefixBlock.addChildToBack(createSuperMethodReferenceGetter(replacedMethodReference, t));
    }
    prefixBlock.srcrefTreeIfMissing(block);
    // Pulls all declarations out of prefixBlock and prepends in block
    // block: {
    //   // declarations
    //   // code using this/super/args
    // }
    block.addChildrenToFront(prefixBlock.removeChildren());
    if (thisSuperArgsCtx.thisNodeToAdd != null
        || thisSuperArgsCtx.usedArguments
        || !thisSuperArgsCtx.usedSuperProperties.isEmpty()) {
      // Only report a change (and the const feature) if something was actually prepended.
      compiler.reportChangeToChangeScope(function);
      NodeUtil.addFeatureToScript(t.getCurrentScript(), Feature.CONST_DECLARATIONS, compiler);
    }
  }
  /**
   * Builds `const $jscomp$asyncIter$super$get$x = () => { return super.x; };` for one replaced
   * super property reference (a detached GETPROP recorded by replaceSuper).
   */
  private Node createSuperMethodReferenceGetter(Node replacedMethodReference, NodeTraversal t) {
    // const super$get$x = () => { return super.x; };
    AstFactory.Type typeOfSuper = type(replacedMethodReference.getFirstChild());
    Node superReference = astFactory.createSuper(typeOfSuper);
    String replacedMethodName = replacedMethodReference.getString();
    Node arrowFunction =
        astFactory.createZeroArgArrowFunctionForExpression(
            astFactory.createBlock(
                astFactory.createReturn(
                    astFactory.createGetProp(
                        superReference, replacedMethodName, type(replacedMethodReference)))));
    compiler.reportChangeToChangeScope(arrowFunction);
    NodeUtil.addFeatureToScript(t.getCurrentScript(), Feature.ARROW_FUNCTIONS, compiler);
    String superReplacementName = SUPER_PROP_GETTER_PREFIX + replacedMethodName;
    return astFactory.createSingleConstNameDeclaration(superReplacementName, arrowFunction);
  }
}
|
apache/commons-math | 37,849 | commons-math-legacy/src/main/java/org/apache/commons/math4/legacy/linear/RealMatrix.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math4.legacy.linear;
import org.apache.commons.math4.legacy.exception.DimensionMismatchException;
import org.apache.commons.math4.legacy.exception.NoDataException;
import org.apache.commons.math4.legacy.exception.NotPositiveException;
import org.apache.commons.math4.legacy.exception.NotStrictlyPositiveException;
import org.apache.commons.math4.legacy.exception.NullArgumentException;
import org.apache.commons.math4.legacy.exception.NumberIsTooSmallException;
import org.apache.commons.math4.legacy.exception.OutOfRangeException;
/**
* Interface defining a real-valued matrix with basic algebraic operations.
* <p>
* Matrix element indexing is 0-based -- e.g., <code>getEntry(0, 0)</code>
* returns the element in the first row, first column of the matrix.</p>
*
*/
public interface RealMatrix extends AnyMatrix {
/**
   * Create a new RealMatrix of the same type as the instance with the
   * supplied row and column dimensions.
*
* @param rowDimension the number of rows in the new matrix
* @param columnDimension the number of columns in the new matrix
* @return a new matrix of the same type as the instance
* @throws NotStrictlyPositiveException if row or column dimension is not
* positive.
* @since 2.0
*/
RealMatrix createMatrix(int rowDimension, int columnDimension)
throws NotStrictlyPositiveException;
/**
* Returns a (deep) copy of this.
*
* @return matrix copy
*/
RealMatrix copy();
/**
* Returns the sum of {@code this} and {@code m}.
*
* @param m matrix to be added
* @return {@code this + m}
* @throws MatrixDimensionMismatchException if {@code m} is not the same
* size as {@code this}.
*/
RealMatrix add(RealMatrix m)
throws MatrixDimensionMismatchException;
/**
* Returns {@code this} minus {@code m}.
*
* @param m matrix to be subtracted
* @return {@code this - m}
* @throws MatrixDimensionMismatchException if {@code m} is not the same
* size as {@code this}.
*/
RealMatrix subtract(RealMatrix m)
throws MatrixDimensionMismatchException;
/**
* Returns the result of adding {@code d} to each entry of {@code this}.
*
* @param d value to be added to each entry
* @return {@code d + this}
*/
RealMatrix scalarAdd(double d);
/**
* Returns the result of multiplying each entry of {@code this} by
* {@code d}.
*
* @param d value to multiply all entries by
* @return {@code d * this}
*/
RealMatrix scalarMultiply(double d);
/**
* Returns the result of postmultiplying {@code this} by {@code m}.
*
* @param m matrix to postmultiply by
* @return {@code this * m}
* @throws DimensionMismatchException if
* {@code columnDimension(this) != rowDimension(m)}
*/
RealMatrix multiply(RealMatrix m)
throws DimensionMismatchException;
/**
* Returns the result of premultiplying {@code this} by {@code m}.
*
* @param m matrix to premultiply by
* @return {@code m * this}
* @throws DimensionMismatchException if
* {@code rowDimension(this) != columnDimension(m)}
*/
RealMatrix preMultiply(RealMatrix m)
throws DimensionMismatchException;
/**
* Returns the result of multiplying {@code this} with itself {@code p}
* times. Depending on the underlying storage, instability for high powers
* might occur.
*
* @param p raise {@code this} to power {@code p}
* @return {@code this^p}
* @throws NotPositiveException if {@code p < 0}
* @throws NonSquareMatrixException if the matrix is not square
*/
RealMatrix power(int p)
throws NotPositiveException, NonSquareMatrixException;
/**
* Returns matrix entries as a two-dimensional array.
*
* @return 2-dimensional array of entries
*/
double[][] getData();
/**
* Returns the <a href="http://mathworld.wolfram.com/MaximumAbsoluteRowSumNorm.html">
* maximum absolute row sum norm</a> of the matrix.
*
     * @return the maximum absolute row sum norm of the matrix
*/
double getNorm();
/**
* Returns the <a href="http://mathworld.wolfram.com/FrobeniusNorm.html">
* Frobenius norm</a> of the matrix.
*
* @return norm
*/
double getFrobeniusNorm();
/**
* Gets a submatrix. Rows and columns are indicated
* counting from 0 to n-1.
*
* @param startRow Initial row index
* @param endRow Final row index (inclusive)
* @param startColumn Initial column index
* @param endColumn Final column index (inclusive)
* @return The subMatrix containing the data of the
* specified rows and columns.
* @throws OutOfRangeException if the indices are not valid.
* @throws NumberIsTooSmallException if {@code endRow < startRow} or
* {@code endColumn < startColumn}.
*/
RealMatrix getSubMatrix(int startRow, int endRow, int startColumn,
int endColumn)
throws OutOfRangeException, NumberIsTooSmallException;
/**
* Gets a submatrix. Rows and columns are indicated counting from 0 to n-1.
*
* @param selectedRows Array of row indices.
* @param selectedColumns Array of column indices.
* @return The subMatrix containing the data in the specified rows and
* columns
* @throws NullArgumentException if the row or column selections are
* {@code null}
* @throws NoDataException if the row or column selections are empty (zero
* length).
* @throws OutOfRangeException if the indices are not valid.
*/
RealMatrix getSubMatrix(int[] selectedRows, int[] selectedColumns)
throws NullArgumentException, NoDataException, OutOfRangeException;
    /**
     * Copy a submatrix. Rows and columns are indicated counting from 0 to n-1.
     *
     * @param startRow Initial row index
     * @param endRow Final row index (inclusive)
     * @param startColumn Initial column index
     * @param endColumn Final column index (inclusive)
     * @param destination The arrays where the submatrix data should be copied
     * (if larger than rows/columns counts, only the upper-left part will be
     * used)
     * @throws OutOfRangeException if the indices are not valid.
     * @throws NumberIsTooSmallException if {@code endRow < startRow} or
     * {@code endColumn < startColumn}.
     * @throws MatrixDimensionMismatchException if the destination array is too
     * small.
     */
    void copySubMatrix(int startRow, int endRow, int startColumn,
                       int endColumn, double[][] destination)
        throws OutOfRangeException, NumberIsTooSmallException,
        MatrixDimensionMismatchException;
    /**
     * Copy a submatrix. Rows and columns are indicated counting from 0 to n-1.
     *
     * @param selectedRows Array of row indices.
     * @param selectedColumns Array of column indices.
     * @param destination The arrays where the submatrix data should be copied
     * (if larger than rows/columns counts, only the upper-left part will be
     * used)
     * @throws NullArgumentException if the row or column selections are
     * {@code null}
     * @throws NoDataException if the row or column selections are empty (zero
     * length).
     * @throws OutOfRangeException if the indices are not valid.
     * @throws MatrixDimensionMismatchException if the destination array is too
     * small.
     */
    void copySubMatrix(int[] selectedRows, int[] selectedColumns,
                       double[][] destination)
        throws OutOfRangeException, NullArgumentException, NoDataException,
        MatrixDimensionMismatchException;
    /**
     * Replace the submatrix starting at {@code row, column} using data in the
     * input {@code subMatrix} array. Indexes are 0-based.
     * <p>
     * Example:<br>
     * Starting with <pre>
     * 1 2 3 4
     * 5 6 7 8
     * 9 0 1 2
     * </pre>
     * and <code>subMatrix = {{3, 4}, {5, 6}}</code>, invoking
     * {@code setSubMatrix(subMatrix, 1, 1)} will result in <pre>
     * 1 2 3 4
     * 5 3 4 8
     * 9 5 6 2
     * </pre>
     *
     * @param subMatrix array containing the submatrix replacement data
     * @param row row coordinate of the top, left element to be replaced
     * @param column column coordinate of the top, left element to be replaced
     * @throws NoDataException if {@code subMatrix} is empty.
     * @throws OutOfRangeException if {@code subMatrix} does not fit into
     * this matrix from element in {@code (row, column)}.
     * @throws DimensionMismatchException if {@code subMatrix} is not rectangular
     * (not all rows have the same length) or empty.
     * @throws NullArgumentException if {@code subMatrix} is {@code null}.
     * @since 2.0
     */
    void setSubMatrix(double[][] subMatrix, int row, int column)
        throws NoDataException, OutOfRangeException,
        DimensionMismatchException, NullArgumentException;
    /**
     * Get the entries at the given row index as a row matrix. Row indices start
     * at 0.
     *
     * @param row Row to be fetched.
     * @return the row matrix containing the entries of the specified row.
     * @throws OutOfRangeException if the specified row index is invalid.
     */
    RealMatrix getRowMatrix(int row) throws OutOfRangeException;
    /**
     * Sets the specified {@code row} of {@code this} matrix to the entries of
     * the specified row {@code matrix}. Row indices start at 0.
     *
     * @param row Row to be set.
     * @param matrix Row matrix to be copied (must have one row and the same
     * number of columns as the instance).
     * @throws OutOfRangeException if the specified row index is invalid.
     * @throws MatrixDimensionMismatchException if the row dimension of the
     * {@code matrix} is not {@code 1}, or the column dimensions of {@code this}
     * and {@code matrix} do not match.
     */
    void setRowMatrix(int row, RealMatrix matrix)
        throws OutOfRangeException, MatrixDimensionMismatchException;
    /**
     * Get the entries at the given column index as a column matrix. Column
     * indices start at 0.
     *
     * @param column Column to be fetched.
     * @return the column matrix containing the entries of the specified column.
     * @throws OutOfRangeException if the specified column index is invalid.
     */
    RealMatrix getColumnMatrix(int column)
        throws OutOfRangeException;
    /**
     * Sets the specified {@code column} of {@code this} matrix to the entries
     * of the specified column {@code matrix}. Column indices start at 0.
     *
     * @param column Column to be set.
     * @param matrix Column matrix to be copied (must have one column and the
     * same number of rows as the instance).
     * @throws OutOfRangeException if the specified column index is invalid.
     * @throws MatrixDimensionMismatchException if the column dimension of the
     * {@code matrix} is not {@code 1}, or the row dimensions of {@code this}
     * and {@code matrix} do not match.
     */
    void setColumnMatrix(int column, RealMatrix matrix)
        throws OutOfRangeException, MatrixDimensionMismatchException;
    /**
     * Returns the entries in row number {@code row} as a vector. Row indices
     * start at 0.
     *
     * @param row Row to be fetched.
     * @return a row vector.
     * @throws OutOfRangeException if the specified row index is invalid.
     */
    RealVector getRowVector(int row)
        throws OutOfRangeException;
    /**
     * Sets the specified {@code row} of {@code this} matrix to the entries of
     * the specified {@code vector}. Row indices start at 0.
     *
     * @param row Row to be set.
     * @param vector row vector to be copied (must have the same number of
     * column as the instance).
     * @throws OutOfRangeException if the specified row index is invalid.
     * @throws MatrixDimensionMismatchException if the {@code vector} dimension
     * does not match the column dimension of {@code this} matrix.
     */
    void setRowVector(int row, RealVector vector)
        throws OutOfRangeException, MatrixDimensionMismatchException;
    /**
     * Get the entries at the given column index as a vector. Column indices
     * start at 0.
     *
     * @param column Column to be fetched.
     * @return a column vector.
     * @throws OutOfRangeException if the specified column index is invalid.
     */
    RealVector getColumnVector(int column)
        throws OutOfRangeException;
    /**
     * Sets the specified {@code column} of {@code this} matrix to the entries
     * of the specified {@code vector}. Column indices start at 0.
     *
     * @param column Column to be set.
     * @param vector column vector to be copied (must have the same number of
     * rows as the instance).
     * @throws OutOfRangeException if the specified column index is invalid.
     * @throws MatrixDimensionMismatchException if the {@code vector} dimension
     * does not match the row dimension of {@code this} matrix.
     */
    void setColumnVector(int column, RealVector vector)
        throws OutOfRangeException, MatrixDimensionMismatchException;
    /**
     * Get the entries at the given row index. Row indices start at 0.
     *
     * @param row Row to be fetched.
     * @return the array of entries in the row.
     * @throws OutOfRangeException if the specified row index is not valid.
     */
    double[] getRow(int row) throws OutOfRangeException;
    /**
     * Sets the specified {@code row} of {@code this} matrix to the entries
     * of the specified {@code array}. Row indices start at 0.
     *
     * @param row Row to be set.
     * @param array Row array to be copied (must have the same number of
     * columns as the instance).
     * @throws OutOfRangeException if the specified row index is invalid.
     * @throws MatrixDimensionMismatchException if the {@code array} length does
     * not match the column dimension of {@code this} matrix.
     */
    void setRow(int row, double[] array)
        throws OutOfRangeException, MatrixDimensionMismatchException;
    /**
     * Get the entries at the given column index as an array. Column indices
     * start at 0.
     *
     * @param column Column to be fetched.
     * @return the array of entries in the column.
     * @throws OutOfRangeException if the specified column index is not valid.
     */
    double[] getColumn(int column) throws OutOfRangeException;
    /**
     * Sets the specified {@code column} of {@code this} matrix to the entries
     * of the specified {@code array}. Column indices start at 0.
     *
     * @param column Column to be set.
     * @param array Column array to be copied (must have the same number of
     * rows as the instance).
     * @throws OutOfRangeException if the specified column index is invalid.
     * @throws MatrixDimensionMismatchException if the {@code array} length does
     * not match the row dimension of {@code this} matrix.
     */
    void setColumn(int column, double[] array)
        throws OutOfRangeException, MatrixDimensionMismatchException;
    /**
     * Get the entry in the specified row and column. Row and column indices
     * start at 0.
     *
     * @param row Row index of entry to be fetched.
     * @param column Column index of entry to be fetched.
     * @return the matrix entry at {@code (row, column)}.
     * @throws OutOfRangeException if the row or column index is not valid.
     */
    double getEntry(int row, int column) throws OutOfRangeException;
    /**
     * Set the entry in the specified row and column. Row and column indices
     * start at 0.
     *
     * @param row Row index of entry to be set.
     * @param column Column index of entry to be set.
     * @param value the new value of the entry.
     * @throws OutOfRangeException if the row or column index is not valid.
     * @since 2.0
     */
    void setEntry(int row, int column, double value) throws OutOfRangeException;
    /**
     * Adds (in place) the specified value to the specified entry of
     * {@code this} matrix. Row and column indices start at 0.
     *
     * @param row Row index of the entry to be modified.
     * @param column Column index of the entry to be modified.
     * @param increment value to add to the matrix entry.
     * @throws OutOfRangeException if the row or column index is not valid.
     * @since 2.0
     */
    void addToEntry(int row, int column, double increment) throws OutOfRangeException;
    /**
     * Multiplies (in place) the specified entry of {@code this} matrix by the
     * specified value. Row and column indices start at 0.
     *
     * @param row Row index of the entry to be modified.
     * @param column Column index of the entry to be modified.
     * @param factor Multiplication factor for the matrix entry.
     * @throws OutOfRangeException if the row or column index is not valid.
     * @since 2.0
     */
    void multiplyEntry(int row, int column, double factor) throws OutOfRangeException;
    /**
     * Returns the transpose of this matrix.
     *
     * @return the transpose of this matrix
     */
    RealMatrix transpose();
    /**
     * Returns the <a href="http://mathworld.wolfram.com/MatrixTrace.html">
     * trace</a> of the matrix (the sum of the elements on the main diagonal).
     *
     * @return the trace.
     * @throws NonSquareMatrixException if the matrix is not square.
     */
    double getTrace() throws NonSquareMatrixException;
    /**
     * Returns the result of multiplying this by the vector {@code v}.
     *
     * @param v the vector by which to multiply {@code this} matrix
     * @return {@code this * v}
     * @throws DimensionMismatchException if the length of {@code v} does not
     * match the column dimension of {@code this}.
     */
    double[] operate(double[] v) throws DimensionMismatchException;
    /**
     * Returns the result of multiplying this by the vector {@code v}.
     *
     * @param v the vector by which to multiply {@code this} matrix
     * @return {@code this * v}
     * @throws DimensionMismatchException if the dimension of {@code v} does not
     * match the column dimension of {@code this}.
     */
    RealVector operate(RealVector v) throws DimensionMismatchException;
    /**
     * Returns the (row) vector result of premultiplying this by the vector {@code v}.
     *
     * @param v the row vector to premultiply by
     * @return {@code v * this}
     * @throws DimensionMismatchException if the length of {@code v} does not
     * match the row dimension of {@code this}.
     */
    double[] preMultiply(double[] v) throws DimensionMismatchException;
    /**
     * Returns the (row) vector result of premultiplying this by the vector {@code v}.
     *
     * @param v the row vector to premultiply by
     * @return {@code v * this}
     * @throws DimensionMismatchException if the dimension of {@code v} does not
     * match the row dimension of {@code this}.
     */
    RealVector preMultiply(RealVector v) throws DimensionMismatchException;
    /**
     * Visit (and possibly change) all matrix entries in row order.
     * <p>Row order starts at the upper left element and iterates through all
     * elements of a row from left to right before going to the leftmost
     * element of the next row.</p>
     * @param visitor visitor used to process all matrix entries
     * @see #walkInRowOrder(RealMatrixPreservingVisitor)
     * @see #walkInRowOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixChangingVisitor#end()} at the end
     * of the walk
     */
    double walkInRowOrder(RealMatrixChangingVisitor visitor);
    /**
     * Visit (but don't change) all matrix entries in row order.
     * <p>Row order starts at the upper left element and iterates through all
     * elements of a row from left to right before going to the leftmost
     * element of the next row.</p>
     * @param visitor visitor used to process all matrix entries
     * @see #walkInRowOrder(RealMatrixChangingVisitor)
     * @see #walkInRowOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixPreservingVisitor#end()} at the end
     * of the walk
     */
    double walkInRowOrder(RealMatrixPreservingVisitor visitor);
    /**
     * Visit (and possibly change) some matrix entries in row order.
     * <p>Row order starts at the upper left element and iterates through all
     * elements of a row from left to right before going to the leftmost
     * element of the next row.</p>
     * @param visitor visitor used to process the matrix entries in the selected range
     * @param startRow Initial row index
     * @param endRow Final row index (inclusive)
     * @param startColumn Initial column index
     * @param endColumn Final column index (inclusive)
     * @throws OutOfRangeException if the indices are not valid.
     * @throws NumberIsTooSmallException if {@code endRow < startRow} or
     * {@code endColumn < startColumn}.
     * @see #walkInRowOrder(RealMatrixChangingVisitor)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixChangingVisitor#end()} at the end
     * of the walk
     */
    double walkInRowOrder(RealMatrixChangingVisitor visitor, int startRow,
        int endRow, int startColumn, int endColumn)
        throws OutOfRangeException, NumberIsTooSmallException;
    /**
     * Visit (but don't change) some matrix entries in row order.
     * <p>Row order starts at the upper left element and iterates through all
     * elements of a row from left to right before going to the leftmost
     * element of the next row.</p>
     * @param visitor visitor used to process the matrix entries in the selected range
     * @param startRow Initial row index
     * @param endRow Final row index (inclusive)
     * @param startColumn Initial column index
     * @param endColumn Final column index (inclusive)
     * @throws OutOfRangeException if the indices are not valid.
     * @throws NumberIsTooSmallException if {@code endRow < startRow} or
     * {@code endColumn < startColumn}.
     * @see #walkInRowOrder(RealMatrixChangingVisitor)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor)
     * @see #walkInRowOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixPreservingVisitor#end()} at the end
     * of the walk
     */
    double walkInRowOrder(RealMatrixPreservingVisitor visitor, int startRow,
        int endRow, int startColumn, int endColumn)
        throws OutOfRangeException, NumberIsTooSmallException;
    /**
     * Visit (and possibly change) all matrix entries in column order.
     * <p>Column order starts at the upper left element and iterates through
     * all elements of a column from top to bottom before going to the topmost
     * element of the next column.</p>
     * @param visitor visitor used to process all matrix entries
     * @see #walkInRowOrder(RealMatrixChangingVisitor)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor)
     * @see #walkInRowOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixChangingVisitor#end()} at the end
     * of the walk
     */
    double walkInColumnOrder(RealMatrixChangingVisitor visitor);
    /**
     * Visit (but don't change) all matrix entries in column order.
     * <p>Column order starts at the upper left element and iterates through
     * all elements of a column from top to bottom before going to the topmost
     * element of the next column.</p>
     * @param visitor visitor used to process all matrix entries
     * @see #walkInRowOrder(RealMatrixChangingVisitor)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor)
     * @see #walkInRowOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixPreservingVisitor#end()} at the end
     * of the walk
     */
    double walkInColumnOrder(RealMatrixPreservingVisitor visitor);
    /**
     * Visit (and possibly change) some matrix entries in column order.
     * <p>Column order starts at the upper left element and iterates through
     * all elements of a column from top to bottom before going to the topmost
     * element of the next column.</p>
     * @param visitor visitor used to process the matrix entries in the selected range
     * @param startRow Initial row index
     * @param endRow Final row index (inclusive)
     * @param startColumn Initial column index
     * @param endColumn Final column index (inclusive)
     * @throws OutOfRangeException if the indices are not valid.
     * @throws NumberIsTooSmallException if {@code endRow < startRow} or
     * {@code endColumn < startColumn}.
     * @see #walkInRowOrder(RealMatrixChangingVisitor)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor)
     * @see #walkInRowOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixChangingVisitor#end()} at the end
     * of the walk
     */
    double walkInColumnOrder(RealMatrixChangingVisitor visitor, int startRow,
        int endRow, int startColumn, int endColumn)
        throws OutOfRangeException, NumberIsTooSmallException;
    /**
     * Visit (but don't change) some matrix entries in column order.
     * <p>Column order starts at the upper left element and iterates through
     * all elements of a column from top to bottom before going to the topmost
     * element of the next column.</p>
     * @param visitor visitor used to process the matrix entries in the selected range
     * @param startRow Initial row index
     * @param endRow Final row index (inclusive)
     * @param startColumn Initial column index
     * @param endColumn Final column index (inclusive)
     * @throws OutOfRangeException if the indices are not valid.
     * @throws NumberIsTooSmallException if {@code endRow < startRow} or
     * {@code endColumn < startColumn}.
     * @see #walkInRowOrder(RealMatrixChangingVisitor)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor)
     * @see #walkInRowOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixPreservingVisitor#end()} at the end
     * of the walk
     */
    double walkInColumnOrder(RealMatrixPreservingVisitor visitor, int startRow,
        int endRow, int startColumn, int endColumn)
        throws OutOfRangeException, NumberIsTooSmallException;
    /**
     * Visit (and possibly change) all matrix entries using the fastest possible order.
     * <p>The fastest walking order depends on the exact matrix class. It may be
     * different from traditional row or column orders.</p>
     * @param visitor visitor used to process all matrix entries
     * @see #walkInRowOrder(RealMatrixChangingVisitor)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor)
     * @see #walkInRowOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixChangingVisitor#end()} at the end
     * of the walk
     */
    double walkInOptimizedOrder(RealMatrixChangingVisitor visitor);
    /**
     * Visit (but don't change) all matrix entries using the fastest possible order.
     * <p>The fastest walking order depends on the exact matrix class. It may be
     * different from traditional row or column orders.</p>
     * @param visitor visitor used to process all matrix entries
     * @see #walkInRowOrder(RealMatrixChangingVisitor)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor)
     * @see #walkInRowOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixPreservingVisitor#end()} at the end
     * of the walk
     */
    double walkInOptimizedOrder(RealMatrixPreservingVisitor visitor);
    /**
     * Visit (and possibly change) some matrix entries using the fastest possible order.
     * <p>The fastest walking order depends on the exact matrix class. It may be
     * different from traditional row or column orders.</p>
     * @param visitor visitor used to process the matrix entries in the selected range
     * @param startRow Initial row index
     * @param endRow Final row index (inclusive)
     * @param startColumn Initial column index
     * @param endColumn Final column index (inclusive)
     * @throws OutOfRangeException if the indices are not valid.
     * @throws NumberIsTooSmallException if {@code endRow < startRow} or
     * {@code endColumn < startColumn}.
     * @see #walkInRowOrder(RealMatrixChangingVisitor)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor)
     * @see #walkInRowOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixChangingVisitor#end()} at the end
     * of the walk
     */
    double walkInOptimizedOrder(RealMatrixChangingVisitor visitor,
        int startRow, int endRow, int startColumn, int endColumn)
        throws OutOfRangeException, NumberIsTooSmallException;
    /**
     * Visit (but don't change) some matrix entries using the fastest possible order.
     * <p>The fastest walking order depends on the exact matrix class. It may be
     * different from traditional row or column orders.</p>
     * @param visitor visitor used to process the matrix entries in the selected range
     * @param startRow Initial row index
     * @param endRow Final row index (inclusive)
     * @param startColumn Initial column index
     * @param endColumn Final column index (inclusive)
     * @throws OutOfRangeException if the indices are not valid.
     * @throws NumberIsTooSmallException if {@code endRow < startRow} or
     * {@code endColumn < startColumn}.
     * @see #walkInRowOrder(RealMatrixChangingVisitor)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor)
     * @see #walkInRowOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInRowOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor)
     * @see #walkInColumnOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @see #walkInColumnOrder(RealMatrixPreservingVisitor, int, int, int, int)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixPreservingVisitor)
     * @see #walkInOptimizedOrder(RealMatrixChangingVisitor, int, int, int, int)
     * @return the value returned by {@link RealMatrixPreservingVisitor#end()} at the end
     * of the walk
     */
    double walkInOptimizedOrder(RealMatrixPreservingVisitor visitor,
        int startRow, int endRow, int startColumn, int endColumn)
        throws OutOfRangeException, NumberIsTooSmallException;
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.lite.http;
import java.io.IOException;
import java.util.logging.Logger;
import org.apache.tomcat.lite.http.HttpChannel.HttpService;
import org.apache.tomcat.lite.io.CBuffer;
import org.apache.tomcat.lite.io.FileConnector;
import org.apache.tomcat.lite.io.BBucket;
/**
* Mapper, which implements the servlet API mapping rules (which are derived
* from the HTTP rules).
*
* This class doesn't use JNDI.
*/
public class BaseMapper {
    // Logger shared by all BaseMapper instances.
    private static Logger logger =
        Logger.getLogger(BaseMapper.class.getName());
    // TODO:
    /**
     * Low-level mapping hook.
     *
     * Mapping should be done on bytes - as received from net, before
     * translation to chars. This would allow setting the default charset
     * for the context - or even executing the servlet and letting it specify
     * the charset to use for further decoding.
     */
    public static interface Mapper {
        /**
         * Map the given request host and URL, storing the result in
         * {@code md}.
         *
         * @param host request host name, as received from the network
         * @param url request URI, as received from the network
         * @param md output parameter populated with the mapping result
         */
        public void map(BBucket host, BBucket url, MappingData md);
    }
    /**
     * Like BaseMapper, for a Context.
     *
     * Holds a single {@link BaseMapper.Context} and delegates wrapper
     * registration to the inherited BaseMapper methods.
     */
    public static class ServiceMapper extends BaseMapper {
        /**
         * Context associated with this wrapper, used for wrapper mapping.
         */
        public BaseMapper.Context contextMapElement = new BaseMapper.Context(this);
        /**
         * Set context, used for wrapper mapping (request dispatcher).
         *
         * @param path Context path
         * @param welcomeResources Welcome files defined for this context
         */
        public void setContext(String path, String[] welcomeResources) {
            contextMapElement.name = path;
            contextMapElement.welcomeResources = welcomeResources;
        }
        /**
         * Add a wrapper to the context associated with this wrapper.
         *
         * @param path Wrapper mapping
         * @param wrapper The Wrapper object
         */
        public void addWrapper(String path, Object wrapper) {
            addWrapper(contextMapElement, path, wrapper);
        }
        /**
         * Add a wrapper to the context associated with this wrapper.
         *
         * @param path Wrapper mapping
         * @param wrapper The Wrapper object
         * @param jspWildCard true if the wrapper corresponds to the JspServlet
         * and the mapping path contains a wildcard
         */
        public void addWrapper(String path, Object wrapper, boolean jspWildCard) {
            addWrapper(contextMapElement, path, wrapper, jspWildCard);
        }
        /**
         * Remove a wrapper from the context associated with this wrapper.
         *
         * @param path Wrapper mapping
         */
        public void removeWrapper(String path) {
            removeWrapper(contextMapElement, path);
        }
//        /**
//         * Map the specified URI relative to the context,
//         * mutating the given mapping data.
//         *
//         * @param uri URI
//         * @param mappingData This structure will contain the result of the mapping
//         *                    operation
//         */
//        public void map(CBuffer uri, MappingData mappingData)
//            throws Exception {
//
//            CBuffer uricc = uri.getCharBuffer();
//            internalMapWrapper(contextMapElement, uricc, mappingData);
//
//        }
    }
/**
 * Array containing the virtual hosts definitions.
 */
// Kept sorted by name (see insertMap()/find()). Mutators are synchronized
// and replace the array wholesale, so readers can use it without locking.
Host[] hosts = new Host[0];

/**
 * If no other host is found.
 * For single-host servers ( most common ) this is the only one
 * used.
 */
Host defaultHost = new Host();

// Gives the default host an (initially empty) context list.
public BaseMapper() {
    defaultHost.contextList = new ContextList();
}
// --------------------------------------------------------- Public Methods

/**
 * Register a new virtual host.
 *
 * @param name host name; null is treated as "localhost"
 * @return the freshly built Host object. Note: it is returned even when a
 *         host with the same name already existed, in which case the new
 *         object is not registered (insertMap rejects duplicates).
 */
public synchronized Host addHost(String name) {
    if (name == null) {
        name = "localhost";
    }

    Host created = new Host();
    created.name = name;
    created.contextList = new ContextList();

    Host[] grown = new Host[hosts.length + 1];
    boolean inserted = insertMap(hosts, grown, created);
    if (inserted) {
        hosts = grown;
    }
    return created;
}
/**
 * Remove a host from the mapper, plus any aliases that point at it.
 *
 * @param name Virtual host name
 */
public synchronized void removeHost(String name) {
    // Find and remove the old host
    int pos = find(hosts, name);
    if (pos < 0) {
        return;
    }
    Object host = hosts[pos].object;
    Host[] newHosts = new Host[hosts.length - 1];
    if (removeMap(hosts, newHosts, name)) {
        hosts = newHosts;
    }
    // Primary hosts are created with object == null (see addHost); only
    // aliases carry a non-null back-pointer in 'object'. Matching on a
    // null 'host' would make every other primary host (object == null)
    // compare equal and be wrongly removed, so bail out here.
    // TODO(review): as a consequence, aliases of a removed *primary* host
    // are never cleaned up - confirm intended semantics.
    if (host == null) {
        return;
    }
    // Remove all aliases (they will map to the same host object)
    for (int i = 0; i < newHosts.length; i++) {
        if (newHosts[i].object == host) {
            Host[] newHosts2 = new Host[hosts.length - 1];
            if (removeMap(hosts, newHosts2, newHosts[i].name)) {
                hosts = newHosts2;
            }
        }
    }
}
/**
 * Add an alias to an existing host.
 * @param name The name of the host
 * @param alias The alias to add
 */
public synchronized void addHostAlias(String name, String alias) {
    int pos = find(hosts, name);
    if (pos < 0) {
        // Should not be adding an alias for a host that doesn't exist but
        // just in case...
        return;
    }
    Host realHost = hosts[pos];

    Host aliasEntry = new Host();
    aliasEntry.name = alias;
    aliasEntry.contextList = realHost.contextList; // contexts are shared
    aliasEntry.object = realHost;                  // back-pointer to real host

    Host[] grown = new Host[hosts.length + 1];
    boolean inserted = insertMap(hosts, grown, aliasEntry);
    if (inserted) {
        hosts = grown;
    }
}
// Convenience overload: wraps the String in a CBuffer and delegates.
private Host getHost(String host) {
    return getHost(CBuffer.newInstance().append(host));
}
/**
 * Select the virtual host for a request, falling back to
 * {@link #defaultHost} when no host name is supplied, no named hosts are
 * registered, or the name is unknown. With one registered host or fewer,
 * the default host is always used (see the TODO below).
 *
 * @param host host name from the request; may be null or empty
 */
private Host getHost(CBuffer host) {
    // A redundant 'host.equals("")' term was removed: it could only be
    // true when length() == 0 already short-circuited the expression.
    if (hosts == null || hosts.length <= 1 || host == null
            || host.length() == 0) {
        return defaultHost;
    } else {
        Host[] hosts = this.hosts; // local snapshot of the sorted array
        // TODO: if hosts.length == 1 or defaultHost ?
        int pos = findIgnoreCase(hosts, host);
        // find* returns the closest entry <= host, so verify exact match.
        if ((pos != -1) && (host.equalsIgnoreCase(hosts[pos].name))) {
            return hosts[pos];
        } else {
            return defaultHost;
        }
    }
}
// Returns the mapper Host for hostName, creating it if missing.
// NOTE(review): getHost() never returns null (it falls back to
// defaultHost), so the addHost() branch below is currently dead code and
// unknown host names silently land on defaultHost - confirm intended.
private Host getOrCreateHost(String hostName) {
    Host host = getHost(CBuffer.newInstance().append(hostName));
    if (host == null) {
        host = addHost(hostName);
    }
    return host;
}
// Contexts

/**
 * Add a new Context to an existing Host.
 *
 * @param hostName Virtual host name this context belongs to
 * @param path Context path (null is treated as "/")
 * @param context Context object
 * @param welcomeResources Welcome files defined for this context
 * @param resources Static resources of the context
 * @param ctxService if non-null, installed as the context's default
 *        (catch-all) wrapper
 * @return the already-registered mapping for the path if one exists,
 *         otherwise the newly created one
 */
public BaseMapper.Context addContext(String hostName, String path, Object context,
        String[] welcomeResources, FileConnector resources,
        HttpChannel.HttpService ctxService) {
    if (path == null) {
        path = "/";
    }
    Host host = getOrCreateHost(hostName);
    int slashCount = slashCount(path);
    synchronized (host) {
        BaseMapper.Context[] contexts = host.contextList.contexts;
        // Update nesting: the deepest context path bounds how far the
        // prefix walk in internalMap() must search.
        if (slashCount > host.contextList.nesting) {
            host.contextList.nesting = slashCount;
        }
        // Reuse an existing context registered under the same path.
        for (int i = 0; i < contexts.length; i++) {
            if (path.equals(contexts[i].name)) {
                return contexts[i];
            }
        }
        BaseMapper.Context[] newContexts = new BaseMapper.Context[contexts.length + 1];
        BaseMapper.Context newContext = new BaseMapper.Context(this);
        newContext.name = path;
        newContext.object = context;
        if (welcomeResources != null) {
            newContext.welcomeResources = welcomeResources;
        }
        newContext.resources = resources;
        if (ctxService != null) {
            // Service provided for the whole context: run it as the
            // default wrapper.
            newContext.defaultWrapper = new BaseMapper.ServiceMapping();
            newContext.defaultWrapper.object = ctxService;
        }
        if (insertMap(contexts, newContexts, newContext)) {
            host.contextList.contexts = newContexts;
        }
        return newContext;
    }
}
/**
 * Remove a context from an existing host.
 *
 * @param hostName Virtual host name this context belongs to
 * @param path Context path
 */
public void removeContext(String hostName, String path) {
    Host host = getHost(hostName);
    synchronized (host) {
        BaseMapper.Context[] contexts = host.contextList.contexts;
        if( contexts.length == 0 ){
            return;
        }
        BaseMapper.Context[] newContexts = new BaseMapper.Context[contexts.length - 1];
        if (removeMap(contexts, newContexts, path)) {
            host.contextList.contexts = newContexts;
            // Recalculate nesting: the removed context may have been the
            // one with the deepest path.
            host.contextList.nesting = 0;
            for (int i = 0; i < newContexts.length; i++) {
                int slashCount = slashCount(newContexts[i].name);
                if (slashCount > host.contextList.nesting) {
                    host.contextList.nesting = slashCount;
                }
            }
        }
    }
}
/**
 * Add a new Wrapper to an existing Context.
 *
 * @param hostName Virtual host name this wrapper belongs to
 * @param contextPath Context path this wrapper belongs to
 * @param path Wrapper mapping
 * @param wrapper Wrapper object
 */
public void addWrapper(String hostName, String contextPath, String path,
        Object wrapper) {
    // Delegates with jspWildCard == false.
    addWrapper(hostName, contextPath, path, wrapper, false);
}
/**
 * Add a wrapper mapping to the named context of the named host.
 * Logs and returns when the context path is unknown.
 */
public void addWrapper(String hostName, String contextPath, String path,
        Object wrapper, boolean jspWildCard) {
    Host host = getHost(hostName);
    BaseMapper.Context[] contexts = host.contextList.contexts;
    int idx = find(contexts, contextPath);
    if (idx < 0) {
        logger.severe("No context found: " + contextPath);
        return;
    }
    // find() returns the closest entry <= contextPath; require an exact hit.
    BaseMapper.Context ctx = contexts[idx];
    if (ctx.name.equals(contextPath)) {
        addWrapper(ctx, path, wrapper, jspWildCard);
    }
}
// Convenience overload: non-JSP-wildcard wrapper on the given context.
public void addWrapper(BaseMapper.Context context, String path, Object wrapper) {
    addWrapper(context, path, wrapper, false);
}
/**
 * Adds a wrapper to the given context, classifying the pattern into one
 * of the four servlet mapping rules (wildcard, extension, default, exact).
 *
 * @param context The context to which to add the wrapper
 * @param path Wrapper mapping
 * @param wrapper The Wrapper object
 * @param jspWildCard true if the wrapper corresponds to the JspServlet
 *   and the mapping path contains a wildcard; false otherwise
 */
protected void addWrapper(BaseMapper.Context context, String path, Object wrapper,
        boolean jspWildCard) {
    synchronized (context) {
        BaseMapper.ServiceMapping newWrapper = new BaseMapper.ServiceMapping();
        newWrapper.object = wrapper;
        newWrapper.jspWildCard = jspWildCard;
        if (path.endsWith("/*")) {
            // Wildcard wrapper: stored without the trailing "/*".
            newWrapper.name = path.substring(0, path.length() - 2);
            BaseMapper.ServiceMapping[] oldWrappers = context.wildcardWrappers;
            BaseMapper.ServiceMapping[] newWrappers =
                new BaseMapper.ServiceMapping[oldWrappers.length + 1];
            if (insertMap(oldWrappers, newWrappers, newWrapper)) {
                context.wildcardWrappers = newWrappers;
                // Track the deepest wildcard prefix for the mapping walk.
                int slashCount = slashCount(newWrapper.name);
                if (slashCount > context.nesting) {
                    context.nesting = slashCount;
                }
            }
        } else if (path.startsWith("*.")) {
            // Extension wrapper: stored as the bare extension.
            newWrapper.name = path.substring(2);
            BaseMapper.ServiceMapping[] oldWrappers = context.extensionWrappers;
            BaseMapper.ServiceMapping[] newWrappers =
                new BaseMapper.ServiceMapping[oldWrappers.length + 1];
            if (insertMap(oldWrappers, newWrappers, newWrapper)) {
                context.extensionWrappers = newWrappers;
            }
        } else if (path.equals("/")) {
            // Default wrapper: single slot, silently replaced.
            newWrapper.name = "";
            context.defaultWrapper = newWrapper;
        } else {
            // Exact wrapper
            newWrapper.name = path;
            BaseMapper.ServiceMapping[] oldWrappers = context.exactWrappers;
            BaseMapper.ServiceMapping[] newWrappers =
                new BaseMapper.ServiceMapping[oldWrappers.length + 1];
            if (insertMap(oldWrappers, newWrappers, newWrapper)) {
                context.exactWrappers = newWrappers;
            }
        }
    }
}
/**
 * Remove a wrapper from an existing context.
 *
 * @param hostName Virtual host name this wrapper belongs to
 * @param contextPath Context path this wrapper belongs to
 * @param path Wrapper mapping
 */
public void removeWrapper(String hostName, String contextPath,
        String path) {
    Host host = getHost(hostName);
    BaseMapper.Context[] contexts = host.contextList.contexts;
    int idx = find(contexts, contextPath);
    if (idx < 0) {
        return; // unknown context path
    }
    // find() returns the closest entry <= contextPath; require exact hit.
    BaseMapper.Context ctx = contexts[idx];
    if (ctx.name.equals(contextPath)) {
        removeWrapper(ctx, path);
    }
}
/**
 * Remove a wrapper mapping from the given context, classifying the
 * pattern the same way addWrapper() does.
 *
 * @param context the context holding the mapping tables
 * @param path Wrapper mapping to remove
 */
protected void removeWrapper(BaseMapper.Context context, String path) {
    synchronized (context) {
        if (path.endsWith("/*")) {
            // Wildcard wrapper
            String name = path.substring(0, path.length() - 2);
            BaseMapper.ServiceMapping[] oldWrappers = context.wildcardWrappers;
            BaseMapper.ServiceMapping[] newWrappers =
                new BaseMapper.ServiceMapping[oldWrappers.length - 1];
            if (removeMap(oldWrappers, newWrappers, name)) {
                // Recalculate nesting: the removed entry may have held
                // the deepest prefix.
                context.nesting = 0;
                for (int i = 0; i < newWrappers.length; i++) {
                    int slashCount = slashCount(newWrappers[i].name);
                    if (slashCount > context.nesting) {
                        context.nesting = slashCount;
                    }
                }
                context.wildcardWrappers = newWrappers;
            }
        } else if (path.startsWith("*.")) {
            // Extension wrapper
            String name = path.substring(2);
            BaseMapper.ServiceMapping[] oldWrappers = context.extensionWrappers;
            BaseMapper.ServiceMapping[] newWrappers =
                new BaseMapper.ServiceMapping[oldWrappers.length - 1];
            if (removeMap(oldWrappers, newWrappers, name)) {
                context.extensionWrappers = newWrappers;
            }
        } else if (path.equals("/")) {
            // Default wrapper
            context.defaultWrapper = null;
        } else {
            // Exact wrapper
            String name = path;
            BaseMapper.ServiceMapping[] oldWrappers = context.exactWrappers;
            BaseMapper.ServiceMapping[] newWrappers =
                new BaseMapper.ServiceMapping[oldWrappers.length - 1];
            if (removeMap(oldWrappers, newWrappers, name)) {
                context.exactWrappers = newWrappers;
            }
        }
    }
}
/**
 * Map the specified host name and URI, mutating the given mapping data.
 *
 * @param host Virtual host name (empty selects the default host)
 * @param uri URI
 * @param mappingData This structure will contain the result of the mapping
 *                    operation
 */
public void map(CBuffer host, CBuffer uri,
                MappingData mappingData)
    throws Exception {
    // An empty host buffer is passed down as null so getHost() falls
    // back to the default host.
    internalMap(host.length() == 0 ? null :
        host, uri, mappingData);
}
// -------------------------------------------------------- Private Methods

// public Context mapContext(CBuffer host, CBuffer url);

/**
 * Map the specified URI: pick the virtual host, then the context (longest
 * matching prefix), then delegate wrapper mapping.
 */
private final void internalMap(CBuffer host, CBuffer uri,
        MappingData mappingData)
    throws Exception {
    BaseMapper.Context[] contexts = null;
    BaseMapper.Context context = null;
    int nesting = 0;

    // Virtual host mapping
    Host mappedHost = getHost(host);
    contexts = mappedHost.contextList.contexts;
    nesting = mappedHost.contextList.nesting;

    // Context mapping: nothing to do for a host without contexts.
    if (contexts.length == 0) {
        return;
    }
    if (mappingData.context == null) {
        if (nesting < 1 || contexts.length == 1 && "".equals(contexts[0].name)) {
            // if 1 context (default) -> fast return
            context = contexts[0];
        } else if (nesting == 1) {
            // if all contexts are 1-component-only: the first path
            // segment identifies the context directly.
            int nextSlash = uri.indexOf('/', 1);
            if (nextSlash == -1) {
                nextSlash = uri.length();
            }
            mappingData.contextPath.set(uri, 0, nextSlash);
            int pos = find(contexts, uri);
            if (pos == -1) {
                // Fall back to the root context.
                pos = find(contexts, "/");
            }
            if (pos >= 0) {
                context = contexts[pos];
            }
        } else {
            // General case: walk backwards over '/'-delimited prefixes of
            // the URI until one matches a context name exactly or at a
            // segment boundary.
            int pos = find(contexts, uri);
            if (pos >= 0) {
                int lastSlash = -1;
                int length = -1;
                boolean found = false;
                CBuffer tmp = mappingData.tmpPrefix;
                tmp.wrap(uri, 0, uri.length());
                while (pos >= 0) {
                    if (tmp.startsWith(contexts[pos].name)) {
                        length = contexts[pos].name.length();
                        if (tmp.length() == length) {
                            found = true;
                            break;
                        } else if (tmp.startsWithIgnoreCase("/", length)) {
                            // Prefix ends at a path-segment boundary.
                            found = true;
                            break;
                        }
                    }
                    // First truncation jumps straight to the deepest
                    // possible context depth; later ones strip one
                    // segment at a time.
                    if (lastSlash == -1) {
                        lastSlash = tmp.nthSlash(nesting + 1);
                    } else {
                        lastSlash = tmp.lastIndexOf('/');
                    }
                    tmp.delete(lastSlash);
                    pos = find(contexts, tmp);
                }
                if (!found) {
                    // No prefix matched: use the root context if present.
                    if (contexts[0].name.equals("")) {
                        context = contexts[0];
                    }
                } else {
                    context = contexts[pos];
                }
            }
        }
        if (context != null) {
            mappingData.context = context.object;
            mappingData.contextPath.set(context.name);
        }
    }

    // Wrapper mapping
    if ((context != null) && (mappingData.getServiceObject() == null)) {
        internalMapWrapper(context, uri, mappingData);
    }
}
/**
 * Wrapper mapping, using servlet rules: strips the context path off the
 * URL to form the servlet path, then delegates to mapAfterContext().
 *
 * @throws IOException if the URL is shorter than the context name
 */
protected final void internalMapWrapper(
        BaseMapper.Context context,
        CBuffer url,
        MappingData mappingData)
    throws Exception {
    boolean noServletPath = false;
    if (url.length() < context.name.length()) {
        throw new IOException("Invalid mapping " + context.name + " " +
                url);
    }
    try {
        // Set the servlet path.
        mappingData.tmpServletPath.set(url,
                context.name.length(),
                url.length() - context.name.length());

        if (mappingData.tmpServletPath.length() == 0) {
            mappingData.tmpServletPath.append('/');
            // This is just the context /example or /
            if (!context.name.equals("/")) {
                noServletPath = true;
            }
        }
        mapAfterContext(context, url, mappingData.tmpServletPath, mappingData,
                noServletPath);
    } catch (ArrayIndexOutOfBoundsException ex) {
        // Was: System.err.println(1) - the failure was swallowed with a
        // meaningless "1" on stderr. Keep the best-effort behavior (no
        // rethrow) but log enough context to diagnose the bad mapping.
        logger.log(java.util.logging.Level.SEVERE,
                "Mapping failed for url " + url, ex);
    }
}
/**
 * Applies the servlet mapping rules (exact, prefix, extension, welcome
 * files, default) to the context-relative path, mutating mappingData.
 *
 * @param urlNoContext the URL with the context prefix already stripped
 * @param noServletPath true when the request was just the bare context
 *        path (triggers a redirect to path + "/")
 */
void mapAfterContext(BaseMapper.Context context,
        CBuffer url, CBuffer urlNoContext,
        MappingData mappingData, boolean noServletPath)
    throws Exception {

    // Rule 1 -- Exact Match
    BaseMapper.ServiceMapping[] exactWrappers = context.exactWrappers;
    internalMapExactWrapper(exactWrappers, urlNoContext, mappingData);

    // Rule 2 -- Prefix Match
    boolean checkJspWelcomeFiles = false;
    BaseMapper.ServiceMapping[] wildcardWrappers = context.wildcardWrappers;
    if (mappingData.getServiceObject() == null) {
        internalMapWildcardWrapper(wildcardWrappers, context.nesting,
                urlNoContext, mappingData);
        if (mappingData.getServiceObject() != null
                && mappingData.service.jspWildCard) {
            if (urlNoContext.lastChar() == '/') {
                /*
                 * Path ending in '/' was mapped to JSP servlet based on
                 * wildcard match (e.g., as specified in url-pattern of a
                 * jsp-property-group.
                 * Force the context's welcome files, which are interpreted
                 * as JSP files (since they match the url-pattern), to be
                 * considered. See Bugzilla 27664.
                 */
                mappingData.service = null;
                checkJspWelcomeFiles = true;
            } else {
                // See Bugzilla 27704
                mappingData.wrapperPath.set(urlNoContext);
                mappingData.pathInfo.recycle();
            }
        }
    }

    if(mappingData.getServiceObject() == null && noServletPath) {
        // The path is empty, redirect to "/"
        mappingData.redirectPath.set(context.name);
        mappingData.redirectPath.append("/");
        return;
    }

    // Rule 3 -- Extension Match
    BaseMapper.ServiceMapping[] extensionWrappers = context.extensionWrappers;
    if (mappingData.getServiceObject() == null && !checkJspWelcomeFiles) {
        internalMapExtensionWrapper(extensionWrappers, urlNoContext, mappingData);
    }

    // Rule 4 -- Welcome resources processing for servlets
    if (mappingData.getServiceObject() == null) {
        boolean checkWelcomeFiles = checkJspWelcomeFiles;
        if (!checkWelcomeFiles) {
            checkWelcomeFiles = (urlNoContext.lastChar() == '/');
        }
        if (checkWelcomeFiles) {
            for (int i = 0; (i < context.welcomeResources.length)
                     && (mappingData.getServiceObject() == null); i++) {
                // NOTE(review): wpath (path + welcome file) is built here
                // but never consulted below - rules 4a-4c re-check the
                // bare urlNoContext instead. Tomcat's Mapper maps the
                // concatenated welcome path; confirm whether this is a bug.
                CBuffer wpath = mappingData.tmpWelcome;
                wpath.set(urlNoContext);
                wpath.append(context.welcomeResources[i]);

                // Rule 4a -- Welcome resources processing for exact match
                internalMapExactWrapper(exactWrappers, urlNoContext, mappingData);

                // Rule 4b -- Welcome resources processing for prefix match
                if (mappingData.getServiceObject() == null) {
                    internalMapWildcardWrapper
                        (wildcardWrappers, context.nesting,
                         urlNoContext, mappingData);
                }

                // Rule 4c -- Welcome resources processing
                //            for physical folder
                if (mappingData.getServiceObject() == null
                        && context.resources != null) {
                    String pathStr = urlNoContext.toString();
                    mapWelcomResource(context, urlNoContext, mappingData,
                            extensionWrappers, pathStr);
                }
            }
        }
    }

    // Rule 7 -- Default servlet
    if (mappingData.getServiceObject() == null && !checkJspWelcomeFiles) {
        if (context.defaultWrapper != null) {
            mappingData.service = context.defaultWrapper;
            mappingData.requestPath.set(urlNoContext);
            mappingData.wrapperPath.set(urlNoContext);
        }
        // Redirection to a folder
        if (context.resources != null && urlNoContext.lastChar() != '/') {
            String pathStr = urlNoContext.toString();
            mapDefaultServlet(context, urlNoContext, mappingData,
                    url,
                    pathStr);
        }
    }
}
/**
 * Filesystem-dependent method:
 * if pathStr corresponds to a directory, we'll need to redirect with /
 * at end.
 */
protected void mapDefaultServlet(BaseMapper.Context context,
        CBuffer path,
        MappingData mappingData,
        CBuffer url,
        String pathStr) throws IOException {
    boolean isDirectory = context.resources != null
            && context.resources.isDirectory(pathStr);
    if (isDirectory) {
        // Directory hit: send the client back to the same URL + "/".
        mappingData.redirectPath.set(url);
        mappingData.redirectPath.append("/");
    } else {
        // Plain resource: serve it via the default servlet paths.
        mappingData.requestPath.set(pathStr);
        mappingData.wrapperPath.set(pathStr);
    }
}
/**
 * Filesystem dependent method:
 * check if a resource exists in filesystem; if it does, try the
 * extension wrappers and fall back to the default wrapper.
 */
protected void mapWelcomResource(BaseMapper.Context context, CBuffer path,
        MappingData mappingData,
        BaseMapper.ServiceMapping[] extensionWrappers, String pathStr) {
    if (context.resources != null &&
            context.resources.isFile(pathStr)) {
        internalMapExtensionWrapper(extensionWrappers,
                path, mappingData);
        if (mappingData.getServiceObject() == null
                && context.defaultWrapper != null) {
            mappingData.service = context.defaultWrapper;
            // The original set these first from 'path' and then
            // immediately overwrote them from 'pathStr' (the caller
            // passes pathStr == path.toString()); the redundant first
            // pair of set() calls was dropped.
            mappingData.requestPath.set(pathStr);
            mappingData.wrapperPath.set(pathStr);
        }
    }
}
/**
 * Exact mapping: the context-relative path must equal a wrapper name.
 */
private final void internalMapExactWrapper
    (BaseMapper.ServiceMapping[] wrappers, CBuffer path, MappingData mappingData) {
    int pos = find(wrappers, path);
    // find() returns the closest entry <= path; require an exact hit.
    if (pos == -1 || !path.equals(wrappers[pos].name)) {
        return;
    }
    mappingData.requestPath.set(wrappers[pos].name);
    mappingData.wrapperPath.set(wrappers[pos].name);
    mappingData.service = wrappers[pos];
}
/**
 * Prefix mapping. ( /foo/* )
 * Walks '/'-delimited prefixes of the path from longest to shortest until
 * one matches a wildcard wrapper name at a segment boundary.
 */
private final void internalMapWildcardWrapper
    (BaseMapper.ServiceMapping[] wrappers, int nesting, CBuffer path,
     MappingData mappingData) {

    int lastSlash = -1;
    int length = -1;

    CBuffer tmp = mappingData.tmpPrefix;
    tmp.wrap(path, 0, path.length());

    int pos = find(wrappers, tmp);
    if (pos != -1) {
        boolean found = false;
        while (pos >= 0) {
            if (tmp.startsWith(wrappers[pos].name)) {
                length = wrappers[pos].name.length();
                if (tmp.length() == length) {
                    found = true;
                    break;
                } else if (tmp.startsWithIgnoreCase("/", length)) {
                    // Prefix ends exactly at a path-segment boundary.
                    found = true;
                    break;
                }
            }
            // First truncation jumps to the deepest possible wrapper
            // depth (nesting); later ones strip one segment at a time.
            if (lastSlash == -1) {
                lastSlash = tmp.nthSlash(nesting + 1);
            } else {
                lastSlash = tmp.lastIndexOf('/');
            }
            tmp.delete(lastSlash);
            pos = find(wrappers, tmp);
        }
        if (found) {
            mappingData.wrapperPath.set(wrappers[pos].name);
            // Whatever follows the matched prefix becomes path info.
            if (path.length() > length) {
                mappingData.pathInfo.set
                    (path, length, path.length() - length);
            }
            mappingData.requestPath.set(path);
            mappingData.service = wrappers[pos];
        }
    }
}
/**
 * Extension mappings ( *.jsp style ).
 */
protected final void internalMapExtensionWrapper
    (BaseMapper.ServiceMapping[] wrappers, CBuffer path, MappingData mappingData) {
    // Extracts the extension into mappingData.ext; presumably dot < 0
    // means the last segment has no extension - confirm against
    // CBuffer.getExtension().
    int dot = path.getExtension(mappingData.ext, '/', '.');
    if (dot >= 0) {
        int pos = find(wrappers, mappingData.ext);
        // find() returns the closest entry <= ext; require an exact hit.
        if ((pos != -1)
                && (mappingData.ext.equals(wrappers[pos].name))) {
            mappingData.wrapperPath.set(path);
            mappingData.requestPath.set(path);
            mappingData.service = wrappers[pos];
        }
    }
}
/**
 * Find a map element given its name in a sorted array of map elements.
 * This will return the index for the closest inferior or equal item in the
 * given array.
 */
private static final int find(BaseMapper.Mapping[] map, CBuffer name) {
    int a = 0;
    int b = map.length - 1;

    // Special cases: -1 and 0
    if (b == -1) {
        return -1;
    }
    if (name.compare(map[0].name) < 0 ) {
        return -1;
    }
    if (b == 0) {
        return 0;
    }

    int i = 0;
    while (true) {
        i = (b + a) / 2;
        int result = name.compare(map[i].name);
        // Was 'result == 1': that silently relies on compare() returning
        // exactly 1 for "greater". Use > 0 (as the String overload below
        // does) so any positive return is handled and the loop cannot
        // fail to make progress.
        if (result > 0) {
            a = i;
        } else if (result == 0) {
            return i;
        } else {
            b = i;
        }
        if ((b - a) == 1) {
            int result2 = name.compare(map[b].name);
            if (result2 < 0) {
                return a;
            } else {
                return b;
            }
        }
    }
}
/**
 * Find a map element given its name in a sorted array of map elements,
 * comparing case-insensitively.
 * This will return the index for the closest inferior or equal item in the
 * given array.
 */
private static final int findIgnoreCase(BaseMapper.Mapping[] map,
        CBuffer name) {
    int a = 0;
    int b = map.length - 1;

    // Special cases: -1 and 0
    if (b == -1) {
        return -1;
    }
    if (name.compareIgnoreCase(map[0].name) < 0 ) {
        return -1;
    }
    if (b == 0) {
        return 0;
    }

    int i = 0;
    while (true) {
        i = (b + a) / 2;
        int result = name.compareIgnoreCase(map[i].name);
        // Was 'result == 1': relies on compareIgnoreCase() returning
        // exactly 1 for "greater". Use > 0 for consistency with the
        // String overload and robustness to other positive values.
        if (result > 0) {
            a = i;
        } else if (result == 0) {
            return i;
        } else {
            b = i;
        }
        if ((b - a) == 1) {
            int result2 = name.compareIgnoreCase(map[b].name);
            if (result2 < 0) {
                return a;
            } else {
                return b;
            }
        }
    }
}
/**
 * Find a map element given its name in a sorted array of map elements.
 * This will return the index for the closest inferior or equal item in the
 * given array.
 */
private static final int find(BaseMapper.Mapping[] map, String name) {
    int lo = 0;
    int hi = map.length - 1;

    if (hi == -1) {
        return -1; // empty map
    }
    if (name.compareTo(map[0].name) < 0) {
        return -1; // below the smallest entry
    }
    if (hi == 0) {
        return 0; // single entry, and name >= it
    }

    while (true) {
        int mid = (hi + lo) / 2;
        int cmp = name.compareTo(map[mid].name);
        if (cmp == 0) {
            return mid; // exact hit
        }
        if (cmp > 0) {
            lo = mid;
        } else {
            hi = mid;
        }
        if (hi - lo == 1) {
            // Adjacent bounds: pick hi when name >= map[hi], else lo.
            return (name.compareTo(map[hi].name) < 0) ? lo : hi;
        }
    }
}
/**
 * Return the slash count in a given string.
 */
private static final int slashCount(String name) {
    int count = 0;
    for (int pos = name.indexOf('/'); pos != -1;
            pos = name.indexOf('/', pos + 1)) {
        count++;
    }
    return count;
}
/**
 * Insert into the right place in a sorted MapElement array, and prevent
 * duplicates. newMap must be one slot longer than oldMap.
 *
 * @return false (leaving newMap incomplete) when an entry with the same
 *         name already exists
 */
private static final boolean insertMap
    (BaseMapper.Mapping[] oldMap, BaseMapper.Mapping[] newMap, BaseMapper.Mapping newElement) {
    // find() returns the closest index <= newElement.name, -1 if below all.
    int pos = find(oldMap, newElement.name);
    if ((pos != -1) && (newElement.name.equals(oldMap[pos].name))) {
        return false;
    }
    // Insert after pos. When pos == -1 the first copy moves 0 elements
    // and the new element lands at index 0.
    System.arraycopy(oldMap, 0, newMap, 0, pos + 1);
    newMap[pos + 1] = newElement;
    System.arraycopy
        (oldMap, pos + 1, newMap, pos + 2, oldMap.length - pos - 1);
    return true;
}
/**
 * Remove the entry with the given name from a sorted MapElement array.
 * newMap must be one slot shorter than oldMap.
 *
 * @return true when an exact-name match was found and removed; false
 *         leaves newMap unfilled
 */
private static final boolean removeMap
    (BaseMapper.Mapping[] oldMap, BaseMapper.Mapping[] newMap, String name) {
    int pos = find(oldMap, name);
    if ((pos != -1) && (name.equals(oldMap[pos].name))) {
        // Copy everything before and after pos, skipping the match.
        System.arraycopy(oldMap, 0, newMap, 0, pos);
        System.arraycopy(oldMap, pos + 1, newMap, pos,
                oldMap.length - pos - 1);
        return true;
    }
    return false;
}
// ------------------------------------------------- MapElement Inner Class

// A virtual host entry. Aliases set 'object' (inherited from Mapping) to
// their real Host; primary hosts created by addHost() leave it null.
protected static final class Host
    extends BaseMapper.Mapping {
    //Map<String, Context> contexts = new HashMap();
    //Context rootContext;

    // Context list; shared between a host and all of its aliases.
    public ContextList contextList = null;
}
// ------------------------------------------------ ContextList Inner Class

// Shared among host aliases.
protected static final class ContextList {
    // Contexts sorted by name (maintained via insertMap/removeMap).
    public BaseMapper.Context[] contexts = new BaseMapper.Context[0];
    // Max '/' count among context paths; bounds the prefix walk in
    // internalMap().
    public int nesting = 0;
}
// Mapping tables for one web application (context).
public static final class Context extends BaseMapper.Mapping {
    Context(BaseMapper mapper) {
        this.mapper = mapper;
    }
    // Owning mapper; used by the addWrapper() convenience method.
    public BaseMapper mapper;
    // Welcome files defined for this context.
    public String[] welcomeResources = new String[0];
    // Static resources, used for welcome-file and directory checks.
    public FileConnector resources = null;
    // Catch-all wrapper installed by a "/" mapping (or the ctxService
    // passed to addContext).
    public BaseMapper.ServiceMapping defaultWrapper = null;
    // Sorted wrapper tables for the exact / wildcard / extension rules.
    public BaseMapper.ServiceMapping[] exactWrappers = new BaseMapper.ServiceMapping[0];
    public BaseMapper.ServiceMapping[] wildcardWrappers = new BaseMapper.ServiceMapping[0];
    public BaseMapper.ServiceMapping[] extensionWrappers = new BaseMapper.ServiceMapping[0];
    // Max '/' count among wildcard wrapper names.
    public int nesting = 0;

    public void addWrapper(String path, HttpService service) {
        mapper.addWrapper(this, path, service);
    }
}
// One wrapper (service) mapping entry.
public static class ServiceMapping extends BaseMapper.Mapping {
    // True when this mapping is a JSP wildcard pattern; triggers the
    // welcome-file special cases in mapAfterContext().
    public boolean jspWildCard = false;
    // If set, the service will run in the selector thread ( should
    // be non-blocking )
    public boolean selectorThread = false;
}
// Base entry for all sorted mapping tables: a name plus opaque payload.
protected static abstract class Mapping {
    // Sort key within the table.
    public String name = null;
    // Payload: user object for contexts/wrappers, real Host for aliases.
    public Object object = null;

    public String toString() {
        // Empty/absent name denotes the default (catch-all) entry.
        if (name == null || "".equals(name)) {
            return "DEFAULT";
        }
        return name;
    }
}
// ---------------------------------------------------- Context Inner Class
}
|
googleapis/google-cloud-java | 37,412 | java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/ListRegionsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1beta/regions.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1beta;
/**
*
*
* <pre>
* Response message for the `ListRegions` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.ListRegionsResponse}
*/
public final class ListRegionsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.ListRegionsResponse)
ListRegionsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListRegionsResponse.newBuilder() to construct.
private ListRegionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListRegionsResponse() {
regions_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListRegionsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.accounts.v1beta.RegionsProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListRegionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.accounts.v1beta.RegionsProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListRegionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse.class,
com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse.Builder.class);
}
public static final int REGIONS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.shopping.merchant.accounts.v1beta.Region> regions_;
/**
*
*
* <pre>
* The regions from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.shopping.merchant.accounts.v1beta.Region> getRegionsList() {
return regions_;
}
/**
*
*
* <pre>
* The regions from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder>
getRegionsOrBuilderList() {
return regions_;
}
/**
*
*
* <pre>
* The regions from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
*/
@java.lang.Override
public int getRegionsCount() {
return regions_.size();
}
/**
*
*
* <pre>
* The regions from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.Region getRegions(int index) {
return regions_.get(index);
}
/**
*
*
* <pre>
* The regions from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder getRegionsOrBuilder(
int index) {
return regions_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < regions_.size(); i++) {
output.writeMessage(1, regions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < regions_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, regions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse)) {
return super.equals(obj);
}
com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse other =
(com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse) obj;
if (!getRegionsList().equals(other.getRegionsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  // Hash over the descriptor, each set field (tagged by field number), and the
  // unknown fields; memoized (0 is used as the "not yet computed" sentinel).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getRegionsCount() > 0) {
      hash = (37 * hash) + REGIONS_FIELD_NUMBER;
      hash = (53 * hash) + getRegionsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. The byte[]/ByteString/ByteBuffer
  // overloads delegate to PARSER directly; the stream overloads go through the
  // GeneratedMessageV3 helpers so IOExceptions are surfaced unwrapped.
  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: builders are always derived from DEFAULT_INSTANCE
  // (or from this message for toBuilder) so field defaults are shared.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for the `ListRegions` method.
   * </pre>
   *
   * Protobuf type {@code google.shopping.merchant.accounts.v1beta.ListRegionsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.ListRegionsResponse)
      com.google.shopping.merchant.accounts.v1beta.ListRegionsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.accounts.v1beta.RegionsProto
          .internal_static_google_shopping_merchant_accounts_v1beta_ListRegionsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.shopping.merchant.accounts.v1beta.RegionsProto
          .internal_static_google_shopping_merchant_accounts_v1beta_ListRegionsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse.class,
              com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse.Builder.class);
    }

    // Construct using com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets both fields to their defaults and clears the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (regionsBuilder_ == null) {
        regions_ = java.util.Collections.emptyList();
      } else {
        regions_ = null;
        regionsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.shopping.merchant.accounts.v1beta.RegionsProto
          .internal_static_google_shopping_merchant_accounts_v1beta_ListRegionsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse
        getDefaultInstanceForType() {
      return com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse build() {
      com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds without the isInitialized check; repeated fields are handled first,
    // then scalar fields gated by the has-bits.
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse buildPartial() {
      com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse result =
          new com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the regions list: when built inline the list is frozen
    // (unmodifiable) and ownership moves to the message.
    private void buildPartialRepeatedFields(
        com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse result) {
      if (regionsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          regions_ = java.util.Collections.unmodifiableList(regions_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.regions_ = regions_;
      } else {
        result.regions_ = regionsBuilder_.build();
      }
    }

    private void buildPartial0(
        com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse) {
        return mergeFrom((com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: repeated regions are concatenated (with an ownership-stealing
    // fast path when this builder's list is empty), next_page_token is replaced
    // only when non-empty in `other`.
    public Builder mergeFrom(
        com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse other) {
      if (other
          == com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse.getDefaultInstance())
        return this;
      if (regionsBuilder_ == null) {
        if (!other.regions_.isEmpty()) {
          if (regions_.isEmpty()) {
            regions_ = other.regions_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureRegionsIsMutable();
            regions_.addAll(other.regions_);
          }
          onChanged();
        }
      } else {
        if (!other.regions_.isEmpty()) {
          if (regionsBuilder_.isEmpty()) {
            regionsBuilder_.dispose();
            regionsBuilder_ = null;
            regions_ = other.regions_;
            bitField0_ = (bitField0_ & ~0x00000001);
            regionsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getRegionsFieldBuilder()
                    : null;
          } else {
            regionsBuilder_.addAllMessages(other.regions_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming merge: dispatches on wire tags (10 = regions message,
    // 18 = next_page_token string); unknown fields are preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.shopping.merchant.accounts.v1beta.Region m =
                    input.readMessage(
                        com.google.shopping.merchant.accounts.v1beta.Region.parser(),
                        extensionRegistry);
                if (regionsBuilder_ == null) {
                  ensureRegionsIsMutable();
                  regions_.add(m);
                } else {
                  regionsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0x00000001 = regions list is a private mutable copy;
    // bit 0x00000002 = next_page_token has been set.
    private int bitField0_;

    private java.util.List<com.google.shopping.merchant.accounts.v1beta.Region> regions_ =
        java.util.Collections.emptyList();

    // Copy-on-write: replaces a shared/immutable list with a private ArrayList
    // before the first mutation.
    private void ensureRegionsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        regions_ =
            new java.util.ArrayList<com.google.shopping.merchant.accounts.v1beta.Region>(regions_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.shopping.merchant.accounts.v1beta.Region,
            com.google.shopping.merchant.accounts.v1beta.Region.Builder,
            com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder>
        regionsBuilder_;

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public java.util.List<com.google.shopping.merchant.accounts.v1beta.Region> getRegionsList() {
      if (regionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(regions_);
      } else {
        return regionsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public int getRegionsCount() {
      if (regionsBuilder_ == null) {
        return regions_.size();
      } else {
        return regionsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public com.google.shopping.merchant.accounts.v1beta.Region getRegions(int index) {
      if (regionsBuilder_ == null) {
        return regions_.get(index);
      } else {
        return regionsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public Builder setRegions(
        int index, com.google.shopping.merchant.accounts.v1beta.Region value) {
      if (regionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRegionsIsMutable();
        regions_.set(index, value);
        onChanged();
      } else {
        regionsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public Builder setRegions(
        int index, com.google.shopping.merchant.accounts.v1beta.Region.Builder builderForValue) {
      if (regionsBuilder_ == null) {
        ensureRegionsIsMutable();
        regions_.set(index, builderForValue.build());
        onChanged();
      } else {
        regionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public Builder addRegions(com.google.shopping.merchant.accounts.v1beta.Region value) {
      if (regionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRegionsIsMutable();
        regions_.add(value);
        onChanged();
      } else {
        regionsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public Builder addRegions(
        int index, com.google.shopping.merchant.accounts.v1beta.Region value) {
      if (regionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRegionsIsMutable();
        regions_.add(index, value);
        onChanged();
      } else {
        regionsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public Builder addRegions(
        com.google.shopping.merchant.accounts.v1beta.Region.Builder builderForValue) {
      if (regionsBuilder_ == null) {
        ensureRegionsIsMutable();
        regions_.add(builderForValue.build());
        onChanged();
      } else {
        regionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public Builder addRegions(
        int index, com.google.shopping.merchant.accounts.v1beta.Region.Builder builderForValue) {
      if (regionsBuilder_ == null) {
        ensureRegionsIsMutable();
        regions_.add(index, builderForValue.build());
        onChanged();
      } else {
        regionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public Builder addAllRegions(
        java.lang.Iterable<? extends com.google.shopping.merchant.accounts.v1beta.Region> values) {
      if (regionsBuilder_ == null) {
        ensureRegionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, regions_);
        onChanged();
      } else {
        regionsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public Builder clearRegions() {
      if (regionsBuilder_ == null) {
        regions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        regionsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public Builder removeRegions(int index) {
      if (regionsBuilder_ == null) {
        ensureRegionsIsMutable();
        regions_.remove(index);
        onChanged();
      } else {
        regionsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public com.google.shopping.merchant.accounts.v1beta.Region.Builder getRegionsBuilder(
        int index) {
      return getRegionsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder getRegionsOrBuilder(
        int index) {
      if (regionsBuilder_ == null) {
        return regions_.get(index);
      } else {
        return regionsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public java.util.List<? extends com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder>
        getRegionsOrBuilderList() {
      if (regionsBuilder_ != null) {
        return regionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(regions_);
      }
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public com.google.shopping.merchant.accounts.v1beta.Region.Builder addRegionsBuilder() {
      return getRegionsFieldBuilder()
          .addBuilder(com.google.shopping.merchant.accounts.v1beta.Region.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public com.google.shopping.merchant.accounts.v1beta.Region.Builder addRegionsBuilder(
        int index) {
      return getRegionsFieldBuilder()
          .addBuilder(
              index, com.google.shopping.merchant.accounts.v1beta.Region.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The regions from the specified merchant.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1beta.Region regions = 1;</code>
     */
    public java.util.List<com.google.shopping.merchant.accounts.v1beta.Region.Builder>
        getRegionsBuilderList() {
      return getRegionsFieldBuilder().getBuilderList();
    }

    // Lazily switches the repeated field from plain-list mode to builder mode;
    // once created, the nested builder owns the list and regions_ is nulled.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.shopping.merchant.accounts.v1beta.Region,
            com.google.shopping.merchant.accounts.v1beta.Region.Builder,
            com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder>
        getRegionsFieldBuilder() {
      if (regionsBuilder_ == null) {
        regionsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.shopping.merchant.accounts.v1beta.Region,
                com.google.shopping.merchant.accounts.v1beta.Region.Builder,
                com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder>(
                regions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        regions_ = null;
      }
      return regionsBuilder_;
    }

    // Stored as String or ByteString; lazily converted in the accessors below.
    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.ListRegionsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.ListRegionsResponse)
  // Shared immutable default instance; all empty messages of this type alias it.
  private static final com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse();
  }

  public static com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to Builder.mergeFrom and returns the partially built
  // message on failure so callers can inspect what was read.
  private static final com.google.protobuf.Parser<ListRegionsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListRegionsResponse>() {
        @java.lang.Override
        public ListRegionsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListRegionsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListRegionsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1beta.ListRegionsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/logging-log4j2 | 37,581 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/rolling/RollingFileManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.logging.log4j.core.appender.rolling;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileTime;
import java.util.Collection;
import java.util.Date;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Semaphore;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LifeCycle;
import org.apache.logging.log4j.core.LifeCycle2;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.appender.ConfigurationFactoryData;
import org.apache.logging.log4j.core.appender.FileManager;
import org.apache.logging.log4j.core.appender.ManagerFactory;
import org.apache.logging.log4j.core.appender.rolling.action.AbstractAction;
import org.apache.logging.log4j.core.appender.rolling.action.Action;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.util.Constants;
import org.apache.logging.log4j.core.util.FileUtils;
import org.apache.logging.log4j.core.util.Log4jThreadFactory;
/**
* The Rolling File Manager.
*/
public class RollingFileManager extends FileManager {
    private static RollingFileManagerFactory factory = new RollingFileManagerFactory();
    private static final int MAX_TRIES = 3;
    private static final int MIN_DURATION = 100;
    private static final FileTime EPOCH = FileTime.fromMillis(0);

    // Size in bytes of the current file (initialized from disk in initialize()).
    protected long size;
    // Timestamp fed to PatternProcessor.setPrevFileTime by the constructors.
    private long initialTime;
    private volatile PatternProcessor patternProcessor;
    // Single permit: ensures only one thread runs a rollover at a time.
    private final Semaphore semaphore = new Semaphore(1);
    private final Log4jThreadFactory threadFactory = Log4jThreadFactory.createThreadFactory("RollingFileManager");
    private volatile TriggeringPolicy triggeringPolicy;
    private volatile RolloverStrategy rolloverStrategy;
    private volatile boolean renameEmptyFiles;
    private volatile boolean initialized;
    private volatile String fileName;
    // Set by the constructors when rolloverStrategy is a direct-write strategy
    // (see the instanceof checks there); gates the size bootstrap in initialize().
    private final boolean directWrite;
    private final CopyOnWriteArrayList<RolloverListener> rolloverListeners = new CopyOnWriteArrayList<>();

    /* This executor pool will create a new Thread for every work async action to be performed. Using it allows
    us to make sure all the Threads are completed when the Manager is stopped. */
    private final ExecutorService asyncExecutor =
            new ThreadPoolExecutor(0, Integer.MAX_VALUE, 0, TimeUnit.MILLISECONDS, new EmptyQueue(), threadFactory);

    // Field updaters allow atomic, lock-free replacement of the volatile
    // policy/strategy/processor fields at runtime.
    private static final AtomicReferenceFieldUpdater<RollingFileManager, TriggeringPolicy> triggeringPolicyUpdater =
            AtomicReferenceFieldUpdater.newUpdater(
                    RollingFileManager.class, TriggeringPolicy.class, "triggeringPolicy");

    private static final AtomicReferenceFieldUpdater<RollingFileManager, RolloverStrategy> rolloverStrategyUpdater =
            AtomicReferenceFieldUpdater.newUpdater(
                    RollingFileManager.class, RolloverStrategy.class, "rolloverStrategy");

    private static final AtomicReferenceFieldUpdater<RollingFileManager, PatternProcessor> patternProcessorUpdater =
            AtomicReferenceFieldUpdater.newUpdater(
                    RollingFileManager.class, PatternProcessor.class, "patternProcessor");
    /**
     * Legacy constructor.
     *
     * <p>Note: the {@code bufferSize} parameter is ignored; a buffer of
     * {@link Constants#ENCODER_BYTE_BUFFER_SIZE} bytes is allocated instead.
     *
     * @deprecated Use one of the {@link LoggerContext}-aware constructors.
     */
    @Deprecated
    protected RollingFileManager(
            final String fileName,
            final String pattern,
            final OutputStream os,
            final boolean append,
            final long size,
            final long initialTime,
            final TriggeringPolicy triggeringPolicy,
            final RolloverStrategy rolloverStrategy,
            final String advertiseURI,
            final Layout<? extends Serializable> layout,
            final int bufferSize,
            final boolean writeHeader) {
        this(
                fileName,
                pattern,
                os,
                append,
                size,
                initialTime,
                triggeringPolicy,
                rolloverStrategy,
                advertiseURI,
                layout,
                writeHeader,
                ByteBuffer.wrap(new byte[Constants.ENCODER_BYTE_BUFFER_SIZE]));
    }
@Deprecated
protected RollingFileManager(
final String fileName,
final String pattern,
final OutputStream os,
final boolean append,
final long size,
final long initialTime,
final TriggeringPolicy triggeringPolicy,
final RolloverStrategy rolloverStrategy,
final String advertiseURI,
final Layout<? extends Serializable> layout,
final boolean writeHeader,
final ByteBuffer buffer) {
super(fileName != null ? fileName : pattern, os, append, false, advertiseURI, layout, writeHeader, buffer);
this.size = size;
this.initialTime = initialTime;
this.triggeringPolicy = triggeringPolicy;
this.rolloverStrategy = rolloverStrategy;
this.patternProcessor = new PatternProcessor(pattern);
this.patternProcessor.setPrevFileTime(initialTime);
this.fileName = fileName;
this.directWrite = rolloverStrategy instanceof DirectWriteRolloverStrategy;
}
@Deprecated
protected RollingFileManager(
final LoggerContext loggerContext,
final String fileName,
final String pattern,
final OutputStream os,
final boolean append,
final boolean createOnDemand,
final long size,
final long initialTime,
final TriggeringPolicy triggeringPolicy,
final RolloverStrategy rolloverStrategy,
final String advertiseURI,
final Layout<? extends Serializable> layout,
final boolean writeHeader,
final ByteBuffer buffer) {
super(
loggerContext,
fileName != null ? fileName : pattern,
os,
append,
false,
createOnDemand,
advertiseURI,
layout,
writeHeader,
buffer);
this.size = size;
this.initialTime = initialTime;
this.triggeringPolicy = triggeringPolicy;
this.rolloverStrategy = rolloverStrategy;
this.patternProcessor = new PatternProcessor(pattern);
this.patternProcessor.setPrevFileTime(initialTime);
this.fileName = fileName;
this.directWrite = rolloverStrategy instanceof DirectWriteRolloverStrategy;
}
    /**
     * Primary constructor: wires the triggering policy, rollover strategy and
     * file-name {@link PatternProcessor}, and records whether the strategy is a
     * {@link DirectFileRolloverStrategy} (direct-write mode).
     *
     * @since 2.9
     */
    protected RollingFileManager(
            final LoggerContext loggerContext,
            final String fileName,
            final String pattern,
            final OutputStream os,
            final boolean append,
            final boolean createOnDemand,
            final long size,
            final long initialTime,
            final TriggeringPolicy triggeringPolicy,
            final RolloverStrategy rolloverStrategy,
            final String advertiseURI,
            final Layout<? extends Serializable> layout,
            final String filePermissions,
            final String fileOwner,
            final String fileGroup,
            final boolean writeHeader,
            final ByteBuffer buffer) {
        super(
                loggerContext,
                fileName != null ? fileName : pattern,
                os,
                append,
                false,
                createOnDemand,
                advertiseURI,
                layout,
                filePermissions,
                fileOwner,
                fileGroup,
                writeHeader,
                buffer);
        this.size = size;
        this.initialTime = initialTime;
        this.patternProcessor = new PatternProcessor(pattern);
        this.patternProcessor.setPrevFileTime(initialTime);
        this.triggeringPolicy = triggeringPolicy;
        this.rolloverStrategy = rolloverStrategy;
        this.fileName = fileName;
        this.directWrite = rolloverStrategy instanceof DirectFileRolloverStrategy;
    }
@SuppressFBWarnings(
value = "PATH_TRAVERSAL_IN",
justification = "The name of the accessed files is based on a configuration value.")
public void initialize() {
if (!initialized) {
LOGGER.debug("Initializing triggering policy {}", triggeringPolicy);
initialized = true;
// LOG4J2-2981 - set the file size before initializing the triggering policy.
if (directWrite) {
// LOG4J2-2485: Initialize size from the most recently written file.
final File file = new File(getFileName());
if (file.exists()) {
size = file.length();
} else {
((DirectFileRolloverStrategy) rolloverStrategy).clearCurrentFileName();
}
}
triggeringPolicy.initialize(this);
if (triggeringPolicy instanceof LifeCycle) {
((LifeCycle) triggeringPolicy).start();
}
if (directWrite) {
// LOG4J2-2485: Initialize size from the most recently written file.
final File file = new File(getFileName());
if (file.exists()) {
size = file.length();
} else {
((DirectFileRolloverStrategy) rolloverStrategy).clearCurrentFileName();
}
}
}
}
    /**
     * Returns a RollingFileManager.
     * @param fileName The file name.
     * @param pattern The pattern for rolling file.
     * @param append true if the file should be appended to.
     * @param bufferedIO true if data should be buffered.
     * @param policy The TriggeringPolicy.
     * @param strategy The RolloverStrategy.
     * @param advertiseURI the URI to use when advertising the file
     * @param layout The Layout.
     * @param bufferSize buffer size to use if bufferedIO is true
     * @param immediateFlush flush on every write or not
     * @param createOnDemand true if you want to lazy-create the file (a.k.a. on-demand.)
     * @param filePermissions File permissions
     * @param fileOwner File owner
     * @param fileGroup File group
     * @param configuration The configuration.
     * @return A RollingFileManager, or {@code null} when a DirectWriteRolloverStrategy is
     *         combined with an explicit fileName (an invalid configuration).
     */
    public static RollingFileManager getFileManager(
            final String fileName,
            final String pattern,
            final boolean append,
            final boolean bufferedIO,
            final TriggeringPolicy policy,
            final RolloverStrategy strategy,
            final String advertiseURI,
            final Layout<? extends Serializable> layout,
            final int bufferSize,
            final boolean immediateFlush,
            final boolean createOnDemand,
            final String filePermissions,
            final String fileOwner,
            final String fileGroup,
            final Configuration configuration) {
        // A direct-write strategy computes target file names itself, so an explicit
        // fileName is contradictory configuration.
        if (strategy instanceof DirectWriteRolloverStrategy && fileName != null) {
            LOGGER.error("The fileName attribute must not be specified with the DirectWriteRolloverStrategy");
            return null;
        }
        // Managers are keyed by file name when present, otherwise by the pattern.
        final String name = fileName == null ? pattern : fileName;
        return narrow(
                RollingFileManager.class,
                getManager(
                        name,
                        new FactoryData(
                                fileName,
                                pattern,
                                append,
                                bufferedIO,
                                policy,
                                strategy,
                                advertiseURI,
                                layout,
                                bufferSize,
                                immediateFlush,
                                createOnDemand,
                                filePermissions,
                                fileOwner,
                                fileGroup,
                                configuration),
                        factory));
    }
    /**
     * Add a RolloverListener that is notified before and after each rollover.
     * @param listener The RolloverListener.
     */
    public void addRolloverListener(final RolloverListener listener) {
        rolloverListeners.add(listener);
    }
    /**
     * Remove a previously registered RolloverListener.
     * @param listener The RolloverListener.
     */
    public void removeRolloverListener(final RolloverListener listener) {
        rolloverListeners.remove(listener);
    }
    /**
     * Returns the name of the File being managed. In direct-write mode the current name is
     * obtained from the rollover strategy on every call and cached in {@code fileName}.
     * @return The name of the File being managed.
     */
    @Override
    public String getFileName() {
        if (directWrite) {
            fileName = ((DirectFileRolloverStrategy) rolloverStrategy).getCurrentFileName(this);
        }
        return fileName;
    }
@Override
protected void createParentDir(File file) {
if (directWrite) {
final File parent = file.getParentFile();
// If the parent is null the file is in the current working directory.
if (parent != null) {
parent.mkdirs();
}
}
}
    /**
     * Returns whether this manager writes directly to pattern-derived files (no fixed
     * active file).
     * @return true if direct-write mode is enabled.
     */
    public boolean isDirectWrite() {
        return directWrite;
    }
    /**
     * Returns the compression file extension derived from the file pattern, if any.
     * @return the FileExtension of the pattern.
     */
    public FileExtension getFileExtension() {
        return patternProcessor.getFileExtension();
    }
    // Override solely to widen visibility for unit tests; delegates to the superclass.
    @Override
    protected synchronized void write(
            final byte[] bytes, final int offset, final int length, final boolean immediateFlush) {
        super.write(bytes, offset, length, immediateFlush);
    }
    /**
     * Writes bytes to the destination, accounting for the bytes written so that size-based
     * triggering policies can consult {@link #getFileSize()}.
     */
    @Override
    protected synchronized void writeToDestination(final byte[] bytes, final int offset, final int length) {
        // Track the size before delegating; getFileSize() adds the buffered position on top.
        size += length;
        super.writeToDestination(bytes, offset, length);
    }
    /**
     * Returns whether empty files are renamed (rather than deleted) during rollover.
     * @return true if empty files should be renamed.
     */
    public boolean isRenameEmptyFiles() {
        return renameEmptyFiles;
    }
    /**
     * Sets whether empty files are renamed (rather than deleted) during rollover.
     * @param renameEmptyFiles true if empty files should be renamed.
     */
    public void setRenameEmptyFiles(final boolean renameEmptyFiles) {
        this.renameEmptyFiles = renameEmptyFiles;
    }
    /**
     * Returns the current size of the file, including bytes already written to the
     * destination ({@code size}) plus bytes still pending in the in-memory buffer.
     * @return The size of the file in bytes.
     */
    public long getFileSize() {
        return size + byteBuffer.position();
    }
    /**
     * Returns the time the file was created. This is reset to the current time after each
     * successful rollover (see {@link #rollover()}).
     * @return The time the file was created, in epoch milliseconds.
     */
    public long getFileTime() {
        return initialTime;
    }
    /**
     * Determines if a rollover should occur for this event and, if the triggering policy
     * agrees, performs it.
     * @param event The LogEvent.
     */
    public synchronized void checkRollover(final LogEvent event) {
        if (triggeringPolicy.isTriggeringEvent(event)) {
            rollover();
        }
    }
    /**
     * Stops the triggering policy and shuts down the asynchronous rollover executor,
     * waiting (bounded by MAX_TRIES and the supplied timeout) for queued rollover actions
     * to complete before forcing shutdown.
     *
     * @param timeout the maximum time to wait per shutdown phase
     * @param timeUnit the unit of {@code timeout}
     * @return true if the manager and its triggering policy stopped cleanly
     */
    @Override
    public boolean releaseSub(final long timeout, final TimeUnit timeUnit) {
        LOGGER.debug("Shutting down RollingFileManager {}", getName());
        boolean stopped = true;
        if (triggeringPolicy instanceof LifeCycle2) {
            stopped &= ((LifeCycle2) triggeringPolicy).stop(timeout, timeUnit);
        } else if (triggeringPolicy instanceof LifeCycle) {
            ((LifeCycle) triggeringPolicy).stop();
            stopped &= true;
        }
        final boolean status = super.releaseSub(timeout, timeUnit) && stopped;
        asyncExecutor.shutdown();
        try {
            // Allow at least the minimum interval to pass so async actions can complete.
            final long millis = timeUnit.toMillis(timeout);
            // waitInterval is the larger of the requested timeout and MIN_DURATION.
            final long waitInterval = MIN_DURATION < millis ? millis : MIN_DURATION;
            // Each retry waits progressively longer (waitInterval * count).
            for (int count = 1; count <= MAX_TRIES && !asyncExecutor.isTerminated(); ++count) {
                asyncExecutor.awaitTermination(waitInterval * count, TimeUnit.MILLISECONDS);
            }
            if (asyncExecutor.isTerminated()) {
                LOGGER.debug("All asynchronous threads have terminated");
            } else {
                // Graceful wait exhausted: force shutdown and give it one more bounded wait.
                asyncExecutor.shutdownNow();
                try {
                    asyncExecutor.awaitTermination(timeout, timeUnit);
                    if (asyncExecutor.isTerminated()) {
                        LOGGER.debug("All asynchronous threads have terminated");
                    } else {
                        LOGGER.debug(
                                "RollingFileManager shutting down but some asynchronous services may not have completed");
                    }
                } catch (final InterruptedException inner) {
                    LOGGER.warn("RollingFileManager stopped but some asynchronous services may not have completed.");
                }
            }
        } catch (final InterruptedException ie) {
            asyncExecutor.shutdownNow();
            try {
                asyncExecutor.awaitTermination(timeout, timeUnit);
                if (asyncExecutor.isTerminated()) {
                    LOGGER.debug("All asynchronous threads have terminated");
                }
            } catch (final InterruptedException inner) {
                LOGGER.warn("RollingFileManager stopped but some asynchronous services may not have completed.");
            }
            // Preserve interrupt status
            Thread.currentThread().interrupt();
        }
        LOGGER.debug("RollingFileManager shutdown completed with status {}", status);
        return status;
    }
    /**
     * Rolls over using explicit previous-file and previous-roll times: the pattern
     * processor is primed with both timestamps before delegating to {@link #rollover()}.
     * @param prevFileTime the previous file time applied to the pattern processor
     * @param prevRollTime the previous rollover time applied as the current file time
     */
    public synchronized void rollover(final Date prevFileTime, final Date prevRollTime) {
        LOGGER.debug("Rollover PrevFileTime: {}, PrevRollTime: {}", prevFileTime.getTime(), prevRollTime.getTime());
        getPatternProcessor().setPrevFileTime(prevFileTime.getTime());
        getPatternProcessor().setCurrentFileTime(prevRollTime.getTime());
        rollover();
    }
public synchronized void rollover() {
if (!hasOutputStream() && !isCreateOnDemand() && !isDirectWrite()) {
return;
}
final String currentFileName = fileName;
if (rolloverListeners.size() > 0) {
for (RolloverListener listener : rolloverListeners) {
try {
listener.rolloverTriggered(currentFileName);
} catch (Exception ex) {
LOGGER.warn(
"Rollover Listener {} failed with {}: {}",
listener.getClass().getSimpleName(),
ex.getClass().getName(),
ex.getMessage());
}
}
}
final boolean interrupted = Thread.interrupted(); // clear interrupted state
try {
if (interrupted) {
LOGGER.warn("RollingFileManager cleared thread interrupted state, continue to rollover");
}
if (rollover(rolloverStrategy)) {
try {
size = 0;
initialTime = System.currentTimeMillis();
createFileAfterRollover();
} catch (final IOException e) {
logError("Failed to create file after rollover", e);
}
}
} finally {
if (interrupted) { // restore interrupted state
Thread.currentThread().interrupt();
}
}
if (rolloverListeners.size() > 0) {
for (RolloverListener listener : rolloverListeners) {
try {
listener.rolloverComplete(currentFileName);
} catch (Exception ex) {
LOGGER.warn(
"Rollover Listener {} failed with {}: {}",
listener.getClass().getSimpleName(),
ex.getClass().getName(),
ex.getMessage());
}
}
}
}
    /**
     * Re-creates the output stream for the (new) active file after a rollover.
     * @throws IOException if the output stream cannot be created
     */
    protected void createFileAfterRollover() throws IOException {
        setOutputStream(createOutputStream());
    }
    /**
     * Returns the pattern processor used to compute rolled-over file names.
     * @return The PatternProcessor.
     */
    public PatternProcessor getPatternProcessor() {
        return patternProcessor;
    }
    /**
     * Atomically replaces the triggering policy. The new policy is initialized first, then
     * swapped in via compare-and-set (retried up to MAX_TRIES). On a successful swap the new
     * policy is started and the old one stopped; if the swap never succeeds, the new policy
     * is stopped instead so it does not leak a started lifecycle.
     * @param triggeringPolicy the replacement policy
     */
    public void setTriggeringPolicy(final TriggeringPolicy triggeringPolicy) {
        triggeringPolicy.initialize(this);
        final TriggeringPolicy policy = this.triggeringPolicy;
        int count = 0;
        boolean policyUpdated = false;
        // CAS loop: re-read this.triggeringPolicy each attempt in case of concurrent updates.
        do {
            ++count;
        } while (!(policyUpdated = triggeringPolicyUpdater.compareAndSet(this, this.triggeringPolicy, triggeringPolicy))
                && count < MAX_TRIES);
        if (policyUpdated) {
            if (triggeringPolicy instanceof LifeCycle) {
                ((LifeCycle) triggeringPolicy).start();
            }
            if (policy instanceof LifeCycle) {
                ((LifeCycle) policy).stop();
            }
        } else if (triggeringPolicy instanceof LifeCycle) {
            ((LifeCycle) triggeringPolicy).stop();
        }
    }
    /**
     * Atomically replaces the rollover strategy via a single compare-and-set attempt.
     * @param rolloverStrategy the replacement strategy
     */
    public void setRolloverStrategy(final RolloverStrategy rolloverStrategy) {
        rolloverStrategyUpdater.compareAndSet(this, this.rolloverStrategy, rolloverStrategy);
    }
    /**
     * Atomically replaces the pattern processor via a single compare-and-set attempt.
     * @param patternProcessor the replacement pattern processor
     */
    public void setPatternProcessor(final PatternProcessor patternProcessor) {
        patternProcessorUpdater.compareAndSet(this, this.patternProcessor, patternProcessor);
    }
    /**
     * Returns the triggering policy, unchecked-cast to the caller's expected type.
     * @param <T> TriggeringPolicy type expected by the caller
     * @return The TriggeringPolicy
     */
    @SuppressWarnings("unchecked")
    public <T extends TriggeringPolicy> T getTriggeringPolicy() {
        // TODO We could parameterize this class with a TriggeringPolicy instead of type casting here.
        return (T) this.triggeringPolicy;
    }
    /**
     * Package-private access for tests only.
     *
     * @return The semaphore that controls access to the rollover operation.
     */
    Semaphore getSemaphore() {
        return semaphore;
    }
    /**
     * Returns the rollover strategy in use by this manager.
     * @return The RolloverStrategy
     */
    public RolloverStrategy getRolloverStrategy() {
        return this.rolloverStrategy;
    }
    /**
     * Performs the actual rollover under the rollover semaphore: asks the strategy for a
     * RolloverDescription, closes the current stream, runs the synchronous action and, if
     * that succeeds, schedules the asynchronous action. Ownership of the semaphore permit is
     * transferred to the AsyncAction when one is scheduled (it releases on completion);
     * otherwise it is released here in the finally block.
     *
     * @param strategy the strategy producing the rollover description
     * @return true if the output stream was closed (a rollover description was produced)
     */
    private boolean rollover(final RolloverStrategy strategy) {
        boolean outputStreamClosed = false;
        try {
            // Block until the asynchronous operation is completed.
            semaphore.acquire();
        } catch (final InterruptedException e) {
            logError("Thread interrupted while attempting to check rollover", e);
            return outputStreamClosed;
        }
        // Tracks whether this method still owns the permit; set false once an AsyncAction
        // takes over responsibility for releasing it.
        boolean asyncActionStarted = true;
        try {
            final RolloverDescription descriptor = strategy.rollover(this);
            if (descriptor != null) {
                writeFooter();
                closeOutputStream();
                outputStreamClosed = true;
                boolean syncActionSuccess = true;
                if (descriptor.getSynchronous() != null) {
                    LOGGER.debug("RollingFileManager executing synchronous {}", descriptor.getSynchronous());
                    try {
                        syncActionSuccess = descriptor.getSynchronous().execute();
                    } catch (final Exception ex) {
                        syncActionSuccess = false;
                        logError("Caught error in synchronous task", ex);
                    }
                }
                if (syncActionSuccess && descriptor.getAsynchronous() != null) {
                    LOGGER.debug("RollingFileManager executing async {}", descriptor.getAsynchronous());
                    asyncExecutor.execute(new AsyncAction(descriptor.getAsynchronous(), this));
                    asyncActionStarted = false;
                }
            }
            return outputStreamClosed;
        } finally {
            if (asyncActionStarted) {
                semaphore.release();
            }
        }
    }
/**
* Performs actions asynchronously.
*/
private static class AsyncAction extends AbstractAction {
private final Action action;
private final RollingFileManager manager;
/**
* Constructor.
* @param act The action to perform.
* @param manager The manager.
*/
public AsyncAction(final Action act, final RollingFileManager manager) {
this.action = act;
this.manager = manager;
}
/**
* Executes an action.
*
* @return true if action was successful. A return value of false will cause
* the rollover to be aborted if possible.
* @throws java.io.IOException if IO error, a thrown exception will cause the rollover
* to be aborted if possible.
*/
@Override
public boolean execute() throws IOException {
try {
return action.execute();
} finally {
manager.semaphore.release();
}
}
/**
* Cancels the action if not already initialized or waits till completion.
*/
@Override
public void close() {
action.close();
}
/**
* Determines if action has been completed.
*
* @return true if action is complete.
*/
@Override
public boolean isComplete() {
return action.isComplete();
}
@Override
public String toString() {
final StringBuilder builder = new StringBuilder();
builder.append(super.toString());
builder.append("[action=");
builder.append(action);
builder.append(", manager=");
builder.append(manager);
builder.append(", isComplete()=");
builder.append(isComplete());
builder.append(", isInterrupted()=");
builder.append(isInterrupted());
builder.append("]");
return builder.toString();
}
}
/**
* Factory data.
*/
private static class FactoryData extends ConfigurationFactoryData {
private final String fileName;
private final String pattern;
private final boolean append;
private final boolean bufferedIO;
private final int bufferSize;
private final boolean immediateFlush;
private final boolean createOnDemand;
private final TriggeringPolicy policy;
private final RolloverStrategy strategy;
private final String advertiseURI;
private final Layout<? extends Serializable> layout;
private final String filePermissions;
private final String fileOwner;
private final String fileGroup;
/**
* Creates the data for the factory.
* @param pattern The pattern.
* @param append The append flag.
* @param bufferedIO The bufferedIO flag.
* @param advertiseURI
* @param layout The Layout.
* @param bufferSize the buffer size
* @param immediateFlush flush on every write or not
* @param createOnDemand true if you want to lazy-create the file (a.k.a. on-demand.)
* @param filePermissions File permissions
* @param fileOwner File owner
* @param fileGroup File group
* @param configuration The configuration
*/
public FactoryData(
final String fileName,
final String pattern,
final boolean append,
final boolean bufferedIO,
final TriggeringPolicy policy,
final RolloverStrategy strategy,
final String advertiseURI,
final Layout<? extends Serializable> layout,
final int bufferSize,
final boolean immediateFlush,
final boolean createOnDemand,
final String filePermissions,
final String fileOwner,
final String fileGroup,
final Configuration configuration) {
super(configuration);
this.fileName = fileName;
this.pattern = pattern;
this.append = append;
this.bufferedIO = bufferedIO;
this.bufferSize = bufferSize;
this.policy = policy;
this.strategy = strategy;
this.advertiseURI = advertiseURI;
this.layout = layout;
this.immediateFlush = immediateFlush;
this.createOnDemand = createOnDemand;
this.filePermissions = filePermissions;
this.fileOwner = fileOwner;
this.fileGroup = fileGroup;
}
public TriggeringPolicy getTriggeringPolicy() {
return this.policy;
}
public RolloverStrategy getRolloverStrategy() {
return this.strategy;
}
public String getPattern() {
return pattern;
}
@Override
public String toString() {
final StringBuilder builder = new StringBuilder();
builder.append(super.toString());
builder.append("[pattern=");
builder.append(pattern);
builder.append(", append=");
builder.append(append);
builder.append(", bufferedIO=");
builder.append(bufferedIO);
builder.append(", bufferSize=");
builder.append(bufferSize);
builder.append(", policy=");
builder.append(policy);
builder.append(", strategy=");
builder.append(strategy);
builder.append(", advertiseURI=");
builder.append(advertiseURI);
builder.append(", layout=");
builder.append(layout);
builder.append(", filePermissions=");
builder.append(filePermissions);
builder.append(", fileOwner=");
builder.append(fileOwner);
builder.append("]");
return builder.toString();
}
}
    /**
     * Updates the RollingFileManager's data during a reconfiguration. This method should be considered private.
     * It is not thread safe and calling it outside of a reconfiguration may lead to errors. This method may be
     * made protected in a future release.
     * @param data The data to update; must be a {@code FactoryData} instance.
     */
    @Override
    public void updateData(final Object data) {
        final FactoryData factoryData = (FactoryData) data;
        setRolloverStrategy(factoryData.getRolloverStrategy());
        // The new PatternProcessor is seeded from the old one to preserve its file times.
        setPatternProcessor(new PatternProcessor(factoryData.getPattern(), getPatternProcessor()));
        setTriggeringPolicy(factoryData.getTriggeringPolicy());
    }
    /**
     * Factory to create a RollingFileManager.
     */
    private static class RollingFileManagerFactory implements ManagerFactory<RollingFileManager, FactoryData> {
        /**
         * Creates a RollingFileManager: creates the target file (unless on-demand or
         * direct-write), sizes the write buffer, and wires up the manager.
         * @param name The name of the entity to manage.
         * @param data The data required to create the entity.
         * @return a RollingFileManager, or {@code null} if the file or stream could not be
         *         created.
         */
        @Override
        @SuppressFBWarnings(
                value = {"PATH_TRAVERSAL_IN", "PATH_TRAVERSAL_OUT"},
                justification = "The destination file should be specified in the configuration file.")
        public RollingFileManager createManager(final String name, final FactoryData data) {
            long size = 0;
            File file = null;
            // fileName == null means direct-write mode: no fixed file to pre-create.
            if (data.fileName != null) {
                file = new File(data.fileName);
                try {
                    FileUtils.makeParentDirs(file);
                    final boolean created = data.createOnDemand ? false : file.createNewFile();
                    LOGGER.trace("New file '{}' created = {}", name, created);
                } catch (final IOException ioe) {
                    LOGGER.error("Unable to create file " + name, ioe);
                    return null;
                }
                // When appending, start size accounting from the existing file length.
                size = data.append ? file.length() : 0;
            }
            try {
                final int actualSize = data.bufferedIO ? data.bufferSize : Constants.ENCODER_BYTE_BUFFER_SIZE;
                final ByteBuffer buffer = ByteBuffer.wrap(new byte[actualSize]);
                final OutputStream os = data.createOnDemand || data.fileName == null
                        ? null
                        : new FileOutputStream(data.fileName, data.append);
                // LOG4J2-531 create file first so time has valid value.
                final long initialTime = file == null || !file.exists() ? 0 : initialFileTime(file);
                // Only write the layout header into a brand-new (empty) file.
                final boolean writeHeader = file != null && file.exists() && file.length() == 0;
                final RollingFileManager rm = new RollingFileManager(
                        data.getLoggerContext(),
                        data.fileName,
                        data.pattern,
                        os,
                        data.append,
                        data.createOnDemand,
                        size,
                        initialTime,
                        data.policy,
                        data.strategy,
                        data.advertiseURI,
                        data.layout,
                        data.filePermissions,
                        data.fileOwner,
                        data.fileGroup,
                        writeHeader,
                        buffer);
                if (os != null && rm.isAttributeViewEnabled()) {
                    rm.defineAttributeView(file.toPath());
                }
                return rm;
            } catch (final IOException ex) {
                LOGGER.error("RollingFileManager (" + name + ") " + ex, ex);
            }
            return null;
        }
    }
static long initialFileTime(final File file) {
final Path path = file.toPath();
if (Files.exists(path)) {
try {
final BasicFileAttributes attrs = Files.readAttributes(path, BasicFileAttributes.class);
final FileTime fileTime = attrs.creationTime();
if (fileTime.compareTo(EPOCH) > 0) {
LOGGER.debug("Returning file creation time for {}", file.getAbsolutePath());
return alignMillisToSecond(fileTime.toMillis());
}
LOGGER.info("Unable to obtain file creation time for {}", file.getAbsolutePath());
} catch (final Exception ex) {
LOGGER.info(
"Unable to calculate file creation time for {}: {}", file.getAbsolutePath(), ex.getMessage());
}
}
return alignMillisToSecond(file.lastModified());
}
/**
* @see <a href="https://github.com/apache/logging-log4j2/issues/3068">Issue #3068</a>
*/
static long alignMillisToSecond(long millis) {
return Math.round(millis / 1000d) * 1000;
}
private static class EmptyQueue extends ArrayBlockingQueue<Runnable> {
/**
*
*/
private static final long serialVersionUID = 1L;
EmptyQueue() {
super(1);
}
@Override
public int remainingCapacity() {
return 0;
}
@Override
public boolean add(final Runnable runnable) {
throw new IllegalStateException("Queue is full");
}
@Override
public void put(final Runnable runnable) throws InterruptedException {
/* No point in going into a permanent wait */
throw new InterruptedException("Unable to insert into queue");
}
@Override
public boolean offer(final Runnable runnable, final long timeout, final TimeUnit timeUnit)
throws InterruptedException {
Thread.sleep(timeUnit.toMillis(timeout));
return false;
}
@Override
public boolean addAll(final Collection<? extends Runnable> collection) {
if (collection.size() > 0) {
throw new IllegalArgumentException("Too many items in collection");
}
return false;
}
}
}
|
googleapis/google-cloud-java | 37,436 | java-contact-center-insights/proto-google-cloud-contact-center-insights-v1/src/main/java/com/google/cloud/contactcenterinsights/v1/UpdateQaQuestionRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/contactcenterinsights/v1/contact_center_insights.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.contactcenterinsights.v1;
/**
*
*
* <pre>
* The request for updating a QaQuestion.
* </pre>
*
* Protobuf type {@code google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest}
*/
public final class UpdateQaQuestionRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest)
UpdateQaQuestionRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateQaQuestionRequest.newBuilder() to construct.
private UpdateQaQuestionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateQaQuestionRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateQaQuestionRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
.internal_static_google_cloud_contactcenterinsights_v1_UpdateQaQuestionRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
.internal_static_google_cloud_contactcenterinsights_v1_UpdateQaQuestionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest.class,
com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest.Builder.class);
}
private int bitField0_;
public static final int QA_QUESTION_FIELD_NUMBER = 1;
private com.google.cloud.contactcenterinsights.v1.QaQuestion qaQuestion_;
/**
*
*
* <pre>
* Required. The QaQuestion to update.
* </pre>
*
* <code>
* .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the qaQuestion field is set.
*/
@java.lang.Override
public boolean hasQaQuestion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The QaQuestion to update.
* </pre>
*
* <code>
* .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The qaQuestion.
*/
@java.lang.Override
public com.google.cloud.contactcenterinsights.v1.QaQuestion getQaQuestion() {
return qaQuestion_ == null
? com.google.cloud.contactcenterinsights.v1.QaQuestion.getDefaultInstance()
: qaQuestion_;
}
/**
*
*
* <pre>
* Required. The QaQuestion to update.
* </pre>
*
* <code>
* .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.contactcenterinsights.v1.QaQuestionOrBuilder getQaQuestionOrBuilder() {
return qaQuestion_ == null
? com.google.cloud.contactcenterinsights.v1.QaQuestion.getDefaultInstance()
: qaQuestion_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. All possible fields can be
* updated by passing `*`, or a subset of the following updateable fields can
* be provided:
*
* * `abbreviation`
* * `answer_choices`
* * `answer_instructions`
* * `order`
* * `question_body`
* * `tags`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. All possible fields can be
* updated by passing `*`, or a subset of the following updateable fields can
* be provided:
*
* * `abbreviation`
* * `answer_choices`
* * `answer_instructions`
* * `order`
* * `question_body`
* * `tags`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. All possible fields can be
* updated by passing `*`, or a subset of the following updateable fields can
* be provided:
*
* * `abbreviation`
* * `answer_choices`
* * `answer_instructions`
* * `order`
* * `question_body`
* * `tags`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  /**
   * Always reports initialized: this message declares no required-for-serialization fields,
   * so the check trivially succeeds and the result is memoized.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /** Serializes the fields that are present (per the bitField0_ presence bits) plus unknown fields. */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Bit 0x1 = qa_question present, bit 0x2 = update_mask present.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getQaQuestion());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  /** Computes (and memoizes in {@code memoizedSize}) the serialized byte size of this message. */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    // -1 is the sentinel for "not yet computed".
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getQaQuestion());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Field-wise equality: presence flags must match and, when present, the field values must
   * be equal; unknown fields are compared as well.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest other =
        (com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest) obj;
    if (hasQaQuestion() != other.hasQaQuestion()) return false;
    if (hasQaQuestion()) {
      if (!getQaQuestion().equals(other.getQaQuestion())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash consistent with {@link #equals}: seeded from the descriptor, folding in each
   * present field's number and value hash; the result is memoized.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasQaQuestion()) {
      hash = (37 * hash) + QA_QUESTION_FIELD_NUMBER;
      hash = (53 * hash) + getQaQuestion().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods. newBuilder()/toBuilder() route through the
// DEFAULT_INSTANCE so an empty builder and a populated one share one code path.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated with the given prototype's fields.
public static Builder newBuilder(
    com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh empty Builder; merging it would be a no-op.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * The request for updating a QaQuestion.
 * </pre>
 *
 * Protobuf type {@code google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest)
    com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
        .internal_static_google_cloud_contactcenterinsights_v1_UpdateQaQuestionRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
        .internal_static_google_cloud_contactcenterinsights_v1_UpdateQaQuestionRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest.class,
            com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest.Builder.class);
  }
  // Construct using
  // com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  // Eagerly creates the nested field builders when the runtime requests it
  // (alwaysUseFieldBuilders is a test-only protobuf flag).
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getQaQuestionFieldBuilder();
      getUpdateMaskFieldBuilder();
    }
  }
  @java.lang.Override
  public Builder clear() {
    super.clear();
    // Reset presence bits and both singular message fields; dispose() detaches
    // any nested builder so stale edits cannot leak into a cleared Builder.
    bitField0_ = 0;
    qaQuestion_ = null;
    if (qaQuestionBuilder_ != null) {
      qaQuestionBuilder_.dispose();
      qaQuestionBuilder_ = null;
    }
    updateMask_ = null;
    if (updateMaskBuilder_ != null) {
      updateMaskBuilder_.dispose();
      updateMaskBuilder_ = null;
    }
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
        .internal_static_google_cloud_contactcenterinsights_v1_UpdateQaQuestionRequest_descriptor;
  }
  @java.lang.Override
  public com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest
      getDefaultInstanceForType() {
    return com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest.getDefaultInstance();
  }
  @java.lang.Override
  public com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest build() {
    com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest buildPartial() {
    com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest result =
        new com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }
  // Copies set fields into the result message and mirrors this Builder's
  // presence bits (0x1 = qa_question, 0x2 = update_mask) onto it.
  private void buildPartial0(
      com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest result) {
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.qaQuestion_ = qaQuestionBuilder_ == null ? qaQuestion_ : qaQuestionBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
      to_bitField0_ |= 0x00000002;
    }
    result.bitField0_ |= to_bitField0_;
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    // Fast path for same-type merges; otherwise fall back to the reflective merge.
    if (other instanceof com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest) {
      return mergeFrom((com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  // Field-wise merge: only fields present on `other` overwrite/merge into this Builder.
  public Builder mergeFrom(
      com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest other) {
    if (other
        == com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest.getDefaultInstance())
      return this;
    if (other.hasQaQuestion()) {
      mergeQaQuestion(other.getQaQuestion());
    }
    if (other.hasUpdateMask()) {
      mergeUpdateMask(other.getUpdateMask());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    // No required fields in this proto3 message.
    return true;
  }
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        // Tags encode (field_number << 3) | wire_type: 10 = field 1, 18 = field 2
        // (both length-delimited messages).
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              input.readMessage(getQaQuestionFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }
  // Presence bits for singular fields: 0x1 = qa_question, 0x2 = update_mask.
  private int bitField0_;
  private com.google.cloud.contactcenterinsights.v1.QaQuestion qaQuestion_;
  // Lazily-created nested builder; when non-null it owns the field value and
  // qaQuestion_ is ignored.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.contactcenterinsights.v1.QaQuestion,
          com.google.cloud.contactcenterinsights.v1.QaQuestion.Builder,
          com.google.cloud.contactcenterinsights.v1.QaQuestionOrBuilder>
      qaQuestionBuilder_;
  /**
   *
   *
   * <pre>
   * Required. The QaQuestion to update.
   * </pre>
   *
   * <code>
   * .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the qaQuestion field is set.
   */
  public boolean hasQaQuestion() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The QaQuestion to update.
   * </pre>
   *
   * <code>
   * .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The qaQuestion.
   */
  public com.google.cloud.contactcenterinsights.v1.QaQuestion getQaQuestion() {
    if (qaQuestionBuilder_ == null) {
      return qaQuestion_ == null
          ? com.google.cloud.contactcenterinsights.v1.QaQuestion.getDefaultInstance()
          : qaQuestion_;
    } else {
      return qaQuestionBuilder_.getMessage();
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The QaQuestion to update.
   * </pre>
   *
   * <code>
   * .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setQaQuestion(com.google.cloud.contactcenterinsights.v1.QaQuestion value) {
    if (qaQuestionBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      qaQuestion_ = value;
    } else {
      qaQuestionBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The QaQuestion to update.
   * </pre>
   *
   * <code>
   * .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setQaQuestion(
      com.google.cloud.contactcenterinsights.v1.QaQuestion.Builder builderForValue) {
    if (qaQuestionBuilder_ == null) {
      qaQuestion_ = builderForValue.build();
    } else {
      qaQuestionBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The QaQuestion to update.
   * </pre>
   *
   * <code>
   * .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  // Merges `value` into an existing non-default field value; otherwise replaces it.
  public Builder mergeQaQuestion(com.google.cloud.contactcenterinsights.v1.QaQuestion value) {
    if (qaQuestionBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)
          && qaQuestion_ != null
          && qaQuestion_
              != com.google.cloud.contactcenterinsights.v1.QaQuestion.getDefaultInstance()) {
        getQaQuestionBuilder().mergeFrom(value);
      } else {
        qaQuestion_ = value;
      }
    } else {
      qaQuestionBuilder_.mergeFrom(value);
    }
    if (qaQuestion_ != null) {
      bitField0_ |= 0x00000001;
      onChanged();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The QaQuestion to update.
   * </pre>
   *
   * <code>
   * .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder clearQaQuestion() {
    bitField0_ = (bitField0_ & ~0x00000001);
    qaQuestion_ = null;
    if (qaQuestionBuilder_ != null) {
      qaQuestionBuilder_.dispose();
      qaQuestionBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The QaQuestion to update.
   * </pre>
   *
   * <code>
   * .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.contactcenterinsights.v1.QaQuestion.Builder getQaQuestionBuilder() {
    bitField0_ |= 0x00000001;
    onChanged();
    return getQaQuestionFieldBuilder().getBuilder();
  }
  /**
   *
   *
   * <pre>
   * Required. The QaQuestion to update.
   * </pre>
   *
   * <code>
   * .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.contactcenterinsights.v1.QaQuestionOrBuilder getQaQuestionOrBuilder() {
    if (qaQuestionBuilder_ != null) {
      return qaQuestionBuilder_.getMessageOrBuilder();
    } else {
      return qaQuestion_ == null
          ? com.google.cloud.contactcenterinsights.v1.QaQuestion.getDefaultInstance()
          : qaQuestion_;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The QaQuestion to update.
   * </pre>
   *
   * <code>
   * .google.cloud.contactcenterinsights.v1.QaQuestion qa_question = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  // Creates the nested builder on first use; ownership of the current value
  // transfers to it, so the plain field is nulled afterwards.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.contactcenterinsights.v1.QaQuestion,
          com.google.cloud.contactcenterinsights.v1.QaQuestion.Builder,
          com.google.cloud.contactcenterinsights.v1.QaQuestionOrBuilder>
      getQaQuestionFieldBuilder() {
    if (qaQuestionBuilder_ == null) {
      qaQuestionBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.contactcenterinsights.v1.QaQuestion,
              com.google.cloud.contactcenterinsights.v1.QaQuestion.Builder,
              com.google.cloud.contactcenterinsights.v1.QaQuestionOrBuilder>(
              getQaQuestion(), getParentForChildren(), isClean());
      qaQuestion_ = null;
    }
    return qaQuestionBuilder_;
  }
  private com.google.protobuf.FieldMask updateMask_;
  // Lazily-created nested builder for update_mask; see qaQuestionBuilder_ note.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.FieldMask,
          com.google.protobuf.FieldMask.Builder,
          com.google.protobuf.FieldMaskOrBuilder>
      updateMaskBuilder_;
  /**
   *
   *
   * <pre>
   * Required. The list of fields to be updated. All possible fields can be
   * updated by passing `*`, or a subset of the following updateable fields can
   * be provided:
   *
   * * `abbreviation`
   * * `answer_choices`
   * * `answer_instructions`
   * * `order`
   * * `question_body`
   * * `tags`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The list of fields to be updated. All possible fields can be
   * updated by passing `*`, or a subset of the following updateable fields can
   * be provided:
   *
   * * `abbreviation`
   * * `answer_choices`
   * * `answer_instructions`
   * * `order`
   * * `question_body`
   * * `tags`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The updateMask.
   */
  public com.google.protobuf.FieldMask getUpdateMask() {
    if (updateMaskBuilder_ == null) {
      return updateMask_ == null
          ? com.google.protobuf.FieldMask.getDefaultInstance()
          : updateMask_;
    } else {
      return updateMaskBuilder_.getMessage();
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The list of fields to be updated. All possible fields can be
   * updated by passing `*`, or a subset of the following updateable fields can
   * be provided:
   *
   * * `abbreviation`
   * * `answer_choices`
   * * `answer_instructions`
   * * `order`
   * * `question_body`
   * * `tags`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
    if (updateMaskBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      updateMask_ = value;
    } else {
      updateMaskBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The list of fields to be updated. All possible fields can be
   * updated by passing `*`, or a subset of the following updateable fields can
   * be provided:
   *
   * * `abbreviation`
   * * `answer_choices`
   * * `answer_instructions`
   * * `order`
   * * `question_body`
   * * `tags`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
    if (updateMaskBuilder_ == null) {
      updateMask_ = builderForValue.build();
    } else {
      updateMaskBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The list of fields to be updated. All possible fields can be
   * updated by passing `*`, or a subset of the following updateable fields can
   * be provided:
   *
   * * `abbreviation`
   * * `answer_choices`
   * * `answer_instructions`
   * * `order`
   * * `question_body`
   * * `tags`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  // Merges `value` into an existing non-default mask; otherwise replaces it.
  public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
    if (updateMaskBuilder_ == null) {
      if (((bitField0_ & 0x00000002) != 0)
          && updateMask_ != null
          && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
        getUpdateMaskBuilder().mergeFrom(value);
      } else {
        updateMask_ = value;
      }
    } else {
      updateMaskBuilder_.mergeFrom(value);
    }
    if (updateMask_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The list of fields to be updated. All possible fields can be
   * updated by passing `*`, or a subset of the following updateable fields can
   * be provided:
   *
   * * `abbreviation`
   * * `answer_choices`
   * * `answer_instructions`
   * * `order`
   * * `question_body`
   * * `tags`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder clearUpdateMask() {
    bitField0_ = (bitField0_ & ~0x00000002);
    updateMask_ = null;
    if (updateMaskBuilder_ != null) {
      updateMaskBuilder_.dispose();
      updateMaskBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The list of fields to be updated. All possible fields can be
   * updated by passing `*`, or a subset of the following updateable fields can
   * be provided:
   *
   * * `abbreviation`
   * * `answer_choices`
   * * `answer_instructions`
   * * `order`
   * * `question_body`
   * * `tags`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getUpdateMaskFieldBuilder().getBuilder();
  }
  /**
   *
   *
   * <pre>
   * Required. The list of fields to be updated. All possible fields can be
   * updated by passing `*`, or a subset of the following updateable fields can
   * be provided:
   *
   * * `abbreviation`
   * * `answer_choices`
   * * `answer_instructions`
   * * `order`
   * * `question_body`
   * * `tags`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    if (updateMaskBuilder_ != null) {
      return updateMaskBuilder_.getMessageOrBuilder();
    } else {
      return updateMask_ == null
          ? com.google.protobuf.FieldMask.getDefaultInstance()
          : updateMask_;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The list of fields to be updated. All possible fields can be
   * updated by passing `*`, or a subset of the following updateable fields can
   * be provided:
   *
   * * `abbreviation`
   * * `answer_choices`
   * * `answer_instructions`
   * * `order`
   * * `question_body`
   * * `tags`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  // Creates the nested builder on first use; see getQaQuestionFieldBuilder().
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.FieldMask,
          com.google.protobuf.FieldMask.Builder,
          com.google.protobuf.FieldMaskOrBuilder>
      getUpdateMaskFieldBuilder() {
    if (updateMaskBuilder_ == null) {
      updateMaskBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.protobuf.FieldMask,
              com.google.protobuf.FieldMask.Builder,
              com.google.protobuf.FieldMaskOrBuilder>(
              getUpdateMask(), getParentForChildren(), isClean());
      updateMask_ = null;
    }
    return updateMaskBuilder_;
  }
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest)
// Shared immutable default instance; also the source of empty builders.
private static final com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest
    DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest();
}
public static com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire parser: parses via a Builder and attaches the partially-built message
// to any InvalidProtocolBufferException so callers can inspect what was read.
private static final com.google.protobuf.Parser<UpdateQaQuestionRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateQaQuestionRequest>() {
      @java.lang.Override
      public UpdateQaQuestionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<UpdateQaQuestionRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateQaQuestionRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.contactcenterinsights.v1.UpdateQaQuestionRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataform/v1beta1/dataform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataform.v1beta1;
/**
*
*
* <pre>
* Client-facing representation of a file search response.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1beta1.SearchFilesResponse}
*/
public final class SearchFilesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataform.v1beta1.SearchFilesResponse)
SearchFilesResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use SearchFilesResponse.newBuilder() to construct.
  private SearchFilesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance: empty result list and empty page token.
  private SearchFilesResponse() {
    searchResults_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SearchFilesResponse();
  }
  // Descriptor plumbing generated from google/cloud/dataform/v1beta1/dataform.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataform.v1beta1.DataformProto
        .internal_static_google_cloud_dataform_v1beta1_SearchFilesResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataform.v1beta1.DataformProto
        .internal_static_google_cloud_dataform_v1beta1_SearchFilesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataform.v1beta1.SearchFilesResponse.class,
            com.google.cloud.dataform.v1beta1.SearchFilesResponse.Builder.class);
  }
  public static final int SEARCH_RESULTS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.dataform.v1beta1.SearchResult> searchResults_;
  /**
   *
   *
   * <pre>
   * List of matched results.
   * </pre>
   *
   * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.dataform.v1beta1.SearchResult> getSearchResultsList() {
    return searchResults_;
  }
  /**
   *
   *
   * <pre>
   * List of matched results.
   * </pre>
   *
   * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.dataform.v1beta1.SearchResultOrBuilder>
      getSearchResultsOrBuilderList() {
    return searchResults_;
  }
  /**
   *
   *
   * <pre>
   * List of matched results.
   * </pre>
   *
   * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
   */
  @java.lang.Override
  public int getSearchResultsCount() {
    return searchResults_.size();
  }
  /**
   *
   *
   * <pre>
   * List of matched results.
   * </pre>
   *
   * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.dataform.v1beta1.SearchResult getSearchResults(int index) {
    return searchResults_.get(index);
  }
  /**
   *
   *
   * <pre>
   * List of matched results.
   * </pre>
   *
   * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.dataform.v1beta1.SearchResultOrBuilder getSearchResultsOrBuilder(
      int index) {
    return searchResults_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stored as either String or ByteString; converted lazily and cached in place.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * Optional. A token, which can be sent as `page_token` to retrieve the next
   * page. If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access after parsing: decode UTF-8 bytes and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. A token, which can be sent as `page_token` to retrieve the next
   * page. If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode to UTF-8 and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields, so initialization always succeeds; memoize the answer.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes field 1 (repeated search_results), field 2 (next_page_token when
  // non-empty), then any unknown fields carried through parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < searchResults_.size(); i++) {
      output.writeMessage(1, searchResults_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the result; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < searchResults_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, searchResults_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over both fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataform.v1beta1.SearchFilesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.dataform.v1beta1.SearchFilesResponse other =
        (com.google.cloud.dataform.v1beta1.SearchFilesResponse) obj;
    if (!getSearchResultsList().equals(other.getSearchResultsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (message is immutable) and folds in the descriptor plus
  // each set field keyed by its field number, consistent with equals().
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getSearchResultsCount() > 0) {
      hash = (37 * hash) + SEARCH_RESULTS_FIELD_NUMBER;
      hash = (53 * hash) + getSearchResultsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Generated parse entry points: byte-buffer/ByteString/byte[] overloads use
  // PARSER directly; stream overloads go through GeneratedMessageV3 helpers.
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the payload.
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods; empty builders are derived from DEFAULT_INSTANCE.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with the given prototype's fields.
  public static Builder newBuilder(
      com.google.cloud.dataform.v1beta1.SearchFilesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty Builder; merging it would be a no-op.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Client-facing representation of a file search response.
   * </pre>
   *
   * Protobuf type {@code google.cloud.dataform.v1beta1.SearchFilesResponse}
   */
  // NOTE: generated by the protocol buffer compiler from the .proto definition.
  // Do not hand-edit; regenerate instead. Field presence/mutability is tracked in
  // bitField0_: bit 0x00000001 = search_results list is mutable, 0x00000002 =
  // next_page_token has been set.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1beta1.SearchFilesResponse)
      com.google.cloud.dataform.v1beta1.SearchFilesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataform.v1beta1.DataformProto
          .internal_static_google_cloud_dataform_v1beta1_SearchFilesResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataform.v1beta1.DataformProto
          .internal_static_google_cloud_dataform_v1beta1_SearchFilesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataform.v1beta1.SearchFilesResponse.class,
              com.google.cloud.dataform.v1beta1.SearchFilesResponse.Builder.class);
    }
    // Construct using com.google.cloud.dataform.v1beta1.SearchFilesResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets both fields to their defaults; clears either the plain list or the
    // nested field builder, whichever is currently active.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (searchResultsBuilder_ == null) {
        searchResults_ = java.util.Collections.emptyList();
      } else {
        searchResults_ = null;
        searchResultsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataform.v1beta1.DataformProto
          .internal_static_google_cloud_dataform_v1beta1_SearchFilesResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dataform.v1beta1.SearchFilesResponse getDefaultInstanceForType() {
      return com.google.cloud.dataform.v1beta1.SearchFilesResponse.getDefaultInstance();
    }
    // Builds the message, throwing if required fields are missing (none here, so
    // isInitialized() always returns true and this never throws in practice).
    @java.lang.Override
    public com.google.cloud.dataform.v1beta1.SearchFilesResponse build() {
      com.google.cloud.dataform.v1beta1.SearchFilesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.dataform.v1beta1.SearchFilesResponse buildPartial() {
      com.google.cloud.dataform.v1beta1.SearchFilesResponse result =
          new com.google.cloud.dataform.v1beta1.SearchFilesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated field into the result; freezes the plain list as
    // unmodifiable (and clears the mutability bit) so the built message is immutable.
    private void buildPartialRepeatedFields(
        com.google.cloud.dataform.v1beta1.SearchFilesResponse result) {
      if (searchResultsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          searchResults_ = java.util.Collections.unmodifiableList(searchResults_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.searchResults_ = searchResults_;
      } else {
        result.searchResults_ = searchResultsBuilder_.build();
      }
    }
    // Transfers next_page_token only when its presence bit (0x2) is set.
    private void buildPartial0(com.google.cloud.dataform.v1beta1.SearchFilesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Type-dispatching merge: routes to the strongly typed overload when possible,
    // otherwise falls back to the reflective merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataform.v1beta1.SearchFilesResponse) {
        return mergeFrom((com.google.cloud.dataform.v1beta1.SearchFilesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges another SearchFilesResponse into this builder: appends its search
    // results (aliasing the other message's immutable list when ours is empty, as
    // an allocation-saving fast path) and overwrites next_page_token if non-empty.
    public Builder mergeFrom(com.google.cloud.dataform.v1beta1.SearchFilesResponse other) {
      if (other == com.google.cloud.dataform.v1beta1.SearchFilesResponse.getDefaultInstance())
        return this;
      if (searchResultsBuilder_ == null) {
        if (!other.searchResults_.isEmpty()) {
          if (searchResults_.isEmpty()) {
            searchResults_ = other.searchResults_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSearchResultsIsMutable();
            searchResults_.addAll(other.searchResults_);
          }
          onChanged();
        }
      } else {
        if (!other.searchResults_.isEmpty()) {
          if (searchResultsBuilder_.isEmpty()) {
            searchResultsBuilder_.dispose();
            searchResultsBuilder_ = null;
            searchResults_ = other.searchResults_;
            bitField0_ = (bitField0_ & ~0x00000001);
            searchResultsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getSearchResultsFieldBuilder()
                    : null;
          } else {
            searchResultsBuilder_.addAllMessages(other.searchResults_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields in this message, so any state is "initialized".
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 10 = search_results element (field 1, message),
    // tag 18 = next_page_token (field 2, string); unknown fields are preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.dataform.v1beta1.SearchResult m =
                    input.readMessage(
                        com.google.cloud.dataform.v1beta1.SearchResult.parser(), extensionRegistry);
                if (searchResultsBuilder_ == null) {
                  ensureSearchResultsIsMutable();
                  searchResults_.add(m);
                } else {
                  searchResultsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence/mutability bits -- see the class-level note.
    private int bitField0_;
    private java.util.List<com.google.cloud.dataform.v1beta1.SearchResult> searchResults_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: replaces a shared/immutable list with a private
    // ArrayList before the first mutation, then marks it mutable via bit 0x1.
    private void ensureSearchResultsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        searchResults_ =
            new java.util.ArrayList<com.google.cloud.dataform.v1beta1.SearchResult>(searchResults_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created nested-builder support for search_results; while non-null it,
    // not searchResults_, is the source of truth for the field.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dataform.v1beta1.SearchResult,
            com.google.cloud.dataform.v1beta1.SearchResult.Builder,
            com.google.cloud.dataform.v1beta1.SearchResultOrBuilder>
        searchResultsBuilder_;
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public java.util.List<com.google.cloud.dataform.v1beta1.SearchResult> getSearchResultsList() {
      if (searchResultsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(searchResults_);
      } else {
        return searchResultsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public int getSearchResultsCount() {
      if (searchResultsBuilder_ == null) {
        return searchResults_.size();
      } else {
        return searchResultsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public com.google.cloud.dataform.v1beta1.SearchResult getSearchResults(int index) {
      if (searchResultsBuilder_ == null) {
        return searchResults_.get(index);
      } else {
        return searchResultsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public Builder setSearchResults(
        int index, com.google.cloud.dataform.v1beta1.SearchResult value) {
      if (searchResultsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSearchResultsIsMutable();
        searchResults_.set(index, value);
        onChanged();
      } else {
        searchResultsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public Builder setSearchResults(
        int index, com.google.cloud.dataform.v1beta1.SearchResult.Builder builderForValue) {
      if (searchResultsBuilder_ == null) {
        ensureSearchResultsIsMutable();
        searchResults_.set(index, builderForValue.build());
        onChanged();
      } else {
        searchResultsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public Builder addSearchResults(com.google.cloud.dataform.v1beta1.SearchResult value) {
      if (searchResultsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSearchResultsIsMutable();
        searchResults_.add(value);
        onChanged();
      } else {
        searchResultsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public Builder addSearchResults(
        int index, com.google.cloud.dataform.v1beta1.SearchResult value) {
      if (searchResultsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSearchResultsIsMutable();
        searchResults_.add(index, value);
        onChanged();
      } else {
        searchResultsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public Builder addSearchResults(
        com.google.cloud.dataform.v1beta1.SearchResult.Builder builderForValue) {
      if (searchResultsBuilder_ == null) {
        ensureSearchResultsIsMutable();
        searchResults_.add(builderForValue.build());
        onChanged();
      } else {
        searchResultsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public Builder addSearchResults(
        int index, com.google.cloud.dataform.v1beta1.SearchResult.Builder builderForValue) {
      if (searchResultsBuilder_ == null) {
        ensureSearchResultsIsMutable();
        searchResults_.add(index, builderForValue.build());
        onChanged();
      } else {
        searchResultsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public Builder addAllSearchResults(
        java.lang.Iterable<? extends com.google.cloud.dataform.v1beta1.SearchResult> values) {
      if (searchResultsBuilder_ == null) {
        ensureSearchResultsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, searchResults_);
        onChanged();
      } else {
        searchResultsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public Builder clearSearchResults() {
      if (searchResultsBuilder_ == null) {
        searchResults_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        searchResultsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public Builder removeSearchResults(int index) {
      if (searchResultsBuilder_ == null) {
        ensureSearchResultsIsMutable();
        searchResults_.remove(index);
        onChanged();
      } else {
        searchResultsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public com.google.cloud.dataform.v1beta1.SearchResult.Builder getSearchResultsBuilder(
        int index) {
      return getSearchResultsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public com.google.cloud.dataform.v1beta1.SearchResultOrBuilder getSearchResultsOrBuilder(
        int index) {
      if (searchResultsBuilder_ == null) {
        return searchResults_.get(index);
      } else {
        return searchResultsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.dataform.v1beta1.SearchResultOrBuilder>
        getSearchResultsOrBuilderList() {
      if (searchResultsBuilder_ != null) {
        return searchResultsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(searchResults_);
      }
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public com.google.cloud.dataform.v1beta1.SearchResult.Builder addSearchResultsBuilder() {
      return getSearchResultsFieldBuilder()
          .addBuilder(com.google.cloud.dataform.v1beta1.SearchResult.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public com.google.cloud.dataform.v1beta1.SearchResult.Builder addSearchResultsBuilder(
        int index) {
      return getSearchResultsFieldBuilder()
          .addBuilder(index, com.google.cloud.dataform.v1beta1.SearchResult.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of matched results.
     * </pre>
     *
     * <code>repeated .google.cloud.dataform.v1beta1.SearchResult search_results = 1;</code>
     */
    public java.util.List<com.google.cloud.dataform.v1beta1.SearchResult.Builder>
        getSearchResultsBuilderList() {
      return getSearchResultsFieldBuilder().getBuilderList();
    }
    // Lazily instantiates the repeated-field builder; from then on searchResults_
    // is nulled and the builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dataform.v1beta1.SearchResult,
            com.google.cloud.dataform.v1beta1.SearchResult.Builder,
            com.google.cloud.dataform.v1beta1.SearchResultOrBuilder>
        getSearchResultsFieldBuilder() {
      if (searchResultsBuilder_ == null) {
        searchResultsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.dataform.v1beta1.SearchResult,
                com.google.cloud.dataform.v1beta1.SearchResult.Builder,
                com.google.cloud.dataform.v1beta1.SearchResultOrBuilder>(
                searchResults_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        searchResults_ = null;
      }
      return searchResultsBuilder_;
    }
    // Holds either a String or a ByteString; converted lazily and cached in
    // whichever representation was last requested (standard protobuf string field).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * Optional. A token, which can be sent as `page_token` to retrieve the next
     * page. If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. A token, which can be sent as `page_token` to retrieve the next
     * page. If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. A token, which can be sent as `page_token` to retrieve the next
     * page. If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A token, which can be sent as `page_token` to retrieve the next
     * page. If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A token, which can be sent as `page_token` to retrieve the next
     * page. If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1beta1.SearchFilesResponse)
  }
// @@protoc_insertion_point(class_scope:google.cloud.dataform.v1beta1.SearchFilesResponse)
  // Singleton empty instance, created eagerly at class-initialization time.
  private static final com.google.cloud.dataform.v1beta1.SearchFilesResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataform.v1beta1.SearchFilesResponse();
  }
  // Returns the shared immutable default (all-fields-empty) instance.
  public static com.google.cloud.dataform.v1beta1.SearchFilesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser: delegates to Builder.mergeFrom and converts every failure mode
  // into InvalidProtocolBufferException, attaching the partially parsed message.
  private static final com.google.protobuf.Parser<SearchFilesResponse> PARSER =
      new com.google.protobuf.AbstractParser<SearchFilesResponse>() {
        @java.lang.Override
        public SearchFilesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<SearchFilesResponse> parser() {
    return PARSER;
  }
  // Instance-level parser accessor required by the Message interface.
  @java.lang.Override
  public com.google.protobuf.Parser<SearchFilesResponse> getParserForType() {
    return PARSER;
  }
  // Instance-level default-instance accessor required by the Message interface.
  @java.lang.Override
  public com.google.cloud.dataform.v1beta1.SearchFilesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/fineract | 37,500 | fineract-provider/src/main/java/org/apache/fineract/portfolio/loanaccount/api/LoanChargesApiResource.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.portfolio.loanaccount.api;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.ArraySchema;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.DELETE;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.PUT;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.QueryParam;
import jakarta.ws.rs.core.Context;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.UriInfo;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import lombok.RequiredArgsConstructor;
import org.apache.fineract.commands.domain.CommandWrapper;
import org.apache.fineract.commands.service.CommandWrapperBuilder;
import org.apache.fineract.commands.service.PortfolioCommandSourceWritePlatformService;
import org.apache.fineract.infrastructure.core.api.ApiRequestParameterHelper;
import org.apache.fineract.infrastructure.core.data.CommandProcessingResult;
import org.apache.fineract.infrastructure.core.domain.ExternalId;
import org.apache.fineract.infrastructure.core.exception.UnrecognizedQueryParamException;
import org.apache.fineract.infrastructure.core.serialization.ApiRequestJsonSerializationSettings;
import org.apache.fineract.infrastructure.core.serialization.DefaultToApiJsonSerializer;
import org.apache.fineract.infrastructure.core.service.CommandParameterUtil;
import org.apache.fineract.infrastructure.core.service.ExternalIdFactory;
import org.apache.fineract.infrastructure.security.service.PlatformSecurityContext;
import org.apache.fineract.portfolio.charge.data.ChargeData;
import org.apache.fineract.portfolio.charge.domain.ChargeTimeType;
import org.apache.fineract.portfolio.charge.exception.LoanChargeNotFoundException;
import org.apache.fineract.portfolio.charge.service.ChargeReadPlatformService;
import org.apache.fineract.portfolio.loanaccount.data.LoanChargeData;
import org.apache.fineract.portfolio.loanaccount.data.LoanInstallmentChargeData;
import org.apache.fineract.portfolio.loanaccount.service.LoanChargeReadPlatformService;
import org.apache.fineract.portfolio.loanaccount.service.LoanReadPlatformService;
import org.springframework.stereotype.Component;
@Path("/v1/loans")
@Component
@Tag(name = "Loan Charges", description = "Its typical for MFIs to add extra costs for their loan products. They can be either Fees or Penalties.\n"
+ "\n"
+ "Loan Charges are instances of Charges and represent either fees and penalties for loan products. Refer Charges for documentation of the various properties of a charge, Only additional properties ( specific to the context of a Charge being associated with a Loan) are described here")
@RequiredArgsConstructor
public class LoanChargesApiResource {
    // Values accepted by the "command" query parameter of the POST endpoints below.
    public static final String COMMAND_PAY = "pay";
    public static final String COMMAND_WAIVE = "waive";
    public static final String COMMAND_ADJUSTMENT = "adjustment";
    public static final String COMMAND_DEACTIVATE_OVERDUE = "deactivateOverdue";
    // Whitelist of loan-charge fields that partial-response serialization
    // (?fields=...) may project from LoanChargeData.
    private static final Set<String> RESPONSE_DATA_PARAMETERS = new HashSet<>(
            Arrays.asList("id", "chargeId", "name", "penalty", "chargeTimeType", "dueAsOfDate", "chargeCalculationType", "percentage",
                    "amountPercentageAppliedTo", "currency", "amountWaived", "amountWrittenOff", "amountOutstanding", "amountOrPercentage",
                    "amount", "amountPaid", "chargeOptions", "installmentChargeData", "externalId"));
    // Permission resource name checked before reads (see PlatformSecurityContext usage).
    private static final String RESOURCE_NAME_FOR_PERMISSIONS = "LOAN";
    // Collaborators injected via the Lombok-generated constructor (@RequiredArgsConstructor).
    private final PlatformSecurityContext context;
    private final ChargeReadPlatformService chargeReadPlatformService;
    private final LoanChargeReadPlatformService loanChargeReadPlatformService;
    private final DefaultToApiJsonSerializer<LoanChargeData> toApiJsonSerializer;
    private final ApiRequestParameterHelper apiRequestParameterHelper;
    private final PortfolioCommandSourceWritePlatformService commandsSourceWritePlatformService;
    private final LoanReadPlatformService loanReadPlatformService;
    /**
     * GET /loans/{loanId}/charges — lists all charges of the loan identified by its primary id.
     * Delegates to a three-argument overload (defined elsewhere in this class) with a null
     * external id.
     */
    @GET
    @Path("{loanId}/charges")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    @Operation(summary = "List Loan Charges", description = "It lists all the Loan Charges specific to a Loan \n\n" + "Example Requests:\n"
            + "\n" + "loans/1/charges\n" + "\n" + "\n" + "loans/1/charges?fields=name,amountOrPercentage")
    @ApiResponses({
            @ApiResponse(responseCode = "200", description = "OK", content = @Content(array = @ArraySchema(schema = @Schema(implementation = LoanChargesApiResourceSwagger.GetLoansLoanIdChargesChargeIdResponse.class)))) })
    public String retrieveAllLoanCharges(@PathParam("loanId") @Parameter(description = "loanId") final Long loanId,
            @Context final UriInfo uriInfo) {
        return retrieveAllLoanCharges(loanId, null, uriInfo);
    }
    /**
     * GET /loans/external-id/{loanExternalId}/charges — lists all charges of the loan identified
     * by its external id. Delegates to a three-argument overload (defined elsewhere in this
     * class) with a null internal id.
     */
    @GET
    @Path("external-id/{loanExternalId}/charges")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    @Operation(summary = "List Loan Charges", description = "It lists all the Loan Charges specific to a Loan \n\n" + "Example Requests:\n"
            + "\n" + "loans/1/charges\n" + "\n" + "\n" + "loans/1/charges?fields=name,amountOrPercentage")
    @ApiResponses({
            @ApiResponse(responseCode = "200", description = "OK", content = @Content(array = @ArraySchema(schema = @Schema(implementation = LoanChargesApiResourceSwagger.GetLoansLoanIdChargesChargeIdResponse.class)))) })
    public String retrieveAllLoanCharges(
            @PathParam("loanExternalId") @Parameter(description = "loanExternalId") final String loanExternalId,
            @Context final UriInfo uriInfo) {
        return retrieveAllLoanCharges(null, loanExternalId, uriInfo);
    }
    /**
     * GET /loans/{loanId}/charges/template — convenience endpoint returning field defaults and
     * allowed-value lists for building a charge-creation UI. Loan resolved by primary id;
     * delegates to a three-argument overload with a null external id.
     */
    @GET
    @Path("{loanId}/charges/template")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    @Operation(summary = "Retrieve Loan Charges Template", description = "This is a convenience resource. It can be useful when building maintenance user interface screens for client applications. The template data returned consists of any or all of:\n"
            + "\n" + "Field Defaults\n" + "Allowed description Lists\n" + "Example Request:\n" + "\n" + "loans/1/charges/template\n" + "\n")
    @ApiResponses({
            @ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.GetLoansLoanIdChargesTemplateResponse.class))) })
    public String retrieveTemplate(@PathParam("loanId") @Parameter(description = "loanId") final Long loanId,
            @Context final UriInfo uriInfo) {
        return retrieveTemplate(loanId, null, uriInfo);
    }
    /**
     * GET /loans/external-id/{loanExternalId}/charges/template — same template endpoint as
     * above, with the loan resolved by external id; delegates to a three-argument overload
     * with a null internal id.
     */
    @GET
    @Path("external-id/{loanExternalId}/charges/template")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    @Operation(summary = "Retrieve Loan Charges Template", description = "This is a convenience resource. It can be useful when building maintenance user interface screens for client applications. The template data returned consists of any or all of:\n"
            + "\n" + "Field Defaults\n" + "Allowed description Lists\n" + "Example Request:\n" + "\n" + "loans/1/charges/template\n" + "\n")
    @ApiResponses({
            @ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.GetLoansLoanIdChargesTemplateResponse.class))) })
    public String retrieveTemplate(@PathParam("loanExternalId") @Parameter(description = "loanExternalId") final String loanExternalId,
            @Context final UriInfo uriInfo) {
        return retrieveTemplate(null, loanExternalId, uriInfo);
    }
    /**
     * GET /loans/{loanId}/charges/{loanChargeId} — fetches one charge; loan and charge both
     * addressed by primary id. Delegates to a four-argument overload (defined elsewhere in
     * this class) with null external ids.
     */
    @GET
    @Path("{loanId}/charges/{loanChargeId}")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    @Operation(summary = "Retrieve a Loan Charge", description = "Retrieves Loan Charge according to the Loan ID and Loan Charge ID"
            + "Example Requests:\n" + "\n" + "/loans/1/charges/1\n" + "\n" + "\n" + "/loans/1/charges/1?fields=name,amountOrPercentage")
    @ApiResponses({
            @ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.GetLoansLoanIdChargesChargeIdResponse.class))) })
    public String retrieveLoanCharge(@PathParam("loanId") @Parameter(description = "loanId") final Long loanId,
            @PathParam("loanChargeId") @Parameter(description = "loanChargeId") final Long loanChargeId, @Context final UriInfo uriInfo) {
        return retrieveLoanCharge(loanId, null, loanChargeId, null, uriInfo);
    }
    /**
     * GET /loans/{loanId}/charges/external-id/{loanChargeExternalId} — fetches one charge;
     * loan addressed by primary id, charge by external id. Delegates to the shared
     * four-argument overload.
     */
    @GET
    @Path("{loanId}/charges/external-id/{loanChargeExternalId}")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    @Operation(summary = "Retrieve a Loan Charge", description = "Retrieves Loan Charge according to the Loan ID and Loan Charge External ID"
            + "Example Requests:\n" + "\n" + "/loans/1/charges/1\n" + "\n" + "\n"
            + "/loans/1/charges/external-id/1?fields=name,amountOrPercentage")
    @ApiResponses({
            @ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.GetLoansLoanIdChargesChargeIdResponse.class))) })
    public String retrieveLoanCharge(@PathParam("loanId") @Parameter(description = "loanId") final Long loanId,
            @PathParam("loanChargeExternalId") @Parameter(description = "loanChargeExternalId") final String loanChargeExternalId,
            @Context final UriInfo uriInfo) {
        return retrieveLoanCharge(loanId, null, null, loanChargeExternalId, uriInfo);
    }
    /**
     * GET /loans/external-id/{loanExternalId}/charges/{loanChargeId} — fetches one charge;
     * loan addressed by external id, charge by primary id. Delegates to the shared
     * four-argument overload.
     */
    @GET
    @Path("external-id/{loanExternalId}/charges/{loanChargeId}")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    @Operation(summary = "Retrieve a Loan Charge", description = "Retrieves Loan Charge according to the Loan external ID and Loan Charge ID"
            + "Example Requests:\n" + "\n" + "/loans/1/charges/1\n" + "\n" + "\n" + "/loans/1/charges/1?fields=name,amountOrPercentage")
    @ApiResponses({
            @ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.GetLoansLoanIdChargesChargeIdResponse.class))) })
    public String retrieveLoanCharge(@PathParam("loanExternalId") @Parameter(description = "loanExternalId") final String loanExternalId,
            @PathParam("loanChargeId") @Parameter(description = "loanChargeId") final Long loanChargeId, @Context final UriInfo uriInfo) {
        return retrieveLoanCharge(null, loanExternalId, loanChargeId, null, uriInfo);
    }
    /**
     * GET /loans/external-id/{loanExternalId}/charges/external-id/{loanChargeExternalId} —
     * fetches one charge with both loan and charge addressed by external id. Delegates to
     * the shared four-argument overload.
     */
    @GET
    @Path("external-id/{loanExternalId}/charges/external-id/{loanChargeExternalId}")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    @Operation(summary = "Retrieve a Loan Charge", description = "Retrieves Loan Charge according to the Loan External ID and Loan Charge External ID"
            + "Example Requests:\n" + "\n" + "/loans/1/charges/1\n" + "\n" + "\n" + "/loans/1/charges/1?fields=name,amountOrPercentage")
    @ApiResponses({
            @ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.GetLoansLoanIdChargesChargeIdResponse.class))) })
    public String retrieveLoanCharge(@PathParam("loanExternalId") @Parameter(description = "loanExternalId") final String loanExternalId,
            @PathParam("loanChargeExternalId") @Parameter(description = "loanChargeExternalId") final String loanChargeExternalId,
            @Context final UriInfo uriInfo) {
        return retrieveLoanCharge(null, loanExternalId, null, loanChargeExternalId, uriInfo);
    }
@POST
@Path("{loanId}/charges")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Create a Loan Charge (no command provided) or Pay a charge (command=pay)", description = "Creates a Loan Charge | Pay a Loan Charge")
@RequestBody(required = true, content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesRequest.class)))
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesResponse.class))) })
public String executeLoanCharge(@PathParam("loanId") @Parameter(description = "loanId") final Long loanId,
@QueryParam("command") @Parameter(description = "command") final String commandParam,
@Parameter(hidden = true) final String apiRequestBodyAsJson) {
return handleExecuteLoanCharge(loanId, null, commandParam, apiRequestBodyAsJson);
}
@POST
@Path("external-id/{loanExternalId}/charges")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Create a Loan Charge (no command provided) or Pay a charge (command=pay)", description = "Creates a Loan Charge | Pay a Loan Charge")
@RequestBody(required = true, content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesRequest.class)))
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesResponse.class))) })
public String executeLoanCharge(@PathParam("loanExternalId") @Parameter(description = "loanExternalId") final String loanExternalId,
@QueryParam("command") @Parameter(description = "command") final String commandParam,
@Parameter(hidden = true) final String apiRequestBodyAsJson) {
return handleExecuteLoanCharge(null, loanExternalId, commandParam, apiRequestBodyAsJson);
}
@POST
@Path("{loanId}/charges/{loanChargeId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Pay / Waive / Adjustment for Loan Charge", description = "Loan Charge will be paid if the loan is linked with a savings account | Waive Loan Charge | Add Charge Adjustment")
@RequestBody(required = true, content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesChargeIdRequest.class)))
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesChargeIdResponse.class))) })
public String executeLoanCharge(@PathParam("loanId") @Parameter(description = "loanId") final Long loanId,
@PathParam("loanChargeId") @Parameter(description = "loanChargeId") final Long loanChargeId,
@QueryParam("command") @Parameter(description = "command") final String commandParam,
@Parameter(hidden = true) final String apiRequestBodyAsJson) {
return handleExecuteLoanCharge(loanId, null, loanChargeId, null, commandParam, apiRequestBodyAsJson);
}
@POST
@Path("{loanId}/charges/external-id/{loanChargeExternalId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Pay / Waive / Adjustment for Loan Charge", description = "Loan Charge will be paid if the loan is linked with a savings account | Waive Loan Charge | Add Charge Adjustment")
@RequestBody(required = true, content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesChargeIdRequest.class)))
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesChargeIdResponse.class))) })
public String executeLoanCharge(@PathParam("loanId") @Parameter(description = "loanId") final Long loanId,
@PathParam("loanChargeExternalId") @Parameter(description = "loanChargeExternalId") final String loanChargeExternalId,
@QueryParam("command") @Parameter(description = "command") final String commandParam,
@Parameter(hidden = true) final String apiRequestBodyAsJson) {
return handleExecuteLoanCharge(loanId, null, null, loanChargeExternalId, commandParam, apiRequestBodyAsJson);
}
@POST
@Path("external-id/{loanExternalId}/charges/{loanChargeId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Pay / Waive / Adjustment for Loan Charge", description = "Loan Charge will be paid if the loan is linked with a savings account | Waive Loan Charge | Add Charge Adjustment")
@RequestBody(required = true, content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesChargeIdRequest.class)))
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesChargeIdResponse.class))) })
public String executeLoanCharge(@PathParam("loanExternalId") @Parameter(description = "loanExternalId") final String loanExternalId,
@PathParam("loanChargeId") @Parameter(description = "loanChargeId") final Long loanChargeId,
@QueryParam("command") @Parameter(description = "command") final String commandParam,
@Parameter(hidden = true) final String apiRequestBodyAsJson) {
return handleExecuteLoanCharge(null, loanExternalId, loanChargeId, null, commandParam, apiRequestBodyAsJson);
}
@POST
@Path("external-id/{loanExternalId}/charges/external-id/{loanChargeExternalId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Pay / Waive / Adjustment for Loan Charge", description = "Loan Charge will be paid if the loan is linked with a savings account | Waive Loan Charge | Add Charge Adjustment")
@RequestBody(required = true, content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesChargeIdRequest.class)))
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PostLoansLoanIdChargesChargeIdResponse.class))) })
public String executeLoanCharge(@PathParam("loanExternalId") @Parameter(description = "loanExternalId") final String loanExternalId,
@PathParam("loanChargeExternalId") @Parameter(description = "loanChargeExternalId") final String loanChargeExternalId,
@QueryParam("command") @Parameter(description = "command") final String commandParam,
@Parameter(hidden = true) final String apiRequestBodyAsJson) {
return handleExecuteLoanCharge(null, loanExternalId, null, loanChargeExternalId, commandParam, apiRequestBodyAsJson);
}
@PUT
@Path("{loanId}/charges/{loanChargeId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Update a Loan Charge", description = "Currently Loan Charges may be updated only if the Loan is not yet approved")
@RequestBody(required = true, content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PutLoansLoanIdChargesChargeIdRequest.class)))
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PutLoansLoanIdChargesChargeIdResponse.class))) })
public String updateLoanCharge(@PathParam("loanId") @Parameter(description = "loanId") final Long loanId,
@PathParam("loanChargeId") @Parameter(description = "loanChargeId") final Long loanChargeId,
@Parameter(hidden = true) final String apiRequestBodyAsJson) {
return updateLoanCharge(loanId, null, loanChargeId, null, apiRequestBodyAsJson);
}
@PUT
@Path("{loanId}/charges/external-id/{loanChargeExternalId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Update a Loan Charge", description = "Currently Loan Charges may be updated only if the Loan is not yet approved")
@RequestBody(required = true, content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PutLoansLoanIdChargesChargeIdRequest.class)))
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PutLoansLoanIdChargesChargeIdResponse.class))) })
public String updateLoanCharge(@PathParam("loanId") @Parameter(description = "loanId") final Long loanId,
@PathParam("loanChargeExternalId") @Parameter(description = "loanChargeExternalId") final String loanChargeExternalId,
@Parameter(hidden = true) final String apiRequestBodyAsJson) {
return updateLoanCharge(loanId, null, null, loanChargeExternalId, apiRequestBodyAsJson);
}
@PUT
@Path("external-id/{loanExternalId}/charges/{loanChargeId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Update a Loan Charge", description = "Currently Loan Charges may be updated only if the Loan is not yet approved")
@RequestBody(required = true, content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PutLoansLoanIdChargesChargeIdRequest.class)))
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PutLoansLoanIdChargesChargeIdResponse.class))) })
public String updateLoanCharge(@PathParam("loanExternalId") @Parameter(description = "loanExternalId") final String loanExternalId,
@PathParam("loanChargeId") @Parameter(description = "loanChargeId") final Long loanChargeId,
@Parameter(hidden = true) final String apiRequestBodyAsJson) {
return updateLoanCharge(null, loanExternalId, loanChargeId, null, apiRequestBodyAsJson);
}
@PUT
@Path("external-id/{loanExternalId}/charges/external-id/{loanChargeExternalId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Update a Loan Charge", description = "Currently Loan Charges may be updated only if the Loan is not yet approved")
@RequestBody(required = true, content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PutLoansLoanIdChargesChargeIdRequest.class)))
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.PutLoansLoanIdChargesChargeIdResponse.class))) })
public String updateLoanCharge(@PathParam("loanExternalId") @Parameter(description = "loanExternalId") final String loanExternalId,
@PathParam("loanChargeExternalId") @Parameter(description = "loanChargeExternalId") final String loanChargeExternalId,
@Parameter(hidden = true) final String apiRequestBodyAsJson) {
return updateLoanCharge(null, loanExternalId, null, loanChargeExternalId, apiRequestBodyAsJson);
}
@DELETE
@Path("{loanId}/charges/{loanChargeId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Delete a Loan Charge", description = "Note: Currently, A Loan Charge may only be removed from Loans that are not yet approved.")
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.DeleteLoansLoanIdChargesChargeIdResponse.class))) })
public String deleteLoanCharge(@PathParam("loanId") @Parameter(description = "loanId") final Long loanId,
@PathParam("loanChargeId") @Parameter(description = "loanChargeId") final Long loanChargeId) {
return deleteLoanCharge(loanId, null, loanChargeId, null);
}
@DELETE
@Path("{loanId}/charges/external-id/{loanChargeExternalId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Delete a Loan Charge", description = "Note: Currently, A Loan Charge may only be removed from Loans that are not yet approved.")
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.DeleteLoansLoanIdChargesChargeIdResponse.class))) })
public String deleteLoanCharge(@PathParam("loanId") @Parameter(description = "loanId") final Long loanId,
@PathParam("loanChargeExternalId") @Parameter(description = "loanChargeExternalId") final String loanChargeExternalId) {
return deleteLoanCharge(loanId, null, null, loanChargeExternalId);
}
@DELETE
@Path("external-id/{loanExternalId}/charges/{loanChargeId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Delete a Loan Charge", description = "Note: Currently, A Loan Charge may only be removed from Loans that are not yet approved.")
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.DeleteLoansLoanIdChargesChargeIdResponse.class))) })
public String deleteLoanCharge(@PathParam("loanExternalId") @Parameter(description = "loanExternalId") final String loanExternalId,
@PathParam("loanChargeId") @Parameter(description = "loanChargeId") final Long loanChargeId) {
return deleteLoanCharge(null, loanExternalId, loanChargeId, null);
}
@DELETE
@Path("external-id/{loanExternalId}/charges/external-id/{loanChargeExternalId}")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Operation(summary = "Delete a Loan Charge", description = "Note: Currently, A Loan Charge may only be removed from Loans that are not yet approved.")
@ApiResponses({
@ApiResponse(responseCode = "200", description = "OK", content = @Content(schema = @Schema(implementation = LoanChargesApiResourceSwagger.DeleteLoansLoanIdChargesChargeIdResponse.class))) })
public String deleteLoanCharge(@PathParam("loanExternalId") @Parameter(description = "loanExternalId") final String loanExternalId,
@PathParam("loanChargeExternalId") @Parameter(description = "loanChargeExternalId") final String loanChargeExternalId) {
return deleteLoanCharge(null, loanExternalId, null, loanChargeExternalId);
}
private String deleteLoanCharge(final Long loanId, final String loanExternalIdStr, final Long loanChargeId,
final String loanChargeExternalIdStr) {
ExternalId loanExternalId = ExternalIdFactory.produce(loanExternalIdStr);
ExternalId loanChargeExternalId = ExternalIdFactory.produce(loanChargeExternalIdStr);
Long resolvedLoanId = loanId == null ? loanReadPlatformService.getResolvedLoanId(loanExternalId) : loanId;
Long resolvedLoanChargeId = getResolvedLoanChargeId(loanChargeId, loanChargeExternalId);
final CommandWrapper commandRequest = new CommandWrapperBuilder().deleteLoanCharge(resolvedLoanId, resolvedLoanChargeId).build();
final CommandProcessingResult result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
return this.toApiJsonSerializer.serialize(result);
}
private String retrieveLoanCharge(final Long loanId, final String loanExternalIdStr, final Long loanChargeId,
final String loanChargeExternalIdStr, final UriInfo uriInfo) {
this.context.authenticatedUser().validateHasReadPermission(RESOURCE_NAME_FOR_PERMISSIONS);
ExternalId loanExternalId = ExternalIdFactory.produce(loanExternalIdStr);
ExternalId loanChargeExternalId = ExternalIdFactory.produce(loanChargeExternalIdStr);
Long resolvedLoanId = loanId == null ? loanReadPlatformService.getResolvedLoanId(loanExternalId) : loanId;
Long resolvedLoanChargeId = getResolvedLoanChargeId(loanChargeId, loanChargeExternalId);
final LoanChargeData loanCharge = this.loanChargeReadPlatformService.retrieveLoanChargeDetails(resolvedLoanChargeId,
resolvedLoanId);
final List<LoanInstallmentChargeData> installmentChargeData = this.loanChargeReadPlatformService
.retrieveInstallmentLoanCharges(resolvedLoanChargeId, true);
final LoanChargeData loanChargeData = new LoanChargeData(loanCharge, installmentChargeData);
final ApiRequestJsonSerializationSettings settings = this.apiRequestParameterHelper.process(uriInfo.getQueryParameters());
return this.toApiJsonSerializer.serialize(settings, loanChargeData, RESPONSE_DATA_PARAMETERS);
}
private String handleExecuteLoanCharge(final Long loanId, final String loanExternalIdStr, final String commandParam,
final String apiRequestBodyAsJson) {
ExternalId loanExternalId = ExternalIdFactory.produce(loanExternalIdStr);
Long resolvedLoanId = loanId == null ? loanReadPlatformService.getResolvedLoanId(loanExternalId) : loanId;
CommandProcessingResult result;
if (CommandParameterUtil.is(commandParam, COMMAND_PAY)) {
final CommandWrapper commandRequest = new CommandWrapperBuilder().payLoanCharge(resolvedLoanId, null)
.withJson(apiRequestBodyAsJson).build();
result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
} else if (CommandParameterUtil.is(commandParam, COMMAND_DEACTIVATE_OVERDUE)) {
final CommandWrapper commandRequest = new CommandWrapperBuilder().deactivateOverdueLoanCharges(resolvedLoanId, null)
.withJson(apiRequestBodyAsJson).build();
result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
} else {
final CommandWrapper commandRequest = new CommandWrapperBuilder().createLoanCharge(resolvedLoanId)
.withJson(apiRequestBodyAsJson).build();
result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
}
return this.toApiJsonSerializer.serialize(result);
}
private String handleExecuteLoanCharge(final Long loanId, final String loanExternalIdStr, final Long loanChargeId,
final String loanChargeExternalIdStr, final String commandParam, final String apiRequestBodyAsJson) {
ExternalId loanExternalId = ExternalIdFactory.produce(loanExternalIdStr);
ExternalId loanChargeExternalId = ExternalIdFactory.produce(loanChargeExternalIdStr);
Long resolvedLoanId = loanId == null ? loanReadPlatformService.getResolvedLoanId(loanExternalId) : loanId;
Long resolvedLoanChargeId = getResolvedLoanChargeId(loanChargeId, loanChargeExternalId);
final CommandWrapperBuilder builder = new CommandWrapperBuilder().withJson(apiRequestBodyAsJson);
CommandProcessingResult result;
if (CommandParameterUtil.is(commandParam, COMMAND_WAIVE)) {
final CommandWrapper commandRequest = builder.waiveLoanCharge(resolvedLoanId, resolvedLoanChargeId).build();
result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
} else if (CommandParameterUtil.is(commandParam, COMMAND_PAY)) {
final CommandWrapper commandRequest = builder.payLoanCharge(resolvedLoanId, resolvedLoanChargeId).build();
result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
} else if (CommandParameterUtil.is(commandParam, COMMAND_ADJUSTMENT)) {
final CommandWrapper commandRequest = builder.adjustmentForLoanCharge(resolvedLoanId, resolvedLoanChargeId).build();
result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
} else {
throw new UnrecognizedQueryParamException("command", commandParam);
}
if (result == null) {
throw new UnrecognizedQueryParamException("command", commandParam);
}
return this.toApiJsonSerializer.serialize(result);
}
private String updateLoanCharge(final Long loanId, final String loanExternalIdStr, final Long loanChargeId,
final String loanChargeExternalIdStr, final String apiRequestBodyAsJson) {
ExternalId loanExternalId = ExternalIdFactory.produce(loanExternalIdStr);
ExternalId loanChargeExternalId = ExternalIdFactory.produce(loanChargeExternalIdStr);
Long resolvedLoanId = loanId == null ? loanReadPlatformService.getResolvedLoanId(loanExternalId) : loanId;
Long resolvedLoanChargeId = getResolvedLoanChargeId(loanChargeId, loanChargeExternalId);
final CommandWrapper commandRequest = new CommandWrapperBuilder().updateLoanCharge(resolvedLoanId, resolvedLoanChargeId)
.withJson(apiRequestBodyAsJson).build();
final CommandProcessingResult result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
return this.toApiJsonSerializer.serialize(result);
}
private String retrieveAllLoanCharges(final Long loanId, final String loanExternalIdStr, final UriInfo uriInfo) {
this.context.authenticatedUser().validateHasReadPermission(RESOURCE_NAME_FOR_PERMISSIONS);
ExternalId loanExternalId = ExternalIdFactory.produce(loanExternalIdStr);
Long resolvedLoanId = loanId == null ? loanReadPlatformService.getResolvedLoanId(loanExternalId) : loanId;
final Collection<LoanChargeData> loanCharges = this.loanChargeReadPlatformService.retrieveLoanCharges(resolvedLoanId);
final ApiRequestJsonSerializationSettings settings = this.apiRequestParameterHelper.process(uriInfo.getQueryParameters());
return this.toApiJsonSerializer.serialize(settings, loanCharges, RESPONSE_DATA_PARAMETERS);
}
private String retrieveTemplate(final Long loanId, final String loanExternalIdStr, final UriInfo uriInfo) {
this.context.authenticatedUser().validateHasReadPermission(RESOURCE_NAME_FOR_PERMISSIONS);
ExternalId loanExternalId = ExternalIdFactory.produce(loanExternalIdStr);
Long resolvedLoanId = loanId == null ? loanReadPlatformService.getResolvedLoanId(loanExternalId) : loanId;
final List<ChargeData> chargeOptions = this.chargeReadPlatformService.retrieveLoanAccountApplicableCharges(resolvedLoanId,
new ChargeTimeType[] { ChargeTimeType.OVERDUE_INSTALLMENT });
final LoanChargeData loanChargeTemplate = LoanChargeData.template(chargeOptions);
final ApiRequestJsonSerializationSettings settings = this.apiRequestParameterHelper.process(uriInfo.getQueryParameters());
return this.toApiJsonSerializer.serialize(settings, loanChargeTemplate, RESPONSE_DATA_PARAMETERS);
}
private Long getResolvedLoanChargeId(final Long loanChargeId, final ExternalId loanChargeExternalId) {
Long resolvedLoanChargeId = loanChargeId;
if (resolvedLoanChargeId == null) {
loanChargeExternalId.throwExceptionIfEmpty();
resolvedLoanChargeId = this.loanChargeReadPlatformService.retrieveLoanChargeIdByExternalId(loanChargeExternalId);
if (resolvedLoanChargeId == null) {
throw new LoanChargeNotFoundException(loanChargeExternalId);
}
}
return resolvedLoanChargeId;
}
}
|
apache/hudi | 38,081 | hudi-client/hudi-client-common/src/main/java/org/apache/hudi/metadata/HoodieMetadataWriteUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.metadata;
import org.apache.hudi.avro.model.HoodieMetadataRecord;
import org.apache.hudi.client.FailOnFirstErrorWriteStatus;
import org.apache.hudi.client.transaction.lock.InProcessLockProvider;
import org.apache.hudi.common.config.HoodieMetadataConfig;
import org.apache.hudi.common.config.HoodieReaderConfig;
import org.apache.hudi.common.config.HoodieStorageConfig;
import org.apache.hudi.common.config.RecordMergeMode;
import org.apache.hudi.common.data.HoodieData;
import org.apache.hudi.common.data.HoodieListData;
import org.apache.hudi.common.data.HoodiePairData;
import org.apache.hudi.common.engine.EngineType;
import org.apache.hudi.common.engine.HoodieEngineContext;
import org.apache.hudi.common.engine.HoodieLocalEngineContext;
import org.apache.hudi.common.fs.ConsistencyGuardConfig;
import org.apache.hudi.common.model.HoodieAvroRecordMerger;
import org.apache.hudi.common.model.HoodieBaseFile;
import org.apache.hudi.common.model.HoodieCleaningPolicy;
import org.apache.hudi.common.model.HoodieCommitMetadata;
import org.apache.hudi.common.model.HoodieFailedWritesCleaningPolicy;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordMerger;
import org.apache.hudi.common.model.HoodieReplaceCommitMetadata;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.HoodieWriteStat;
import org.apache.hudi.common.model.WriteConcurrencyMode;
import org.apache.hudi.common.model.WriteOperationType;
import org.apache.hudi.common.table.HoodieTableConfig;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTableVersion;
import org.apache.hudi.common.table.marker.MarkerType;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.view.FileSystemViewManager;
import org.apache.hudi.common.table.view.SpillableMapBasedFileSystemView;
import org.apache.hudi.common.table.view.SyncableFileSystemView;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.util.VisibleForTesting;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.config.HoodieArchivalConfig;
import org.apache.hudi.config.HoodieCleanConfig;
import org.apache.hudi.config.HoodieCompactionConfig;
import org.apache.hudi.config.HoodieLockConfig;
import org.apache.hudi.config.HoodiePayloadConfig;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.config.metrics.HoodieMetricsConfig;
import org.apache.hudi.config.metrics.HoodieMetricsDatadogConfig;
import org.apache.hudi.config.metrics.HoodieMetricsGraphiteConfig;
import org.apache.hudi.config.metrics.HoodieMetricsJmxConfig;
import org.apache.hudi.config.metrics.HoodieMetricsM3Config;
import org.apache.hudi.config.metrics.HoodieMetricsPrometheusConfig;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieMetadataException;
import org.apache.hudi.stats.HoodieColumnRangeMetadata;
import org.apache.hudi.storage.StoragePath;
import org.apache.hudi.storage.StoragePathInfo;
import org.apache.hudi.table.action.compact.strategy.UnBoundedCompactionStrategy;
import org.apache.hudi.util.Lazy;
import org.apache.avro.Schema;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.stream.Collectors.toList;
import static org.apache.hudi.avro.HoodieAvroUtils.addMetadataFields;
import static org.apache.hudi.common.config.HoodieMetadataConfig.DEFAULT_METADATA_ASYNC_CLEAN;
import static org.apache.hudi.common.config.HoodieMetadataConfig.DEFAULT_METADATA_CLEANER_COMMITS_RETAINED;
import static org.apache.hudi.common.config.HoodieMetadataConfig.DEFAULT_METADATA_POPULATE_META_FIELDS;
import static org.apache.hudi.common.util.StringUtils.isNullOrEmpty;
import static org.apache.hudi.common.util.StringUtils.nonEmpty;
import static org.apache.hudi.common.util.ValidationUtils.checkState;
import static org.apache.hudi.metadata.HoodieTableMetadata.METADATA_TABLE_NAME_SUFFIX;
import static org.apache.hudi.metadata.HoodieTableMetadataUtil.PARTITION_NAME_PARTITION_STATS;
import static org.apache.hudi.metadata.HoodieTableMetadataUtil.convertMetadataToBloomFilterRecords;
import static org.apache.hudi.metadata.HoodieTableMetadataUtil.convertMetadataToColumnStatsRecords;
import static org.apache.hudi.metadata.HoodieTableMetadataUtil.convertMetadataToFilesPartitionRecords;
import static org.apache.hudi.metadata.HoodieTableMetadataUtil.convertMetadataToPartitionStatsRecords;
import static org.apache.hudi.metadata.HoodieTableMetadataUtil.convertMetadataToRecordIndexRecords;
import static org.apache.hudi.metadata.HoodieTableMetadataUtil.existingIndexVersionOrDefault;
import static org.apache.hudi.metadata.HoodieTableMetadataUtil.generateColumnStatsKeys;
import static org.apache.hudi.metadata.HoodieTableMetadataUtil.getColumnsToIndex;
import static org.apache.hudi.metadata.HoodieTableMetadataUtil.translateWriteStatToFileStats;
/**
* Metadata table write utils.
*/
public class HoodieMetadataWriteUtils {
    private static final Logger LOG = LoggerFactory.getLogger(HoodieMetadataWriteUtils.class);
    // Record-key field name used for the metadata table's virtual keys; sourced
    // from the metadata payload schema (HoodieMetadataPayload.KEY_FIELD_NAME).
    public static final String RECORD_KEY_FIELD_NAME = HoodieMetadataPayload.KEY_FIELD_NAME;
    // MDT writes are always prepped, so insert/upsert shuffle parallelism need not be tuned; likewise delete
    // parallelism, since deletes are not used.
    // Finalize, cleaner and rollback tasks operate per file group, so their parallelism should be at least the total
    // file-group count. That count cannot be determined accurately here, so this value is kept large; effective
    // parallelism is in any case bounded by the executor count.
    private static final int MDT_DEFAULT_PARALLELISM = 512;
    // File groups in each partition are fixed at creation time and must never be split into multiple files.
    // Hence a very large base-file size is used for the metadata table. The actual size of the HFiles created
    // ultimately depends on the number of file groups selected per partition (see estimateFileGroupCount).
    private static final long MDT_MAX_HFILE_SIZE_BYTES = 10 * 1024 * 1024 * 1024L; // 10GB
/**
 * Create a {@code HoodieWriteConfig} to use for the Metadata Table (MDT).
 *
 * <p>Most options are borrowed from the data table's write config, but table services are pinned:
 * cleaning, archiving and compaction are all triggered manually by the metadata writer (never
 * inline/automatically), so that their instant times can be controlled.
 *
 * @param writeConfig {@code HoodieWriteConfig} of the main dataset writer
 * @param failedWritesCleaningPolicy Cleaning policy on failed writes (forced to LAZY when
 *                                   streaming writes to the metadata table are enabled)
 * @param datatableVersion table version of the data table; decides whether streaming writes to
 *                         the metadata table are enabled
 * @return the write config the metadata table writer should use
 */
@VisibleForTesting
public static HoodieWriteConfig createMetadataWriteConfig(
    HoodieWriteConfig writeConfig, HoodieFailedWritesCleaningPolicy failedWritesCleaningPolicy,
    HoodieTableVersion datatableVersion) {
  String tableName = writeConfig.getTableName() + METADATA_TABLE_NAME_SUFFIX;
  boolean isStreamingWritesToMetadataEnabled = writeConfig.isMetadataStreamingWritesEnabled(datatableVersion);
  // Streaming writes imply concurrent writers into the MDT, hence non-blocking concurrency
  // control backed by an in-process lock; otherwise a single writer needs no lock provider.
  WriteConcurrencyMode concurrencyMode = isStreamingWritesToMetadataEnabled
      ? WriteConcurrencyMode.NON_BLOCKING_CONCURRENCY_CONTROL : WriteConcurrencyMode.SINGLE_WRITER;
  HoodieLockConfig lockConfig = isStreamingWritesToMetadataEnabled
      ? HoodieLockConfig.newBuilder().withLockProvider(InProcessLockProvider.class).build() : HoodieLockConfig.newBuilder().build();
  // HUDI-9407 tracks adding support for separate lock configuration for MDT. Until then, all writes to MDT will happen within data table lock.
  if (isStreamingWritesToMetadataEnabled) {
    failedWritesCleaningPolicy = HoodieFailedWritesCleaningPolicy.LAZY;
  }
  final long maxLogFileSizeBytes = writeConfig.getMetadataConfig().getMaxLogFileSize();
  // Borrow the cleaner policy from the main table and adjust the cleaner policy based on the main table's cleaner policy
  HoodieCleaningPolicy dataTableCleaningPolicy = writeConfig.getCleanerPolicy();
  HoodieCleanConfig.Builder cleanConfigBuilder = HoodieCleanConfig.newBuilder()
      .withAsyncClean(DEFAULT_METADATA_ASYNC_CLEAN)
      .withAutoClean(false)
      .withCleanerParallelism(MDT_DEFAULT_PARALLELISM)
      .withFailedWritesCleaningPolicy(failedWritesCleaningPolicy)
      .withCleanerPolicy(dataTableCleaningPolicy);
  // Retain ~20% more than the data table's own retention setting (with a floor of the MDT
  // default for the commits-based policy) so MDT cleaning always lags the data table.
  if (HoodieCleaningPolicy.KEEP_LATEST_COMMITS.equals(dataTableCleaningPolicy)) {
    int retainCommits = (int) Math.max(DEFAULT_METADATA_CLEANER_COMMITS_RETAINED, writeConfig.getCleanerCommitsRetained() * 1.2);
    cleanConfigBuilder.retainCommits(retainCommits);
  } else if (HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS.equals(dataTableCleaningPolicy)) {
    int retainFileVersions = (int) Math.ceil(writeConfig.getCleanerFileVersionsRetained() * 1.2);
    cleanConfigBuilder.retainFileVersions(retainFileVersions);
  } else if (HoodieCleaningPolicy.KEEP_LATEST_BY_HOURS.equals(dataTableCleaningPolicy)) {
    int numHoursRetained = (int) Math.ceil(writeConfig.getCleanerHoursRetained() * 1.2);
    cleanConfigBuilder.cleanerNumHoursRetained(numHoursRetained);
  }
  // Create the write config for the metadata table by borrowing options from the main write config.
  HoodieWriteConfig.Builder builder = HoodieWriteConfig.newBuilder()
      .withEngineType(writeConfig.getEngineType())
      .withWriteTableVersion(writeConfig.getWriteVersion().versionCode())
      .withMergeAllowDuplicateOnInserts(false)
      .withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder()
          .withConsistencyCheckEnabled(writeConfig.getConsistencyGuardConfig().isConsistencyCheckEnabled())
          .withInitialConsistencyCheckIntervalMs(writeConfig.getConsistencyGuardConfig().getInitialConsistencyCheckIntervalMs())
          .withMaxConsistencyCheckIntervalMs(writeConfig.getConsistencyGuardConfig().getMaxConsistencyCheckIntervalMs())
          .withMaxConsistencyChecks(writeConfig.getConsistencyGuardConfig().getMaxConsistencyChecks())
          .build())
      // The MDT must not itself use metadata-table-based file listing (see the validation below).
      .withMetadataConfig(HoodieMetadataConfig.newBuilder().enable(false)
          .withFileListingParallelism(writeConfig.getFileListingParallelism()).build())
      .withAvroSchemaValidate(false)
      .withEmbeddedTimelineServerEnabled(false)
      .withMarkersType(MarkerType.DIRECT.name())
      .withRollbackUsingMarkers(false)
      .withPath(HoodieTableMetadata.getMetadataTableBasePath(writeConfig.getBasePath()))
      .withSchema(HoodieMetadataRecord.getClassSchema().toString())
      .forTable(tableName)
      // we will trigger cleaning manually, to control the instant times
      .withCleanConfig(cleanConfigBuilder.build())
      // we will trigger archive manually, to ensure only regular writer invokes it
      .withArchivalConfig(HoodieArchivalConfig.newBuilder()
          .archiveCommitsWith(
              writeConfig.getMinCommitsToKeep() + 1, writeConfig.getMaxCommitsToKeep() + 1)
          .withAutoArchive(false)
          .build())
      // we will trigger compaction manually, to control the instant times
      .withCompactionConfig(HoodieCompactionConfig.newBuilder()
          .withInlineCompaction(false)
          .withMaxNumDeltaCommitsBeforeCompaction(writeConfig.getMetadataCompactDeltaCommitMax())
          .withEnableOptimizedLogBlocksScan(String.valueOf(writeConfig.enableOptimizedLogBlocksScan()))
          // Compaction on metadata table is used as a barrier for archiving on main dataset and for validating the
          // deltacommits having corresponding completed commits. Therefore, we need to compact all fileslices of all
          // partitions together requiring UnBoundedCompactionStrategy.
          .withCompactionStrategy(new UnBoundedCompactionStrategy())
          // Check if log compaction is enabled, this is needed for tables with a lot of records.
          .withLogCompactionEnabled(writeConfig.isLogCompactionEnabledOnMetadata())
          // Below config is only used if isLogCompactionEnabled is set.
          .withLogCompactionBlocksThreshold(writeConfig.getMetadataLogCompactBlocksThreshold())
          .build())
      .withStorageConfig(HoodieStorageConfig.newBuilder().hfileMaxFileSize(MDT_MAX_HFILE_SIZE_BYTES)
          .logFileMaxSize(maxLogFileSizeBytes)
          // Keeping the log blocks as large as the log files themselves reduces the number of HFile blocks to be checked for
          // presence of keys
          .logFileDataBlockMaxSize(maxLogFileSizeBytes)
          .withBloomFilterType(writeConfig.getMetadataConfig().getBloomFilterType())
          .withBloomFilterNumEntries(writeConfig.getMetadataConfig().getBloomFilterNumEntries())
          .withBloomFilterFpp(writeConfig.getMetadataConfig().getBloomFilterFpp())
          .withBloomFilterDynamicMaxEntries(writeConfig.getMetadataConfig().getDynamicBloomFilterMaxNumEntries())
          .build())
      .withRollbackParallelism(MDT_DEFAULT_PARALLELISM)
      .withFinalizeWriteParallelism(MDT_DEFAULT_PARALLELISM)
      .withKeyGenerator(HoodieTableMetadataKeyGenerator.class.getCanonicalName())
      .withPopulateMetaFields(DEFAULT_METADATA_POPULATE_META_FIELDS)
      .withWriteStatusClass(FailOnFirstErrorWriteStatus.class)
      .withReleaseResourceEnabled(writeConfig.areReleaseResourceEnabled())
      .withRecordMergeMode(RecordMergeMode.CUSTOM)
      .withRecordMergeStrategyId(HoodieRecordMerger.PAYLOAD_BASED_MERGE_STRATEGY_UUID)
      .withPayloadConfig(HoodiePayloadConfig.newBuilder()
          .withPayloadClass(HoodieMetadataPayload.class.getCanonicalName()).build())
      .withRecordMergeImplClasses(HoodieAvroRecordMerger.class.getCanonicalName())
      .withWriteRecordPositionsEnabled(false)
      .withWriteConcurrencyMode(concurrencyMode)
      .withLockConfig(lockConfig);
  // RecordKey properties are needed for the metadata table records
  final Properties properties = new Properties();
  properties.put(HoodieTableConfig.TYPE.key(), HoodieTableType.MERGE_ON_READ.name());
  properties.put(HoodieTableConfig.RECORDKEY_FIELDS.key(), RECORD_KEY_FIELD_NAME);
  properties.put("hoodie.datasource.write.recordkey.field", RECORD_KEY_FIELD_NAME);
  if (nonEmpty(writeConfig.getMetricReporterMetricsNamePrefix())) {
    properties.put(HoodieMetricsConfig.METRICS_REPORTER_PREFIX.key(),
        writeConfig.getMetricReporterMetricsNamePrefix() + METADATA_TABLE_NAME_SUFFIX);
  }
  // HFile caching properties
  properties.put(HoodieReaderConfig.HFILE_BLOCK_CACHE_ENABLED.key(),
      writeConfig.getBooleanOrDefault(HoodieReaderConfig.HFILE_BLOCK_CACHE_ENABLED));
  properties.put(HoodieReaderConfig.HFILE_BLOCK_CACHE_SIZE.key(),
      writeConfig.getIntOrDefault(HoodieReaderConfig.HFILE_BLOCK_CACHE_SIZE));
  properties.put(HoodieReaderConfig.HFILE_BLOCK_CACHE_TTL_MINUTES.key(),
      writeConfig.getIntOrDefault(HoodieReaderConfig.HFILE_BLOCK_CACHE_TTL_MINUTES));
  builder.withProperties(properties);
  if (writeConfig.isMetricsOn()) {
    // Table Name is needed for metric reporters prefix
    Properties commonProperties = new Properties();
    commonProperties.put(HoodieWriteConfig.TBL_NAME.key(), tableName);
    builder.withMetricsConfig(HoodieMetricsConfig.newBuilder()
        .fromProperties(commonProperties)
        .withReporterType(writeConfig.getMetricsReporterType().toString())
        .withExecutorMetrics(writeConfig.isExecutorMetricsEnabled())
        .withMetricsReporterMetricNamePrefix(writeConfig.getMetricReporterMetricsNamePrefix() + "_" + HoodieTableMetaClient.METADATA_STR)
        .on(true).build());
    // Reporter-specific settings are copied verbatim from the data table's config.
    switch (writeConfig.getMetricsReporterType()) {
      case GRAPHITE:
        builder.withMetricsGraphiteConfig(HoodieMetricsGraphiteConfig.newBuilder()
            .onGraphitePort(writeConfig.getGraphiteServerPort())
            .toGraphiteHost(writeConfig.getGraphiteServerHost())
            .usePrefix(writeConfig.getGraphiteMetricPrefix()).build());
        break;
      case JMX:
        builder.withMetricsJmxConfig(HoodieMetricsJmxConfig.newBuilder()
            .onJmxPort(writeConfig.getJmxPort())
            .toJmxHost(writeConfig.getJmxHost())
            .build());
        break;
      case PROMETHEUS_PUSHGATEWAY:
        HoodieMetricsPrometheusConfig pushGatewayConfig = HoodieMetricsPrometheusConfig.newBuilder()
            .withPushgatewayJobname(writeConfig.getPushGatewayJobName())
            .withPushgatewayRandomJobnameSuffix(writeConfig.getPushGatewayRandomJobNameSuffix())
            .withPushgatewayLabels(writeConfig.getPushGatewayLabels())
            .withPushgatewayReportPeriodInSeconds(String.valueOf(writeConfig.getPushGatewayReportPeriodSeconds()))
            .withPushgatewayHostName(writeConfig.getPushGatewayHost())
            .withPushgatewayPortNum(writeConfig.getPushGatewayPort()).build();
        builder.withProperties(pushGatewayConfig.getProps());
        break;
      case M3:
        HoodieMetricsM3Config m3Config = HoodieMetricsM3Config.newBuilder()
            .onM3Port(writeConfig.getM3ServerPort())
            .toM3Host(writeConfig.getM3ServerHost())
            .useM3Tags(writeConfig.getM3Tags())
            .useM3Service(writeConfig.getM3Service())
            .useM3Env(writeConfig.getM3Env()).build();
        builder.withProperties(m3Config.getProps());
        break;
      case DATADOG:
        HoodieMetricsDatadogConfig.Builder datadogConfig = HoodieMetricsDatadogConfig.newBuilder()
            .withDatadogApiKey(writeConfig.getDatadogApiKey())
            .withDatadogApiKeySkipValidation(writeConfig.getDatadogApiKeySkipValidation())
            .withDatadogPrefix(writeConfig.getDatadogMetricPrefix())
            .withDatadogReportPeriodSeconds(writeConfig.getDatadogReportPeriodSeconds())
            .withDatadogTags(String.join(",", writeConfig.getDatadogMetricTags()))
            .withDatadogApiTimeoutSeconds(writeConfig.getDatadogApiTimeoutSeconds());
        // Host and API site are optional; only set when configured on the data table.
        if (writeConfig.getDatadogMetricHost() != null) {
          datadogConfig = datadogConfig.withDatadogHost(writeConfig.getDatadogMetricHost());
        }
        if (writeConfig.getDatadogApiSite() != null) {
          datadogConfig = datadogConfig.withDatadogApiSite(writeConfig.getDatadogApiSite().name());
        }
        builder.withProperties(datadogConfig.build().getProps());
        break;
      case PROMETHEUS:
        HoodieMetricsPrometheusConfig prometheusConfig = HoodieMetricsPrometheusConfig.newBuilder()
            .withPushgatewayLabels(writeConfig.getPushGatewayLabels())
            .withPrometheusPortNum(writeConfig.getPrometheusPort()).build();
        builder.withProperties(prometheusConfig.getProps());
        break;
      case CONSOLE:
      case INMEMORY:
      case CLOUDWATCH:
        // These reporters require no extra reporter-specific configuration.
        break;
      default:
        throw new HoodieMetadataException("Unsupported Metrics Reporter type " + writeConfig.getMetricsReporterType());
    }
  }
  HoodieWriteConfig metadataWriteConfig = builder.build();
  // Inline compaction and auto clean is required as we do not expose this table outside
  ValidationUtils.checkArgument(!metadataWriteConfig.isAutoClean(), "Cleaning is controlled internally for Metadata table.");
  ValidationUtils.checkArgument(!metadataWriteConfig.inlineCompactionEnabled(), "Compaction is controlled internally for metadata table.");
  ValidationUtils.checkArgument(metadataWriteConfig.getWriteStatusClassName().equals(FailOnFirstErrorWriteStatus.class.getName()),
      "MDT should use " + FailOnFirstErrorWriteStatus.class.getName());
  // Metadata Table cannot have metadata listing turned on. (infinite loop, much?)
  ValidationUtils.checkArgument(!metadataWriteConfig.isMetadataTableEnabled(), "File listing cannot be used for Metadata Table");
  return metadataWriteConfig;
}
/**
 * Convert a commit action's metadata into metadata-table records, producing one
 * {@code HoodieData} per enabled metadata partition type.
 *
 * <p>The FILES partition is always populated; BLOOM_FILTERS, COLUMN_STATS, PARTITION_STATS and
 * RECORD_INDEX are populated only when enabled. PARTITION_STATS additionally requires the
 * COLUMN_STATS partition to already be available on the data table.
 *
 * @param context engine context to use
 * @param dataWriteConfig Hudi configs of the data table
 * @param commitMetadata commit action metadata
 * @param instantTime action instant time
 * @param dataMetaClient {@code HoodieTableMetaClient} for the data table
 * @param tableMetadata metadata-table reader
 * @param metadataConfig {@code HoodieMetadataConfig}
 * @param enabledPartitionTypes set of enabled MDT partitions to update
 * @param bloomFilterType type of generated bloom filter records
 * @param bloomIndexParallelism parallelism for bloom filter record generation
 * @param writesFileIdEncoding encoding passed through to record-index record generation
 * @param engineType engine generating the records
 * @param recordTypeOpt optional record type for column/partition stats generation
 * @param enableOptimizeLogBlocksScan flag used to enable scanInternalV2 for log blocks in data table
 * @return map of metadata partition path to the records for the commit action
 */
public static Map<String, HoodieData<HoodieRecord>> convertMetadataToRecords(HoodieEngineContext context, HoodieWriteConfig dataWriteConfig, HoodieCommitMetadata commitMetadata,
                                                                             String instantTime, HoodieTableMetaClient dataMetaClient, HoodieTableMetadata tableMetadata,
                                                                             HoodieMetadataConfig metadataConfig, Set<String> enabledPartitionTypes, String bloomFilterType,
                                                                             int bloomIndexParallelism, int writesFileIdEncoding, EngineType engineType,
                                                                             Option<HoodieRecord.HoodieRecordType> recordTypeOpt, boolean enableOptimizeLogBlocksScan) {
  final Map<String, HoodieData<HoodieRecord>> records = new HashMap<>();
  // FILES partition records are generated unconditionally.
  records.put(MetadataPartitionType.FILES.getPartitionPath(),
      context.parallelize(convertMetadataToFilesPartitionRecords(commitMetadata, instantTime), 1));
  if (enabledPartitionTypes.contains(MetadataPartitionType.BLOOM_FILTERS.getPartitionPath())) {
    records.put(MetadataPartitionType.BLOOM_FILTERS.getPartitionPath(),
        convertMetadataToBloomFilterRecords(
            context, dataWriteConfig, commitMetadata, instantTime, dataMetaClient, bloomFilterType, bloomIndexParallelism));
  }
  if (enabledPartitionTypes.contains(MetadataPartitionType.COLUMN_STATS.getPartitionPath())) {
    records.put(MetadataPartitionType.COLUMN_STATS.getPartitionPath(),
        convertMetadataToColumnStatsRecords(commitMetadata, context, dataMetaClient, metadataConfig, recordTypeOpt));
  }
  if (enabledPartitionTypes.contains(MetadataPartitionType.PARTITION_STATS.getPartitionPath())) {
    // Partition stats are rolled up from column stats, so that partition must be available.
    checkState(MetadataPartitionType.COLUMN_STATS.isMetadataPartitionAvailable(dataMetaClient),
        "Column stats partition must be enabled to generate partition stats. Please enable: " + HoodieMetadataConfig.ENABLE_METADATA_INDEX_COLUMN_STATS.key());
    // DELETE_PARTITION commits produce delete records instead of fresh stats.
    boolean isDeletePartition = commitMetadata.getOperationType().equals(WriteOperationType.DELETE_PARTITION);
    records.put(MetadataPartitionType.PARTITION_STATS.getPartitionPath(),
        convertMetadataToPartitionStatRecords(
            commitMetadata, instantTime, context, dataWriteConfig, dataMetaClient, tableMetadata, metadataConfig, recordTypeOpt, isDeletePartition));
  }
  if (enabledPartitionTypes.contains(MetadataPartitionType.RECORD_INDEX.getPartitionPath())) {
    records.put(MetadataPartitionType.RECORD_INDEX.getPartitionPath(),
        convertMetadataToRecordIndexRecords(context, commitMetadata, metadataConfig,
            dataMetaClient, writesFileIdEncoding, instantTime, engineType, enableOptimizeLogBlocksScan));
  }
  return records;
}
/**
 * Convert commit metadata into records for the PARTITION_STATS metadata partition.
 *
 * <p>Returns empty data when no writer/table schema is available, when there are no columns to
 * index, or when the commit carries no write stats. For DELETE_PARTITION operations, delete
 * (stub) records are generated for every indexed column of each deleted partition. Otherwise,
 * per-file column ranges derived from the commit's write stats are merged with COLUMN_STATS
 * records fetched from the metadata table for the remaining files of the affected partitions,
 * and rolled up into partition-level stats records.
 *
 * @param commitMetadata    commit action metadata being applied
 * @param instantTime       instant time of the commit
 * @param engineContext     engine context used to parallelize record generation
 * @param dataWriteConfig   write config of the data table
 * @param dataMetaClient    meta client of the data table
 * @param tableMetadata     metadata-table reader used to fetch existing COLUMN_STATS records
 * @param metadataConfig    metadata table configuration
 * @param recordTypeOpt     optional record type used when deciding the columns to index
 * @param isDeletePartition whether the operation is a DELETE_PARTITION
 * @return partition-stats records, possibly empty
 */
public static HoodieData<HoodieRecord> convertMetadataToPartitionStatRecords(HoodieCommitMetadata commitMetadata, String instantTime,
                                                                             HoodieEngineContext engineContext, HoodieWriteConfig dataWriteConfig,
                                                                             HoodieTableMetaClient dataMetaClient,
                                                                             HoodieTableMetadata tableMetadata, HoodieMetadataConfig metadataConfig,
                                                                             Option<HoodieRecord.HoodieRecordType> recordTypeOpt, boolean isDeletePartition) {
  try {
    // Writer schema is read from the commit metadata; an absent/blank schema means there is
    // nothing to index for this commit.
    Option<Schema> writerSchema =
        Option.ofNullable(commitMetadata.getMetadata(HoodieCommitMetadata.SCHEMA_KEY))
            .flatMap(writerSchemaStr ->
                isNullOrEmpty(writerSchemaStr)
                    ? Option.empty()
                    : Option.of(new Schema.Parser().parse(writerSchemaStr)));
    HoodieTableConfig tableConfig = dataMetaClient.getTableConfig();
    // Add the Hudi meta fields to the schema when the table populates them.
    Option<Schema> tableSchema = writerSchema.map(schema -> tableConfig.populateMetaFields() ? addMetadataFields(schema) : schema);
    if (tableSchema.isEmpty()) {
      return engineContext.emptyHoodieData();
    }
    HoodieIndexVersion partitionStatsIndexVersion = existingIndexVersionOrDefault(PARTITION_NAME_PARTITION_STATS, dataMetaClient);
    Lazy<Option<Schema>> writerSchemaOpt = Lazy.eagerly(tableSchema);
    Map<String, Schema> columnsToIndexSchemaMap = getColumnsToIndex(dataMetaClient.getTableConfig(), metadataConfig, writerSchemaOpt, false, recordTypeOpt, partitionStatsIndexVersion);
    if (columnsToIndexSchemaMap.isEmpty()) {
      return engineContext.emptyHoodieData();
    }
    // if this is DELETE_PARTITION, then create delete metadata payload for all columns for partition_stats
    if (isDeletePartition) {
      HoodieReplaceCommitMetadata replaceCommitMetadata = (HoodieReplaceCommitMetadata) commitMetadata;
      Map<String, List<String>> partitionToReplaceFileIds = replaceCommitMetadata.getPartitionToReplaceFileIds();
      List<String> partitionsToDelete = new ArrayList<>(partitionToReplaceFileIds.keySet());
      if (partitionToReplaceFileIds.isEmpty()) {
        return engineContext.emptyHoodieData();
      }
      return engineContext.parallelize(partitionsToDelete, partitionsToDelete.size()).flatMap(partition -> {
        // One delete (stub) record per indexed column for this deleted partition.
        Stream<HoodieRecord> columnRangeMetadata = columnsToIndexSchemaMap.keySet().stream()
            .flatMap(column -> HoodieMetadataPayload.createPartitionStatsRecords(
                partition,
                Collections.singletonList(HoodieColumnRangeMetadata.stub("", column, partitionStatsIndexVersion)),
                true, true, Option.empty()));
        return columnRangeMetadata.iterator();
      });
    }
    // In this function we fetch column range metadata for all new files part of commit metadata along with all the other files
    // of the affected partitions. The column range metadata is grouped by partition name to generate HoodiePairData of partition name
    // and list of column range metadata for that partition files. This pair data is then used to generate partition stat records.
    List<HoodieWriteStat> allWriteStats = commitMetadata.getPartitionToWriteStats().values().stream()
        .flatMap(Collection::stream).collect(Collectors.toList());
    if (allWriteStats.isEmpty()) {
      return engineContext.emptyHoodieData();
    }
    List<String> colsToIndex = new ArrayList<>(columnsToIndexSchemaMap.keySet());
    LOG.debug("Indexing following columns for partition stats index: {}", columnsToIndexSchemaMap.keySet());
    // Group by partitionPath and then gather write stats lists,
    // where each inner list contains HoodieWriteStat objects that have the same partitionPath.
    List<List<HoodieWriteStat>> partitionedWriteStats = new ArrayList<>(allWriteStats.stream()
        .collect(Collectors.groupingBy(HoodieWriteStat::getPartitionPath))
        .values());
    // For replace commits, the replaced file groups must be excluded from the stats fetch below.
    Map<String, Set<String>> fileGroupIdsToReplaceMap = (commitMetadata instanceof HoodieReplaceCommitMetadata)
        ? ((HoodieReplaceCommitMetadata) commitMetadata).getPartitionToReplaceFileIds()
            .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> new HashSet<>(e.getValue())))
        : Collections.emptyMap();
    int parallelism = Math.max(Math.min(partitionedWriteStats.size(), metadataConfig.getPartitionStatsIndexParallelism()), 1);
    String maxInstantTime = getMaxInstantTime(dataMetaClient, instantTime);
    HoodiePairData<String, List<HoodieColumnRangeMetadata<Comparable>>> columnRangeMetadata =
        engineContext.parallelize(partitionedWriteStats, parallelism).mapToPair(partitionedWriteStat -> {
          final String partitionName = partitionedWriteStat.get(0).getPartitionPath();
          checkState(tableMetadata != null, "tableMetadata should not be null when scanning metadata table");
          // Collect column metadata for each file part of the latest merged file slice before the current instant time
          List<HoodieColumnRangeMetadata<Comparable>> fileColumnMetadata = partitionedWriteStat.stream()
              .flatMap(writeStat -> translateWriteStatToFileStats(writeStat, dataMetaClient, colsToIndex, partitionStatsIndexVersion).stream()).collect(toList());
          // Collect column metadata of each file that does not have column stats provided by the write stat in the commit metadata
          Set<String> filesToFetchColumnStats = getFilesToFetchColumnStats(partitionedWriteStat, dataMetaClient, tableMetadata, dataWriteConfig, partitionName, maxInstantTime,
              instantTime, fileGroupIdsToReplaceMap, colsToIndex, partitionStatsIndexVersion);
          // Fetch metadata table COLUMN_STATS partition records for the above files
          List<HoodieColumnRangeMetadata<Comparable>> partitionColumnMetadata = tableMetadata
              .getRecordsByKeyPrefixes(
                  HoodieListData.lazy(generateColumnStatsKeys(colsToIndex, partitionName)),
                  MetadataPartitionType.COLUMN_STATS.getPartitionPath(), false)
              // schema and properties are ignored in getInsertValue, so simply pass as null
              .map(record -> ((HoodieMetadataPayload) record.getData()).getColumnStatMetadata())
              .filter(Option::isPresent)
              .map(colStatsOpt -> colStatsOpt.get())
              .filter(stats -> filesToFetchColumnStats.contains(stats.getFileName()))
              .map(HoodieColumnRangeMetadata::fromColumnStats).collectAsList();
          // fileColumnMetadata already contains stats for the files from the current inflight commit.
          // Here it adds the stats for the commited files part of the latest merged file slices
          fileColumnMetadata.addAll(partitionColumnMetadata);
          return Pair.of(partitionName, fileColumnMetadata);
        });
    return convertMetadataToPartitionStatsRecords(columnRangeMetadata, dataMetaClient, columnsToIndexSchemaMap, partitionStatsIndexVersion);
  } catch (Exception e) {
    // NOTE(review): message says "column stats" although this path generates partition stats —
    // consider updating the message text.
    throw new HoodieException("Failed to generate column stats records for metadata table", e);
  }
}
/**
 * Returns the cached {@code StoragePathInfo} of a base file, or synthesizes one from the file's
 * path and length when no cached info is present.
 */
private static StoragePathInfo getBaseFileStoragePathInfo(HoodieBaseFile baseFile) {
  StoragePathInfo cached = baseFile.getPathInfo();
  return cached != null
      ? cached
      : new StoragePathInfo(baseFile.getStoragePath(), baseFile.getFileLen(), false, (short) 0, 0, 0);
}
/**
 * Returns the cached {@code StoragePathInfo} of a log file, or synthesizes one from the file's
 * path and size when no cached info is present.
 */
private static StoragePathInfo getLogFileStoragePathInfo(HoodieLogFile logFile) {
  StoragePathInfo cached = logFile.getPathInfo();
  return cached != null
      ? cached
      : new StoragePathInfo(logFile.getPath(), logFile.getFileSize(), false, (short) 0, 0, 0);
}
/**
 * Returns the later (by lexicographic instant-time comparison) of the data table's last
 * completed instant time and the given instant time; falls back to {@code instantTime} when the
 * active timeline has no completed instant.
 */
public static String getMaxInstantTime(HoodieTableMetaClient dataMetaClient, String instantTime) {
  Option<String> lastCompleted = dataMetaClient.getActiveTimeline().filterCompletedInstants()
      .lastInstant()
      .map(HoodieInstant::requestedTime);
  if (!lastCompleted.isPresent()) {
    return instantTime;
  }
  String lastCompletedTime = lastCompleted.get();
  return lastCompletedTime.compareTo(instantTime) > 0 ? lastCompletedTime : instantTime;
}
/**
 * Collect the names of files in the latest merged file slices (including files of the current
 * inflight commit) that do NOT already have column stats supplied by the write stats in the
 * commit metadata, so their stats can instead be fetched from the COLUMN_STATS partition.
 *
 * <p>NOTE: a previously computed but unused {@code fileColumnMetadata} list (a leftover from
 * extracting this method out of {@code convertMetadataToPartitionStatRecords}, which performs
 * the same computation itself) has been removed; {@code colsToIndex} and
 * {@code partitionStatsIndexVersion} are retained for signature compatibility.
 *
 * @param partitionedWriteStat       write stats of the current commit, all for one partition
 * @param dataMetaClient             meta client of the data table
 * @param tableMetadata              metadata-table reader backing the file system views
 * @param dataWriteConfig            write config of the data table
 * @param partitionName              partition being processed
 * @param maxInstantTime             max of the last completed instant time and the current one
 * @param instantTime                instant time of the current (possibly inflight) commit
 * @param fileGroupIdsToReplaceMap   partition -> file group ids being replaced (replace commits)
 * @param colsToIndex                unused; kept for signature compatibility
 * @param partitionStatsIndexVersion unused; kept for signature compatibility
 * @return set of file names whose column stats must be fetched from the metadata table
 */
public static Set<String> getFilesToFetchColumnStats(List<HoodieWriteStat> partitionedWriteStat,
                                                     HoodieTableMetaClient dataMetaClient,
                                                     HoodieTableMetadata tableMetadata,
                                                     HoodieWriteConfig dataWriteConfig,
                                                     String partitionName,
                                                     String maxInstantTime,
                                                     String instantTime,
                                                     Map<String, Set<String>> fileGroupIdsToReplaceMap,
                                                     List<String> colsToIndex,
                                                     HoodieIndexVersion partitionStatsIndexVersion) {
  // Seed the consolidated view with the new files written by the current commit.
  List<StoragePathInfo> consolidatedPathInfos = new ArrayList<>();
  partitionedWriteStat.forEach(
      stat -> consolidatedPathInfos.add(
          new StoragePathInfo(new StoragePath(dataMetaClient.getBasePath(), stat.getPath()), stat.getFileSizeInBytes(), false, (short) 0, 0, 0)));
  // Add every file of the latest merged file slices of already committed data.
  SyncableFileSystemView fileSystemViewForCommitedFiles =
      FileSystemViewManager.createViewManager(new HoodieLocalEngineContext(dataMetaClient.getStorageConf()),
          dataWriteConfig.getMetadataConfig(), dataWriteConfig.getViewStorageConfig(), dataWriteConfig.getCommonConfig(),
          unused -> tableMetadata).getFileSystemView(dataMetaClient);
  fileSystemViewForCommitedFiles.getLatestMergedFileSlicesBeforeOrOn(partitionName, maxInstantTime)
      .forEach(fileSlice -> {
        if (fileSlice.getBaseFile().isPresent()) {
          consolidatedPathInfos.add(getBaseFileStoragePathInfo(fileSlice.getBaseFile().get()));
        }
        fileSlice.getLogFiles().forEach(logFile -> consolidatedPathInfos.add(getLogFileStoragePathInfo(logFile)));
      });
  SpillableMapBasedFileSystemView consolidatedFileSystemView = new SpillableMapBasedFileSystemView(
      tableMetadata, dataMetaClient, dataMetaClient.getActiveTimeline(),
      consolidatedPathInfos, dataWriteConfig.getViewStorageConfig(), dataWriteConfig.getCommonConfig());
  Set<String> fileGroupIdsToReplace = fileGroupIdsToReplaceMap.getOrDefault(partitionName, Collections.emptySet());
  // Files whose stats were already supplied by the commit metadata's write stats.
  Set<String> filesWithColumnStats = partitionedWriteStat.stream()
      .map(stat -> new StoragePath(stat.getPath()).getName()).collect(Collectors.toSet());
  // Everything in the merged (committed + inflight) slices that is neither covered by the write
  // stats nor being replaced needs its stats fetched from the metadata table.
  return consolidatedFileSystemView.getLatestMergedFileSlicesBeforeOrOnIncludingInflight(partitionName, maxInstantTime, instantTime)
      .flatMap(fileSlice -> Stream.concat(
          Stream.of(fileSlice.getBaseFile().map(HoodieBaseFile::getFileName).orElse(null)),
          fileSlice.getLogFiles().map(HoodieLogFile::getFileName)))
      .filter(e -> Objects.nonNull(e) && !filesWithColumnStats.contains(e) && !fileGroupIdsToReplace.contains(e))
      .collect(Collectors.toSet());
}
}
|
googleapis/google-cloud-java | 37,461 | java-retail/proto-google-cloud-retail-v2alpha/src/main/java/com/google/cloud/retail/v2alpha/ListCatalogsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2alpha/catalog_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2alpha;
/**
*
*
* <pre>
* Response for
* [CatalogService.ListCatalogs][google.cloud.retail.v2alpha.CatalogService.ListCatalogs]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2alpha.ListCatalogsResponse}
*/
public final class ListCatalogsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2alpha.ListCatalogsResponse)
ListCatalogsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Private: instances are constructed via ListCatalogsResponse.newBuilder().
private ListCatalogsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default-instance constructor: initializes fields to their proto3 defaults
// (empty catalog list, empty page token).
private ListCatalogsResponse() {
  catalogs_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Invoked by the protobuf runtime to create fresh instances.
  return new ListCatalogsResponse();
}
/** Returns the protobuf descriptor for this message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.retail.v2alpha.CatalogServiceProto
      .internal_static_google_cloud_retail_v2alpha_ListCatalogsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Binds the generated fields to the descriptor for reflective field access.
  return com.google.cloud.retail.v2alpha.CatalogServiceProto
      .internal_static_google_cloud_retail_v2alpha_ListCatalogsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.retail.v2alpha.ListCatalogsResponse.class,
          com.google.cloud.retail.v2alpha.ListCatalogsResponse.Builder.class);
}
public static final int CATALOGS_FIELD_NUMBER = 1;
// Backing list for the repeated `catalogs` field; set to an empty list by the default ctor.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.retail.v2alpha.Catalog> catalogs_;
/**
 *
 *
 * <pre>
 * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
 *
 * @return the list of catalogs in this response page
 */
@java.lang.Override
public java.util.List<com.google.cloud.retail.v2alpha.Catalog> getCatalogsList() {
  return catalogs_;
}
/**
 *
 *
 * <pre>
 * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
 *
 * @return the catalogs viewed through their message-or-builder interface
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.retail.v2alpha.CatalogOrBuilder>
    getCatalogsOrBuilderList() {
  return catalogs_;
}
/**
 *
 *
 * <pre>
 * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
 *
 * @return the number of catalogs in this response page
 */
@java.lang.Override
public int getCatalogsCount() {
  return catalogs_.size();
}
/**
 *
 *
 * <pre>
 * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
 *
 * @return the catalog at the given index
 */
@java.lang.Override
public com.google.cloud.retail.v2alpha.Catalog getCatalogs(int index) {
  return catalogs_.get(index);
}
/**
 *
 *
 * <pre>
 * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
 *
 * @return the catalog at the given index, viewed through its message-or-builder interface
 */
@java.lang.Override
public com.google.cloud.retail.v2alpha.CatalogOrBuilder getCatalogsOrBuilder(int index) {
  return catalogs_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; lazily decoded and cached by the accessors below.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A token that can be sent as
 * [ListCatalogsRequest.page_token][google.cloud.retail.v2alpha.ListCatalogsRequest.page_token]
 * to retrieve the next page. If this field is omitted, there are no
 * subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the serialized ByteString once and cache the String for subsequent calls.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * A token that can be sent as
 * [ListCatalogsRequest.page_token][google.cloud.retail.v2alpha.ListCatalogsRequest.page_token]
 * to retrieve the next page. If this field is omitted, there are no
 * subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String once and store the ByteString for subsequent calls.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Initialization-check cache: -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in this proto3 message, so it is always initialized once computed.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1: each catalog as a length-delimited message.
  for (int i = 0; i < catalogs_.size(); i++) {
    output.writeMessage(1, catalogs_.get(i));
  }
  // Field 2: written only when non-empty (proto3 defaults are omitted on the wire).
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size; // return cached size when already computed (-1 = not computed)
  size = 0;
  for (int i = 0; i < catalogs_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, catalogs_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size; // cache for subsequent calls
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.retail.v2alpha.ListCatalogsResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.retail.v2alpha.ListCatalogsResponse other =
      (com.google.cloud.retail.v2alpha.ListCatalogsResponse) obj;
  // Field-by-field comparison, including any unknown fields carried along.
  if (!getCatalogsList().equals(other.getCatalogsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode; // cached; 0 doubles as the "not computed" sentinel
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // The repeated field's number and value are mixed in only when the list is non-empty.
  if (getCatalogsCount() > 0) {
    hash = (37 * hash) + CATALOGS_FIELD_NUMBER;
    hash = (53 * hash) + getCatalogsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
  // ---------------------------------------------------------------------------
  // Static parse entry points. All overloads delegate to the shared PARSER
  // (or the GeneratedMessageV3 IOException-translating helpers for streams).
  // Overloads without an ExtensionRegistryLite ignore extensions; the
  // parseDelimitedFrom variants read a varint length prefix first.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: all builders are derived from DEFAULT_INSTANCE
  // so that a fresh builder starts from the canonical default state.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.cloud.retail.v2alpha.ListCatalogsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when this IS the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response for
   * [CatalogService.ListCatalogs][google.cloud.retail.v2alpha.CatalogService.ListCatalogs]
   * method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.retail.v2alpha.ListCatalogsResponse}
   */
  // Generated builder. Field presence is tracked in bitField0_:
  //   bit 0x00000001 -> catalogs_ list is mutable (builder owns it)
  //   bit 0x00000002 -> nextPageToken_ was explicitly set
  // The repeated 'catalogs' field operates in one of two modes: a plain
  // java.util.List (catalogsBuilder_ == null) or a RepeatedFieldBuilderV3
  // (catalogsBuilder_ != null) once nested builders are requested.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2alpha.ListCatalogsResponse)
      com.google.cloud.retail.v2alpha.ListCatalogsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.retail.v2alpha.CatalogServiceProto
          .internal_static_google_cloud_retail_v2alpha_ListCatalogsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.retail.v2alpha.CatalogServiceProto
          .internal_static_google_cloud_retail_v2alpha_ListCatalogsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.retail.v2alpha.ListCatalogsResponse.class,
              com.google.cloud.retail.v2alpha.ListCatalogsResponse.Builder.class);
    }

    // Construct using com.google.cloud.retail.v2alpha.ListCatalogsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (catalogsBuilder_ == null) {
        catalogs_ = java.util.Collections.emptyList();
      } else {
        catalogs_ = null;
        catalogsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.retail.v2alpha.CatalogServiceProto
          .internal_static_google_cloud_retail_v2alpha_ListCatalogsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.retail.v2alpha.ListCatalogsResponse getDefaultInstanceForType() {
      return com.google.cloud.retail.v2alpha.ListCatalogsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.retail.v2alpha.ListCatalogsResponse build() {
      com.google.cloud.retail.v2alpha.ListCatalogsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds without the isInitialized() check; repeated fields are handled
    // separately so the list can be frozen (made unmodifiable) exactly once.
    @java.lang.Override
    public com.google.cloud.retail.v2alpha.ListCatalogsResponse buildPartial() {
      com.google.cloud.retail.v2alpha.ListCatalogsResponse result =
          new com.google.cloud.retail.v2alpha.ListCatalogsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.cloud.retail.v2alpha.ListCatalogsResponse result) {
      if (catalogsBuilder_ == null) {
        // Freeze the builder-owned list and hand it to the message; the
        // mutability bit is dropped so later builder edits re-copy the list.
        if (((bitField0_ & 0x00000001) != 0)) {
          catalogs_ = java.util.Collections.unmodifiableList(catalogs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.catalogs_ = catalogs_;
      } else {
        result.catalogs_ = catalogsBuilder_.build();
      }
    }

    private void buildPartial0(com.google.cloud.retail.v2alpha.ListCatalogsResponse result) {
      int from_bitField0_ = bitField0_;
      // Only copy nextPageToken_ when it was explicitly set on this builder.
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.retail.v2alpha.ListCatalogsResponse) {
        return mergeFrom((com.google.cloud.retail.v2alpha.ListCatalogsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: repeated catalogs are appended; a non-empty
    // nextPageToken in 'other' overwrites this builder's value.
    public Builder mergeFrom(com.google.cloud.retail.v2alpha.ListCatalogsResponse other) {
      if (other == com.google.cloud.retail.v2alpha.ListCatalogsResponse.getDefaultInstance())
        return this;
      if (catalogsBuilder_ == null) {
        if (!other.catalogs_.isEmpty()) {
          if (catalogs_.isEmpty()) {
            // Adopt the other message's (immutable) list directly; clear the
            // mutability bit so any later mutation copies it first.
            catalogs_ = other.catalogs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureCatalogsIsMutable();
            catalogs_.addAll(other.catalogs_);
          }
          onChanged();
        }
      } else {
        if (!other.catalogs_.isEmpty()) {
          if (catalogsBuilder_.isEmpty()) {
            // Field builder is empty: cheaper to drop it, adopt the list, and
            // (when alwaysUseFieldBuilders) recreate the builder over it.
            catalogsBuilder_.dispose();
            catalogsBuilder_ = null;
            catalogs_ = other.catalogs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            catalogsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getCatalogsFieldBuilder()
                    : null;
          } else {
            catalogsBuilder_.addAllMessages(other.catalogs_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming merge: dispatches on wire tags (10 = field 1 length-delimited,
    // 18 = field 2 length-delimited); unknown tags are preserved via
    // parseUnknownField, and tag 0 / end-group terminates the loop.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.retail.v2alpha.Catalog m =
                    input.readMessage(
                        com.google.cloud.retail.v2alpha.Catalog.parser(), extensionRegistry);
                if (catalogsBuilder_ == null) {
                  ensureCatalogsIsMutable();
                  catalogs_.add(m);
                } else {
                  catalogsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    // -----------------------------------------------------------------------
    // repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;
    // -----------------------------------------------------------------------
    private java.util.List<com.google.cloud.retail.v2alpha.Catalog> catalogs_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: copies the list into an ArrayList the first time a
    // mutation occurs (bit 0x00000001 records builder ownership).
    private void ensureCatalogsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        catalogs_ = new java.util.ArrayList<com.google.cloud.retail.v2alpha.Catalog>(catalogs_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.retail.v2alpha.Catalog,
            com.google.cloud.retail.v2alpha.Catalog.Builder,
            com.google.cloud.retail.v2alpha.CatalogOrBuilder>
        catalogsBuilder_;

    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public java.util.List<com.google.cloud.retail.v2alpha.Catalog> getCatalogsList() {
      if (catalogsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(catalogs_);
      } else {
        return catalogsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public int getCatalogsCount() {
      if (catalogsBuilder_ == null) {
        return catalogs_.size();
      } else {
        return catalogsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public com.google.cloud.retail.v2alpha.Catalog getCatalogs(int index) {
      if (catalogsBuilder_ == null) {
        return catalogs_.get(index);
      } else {
        return catalogsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public Builder setCatalogs(int index, com.google.cloud.retail.v2alpha.Catalog value) {
      if (catalogsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCatalogsIsMutable();
        catalogs_.set(index, value);
        onChanged();
      } else {
        catalogsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public Builder setCatalogs(
        int index, com.google.cloud.retail.v2alpha.Catalog.Builder builderForValue) {
      if (catalogsBuilder_ == null) {
        ensureCatalogsIsMutable();
        catalogs_.set(index, builderForValue.build());
        onChanged();
      } else {
        catalogsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public Builder addCatalogs(com.google.cloud.retail.v2alpha.Catalog value) {
      if (catalogsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCatalogsIsMutable();
        catalogs_.add(value);
        onChanged();
      } else {
        catalogsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public Builder addCatalogs(int index, com.google.cloud.retail.v2alpha.Catalog value) {
      if (catalogsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCatalogsIsMutable();
        catalogs_.add(index, value);
        onChanged();
      } else {
        catalogsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public Builder addCatalogs(com.google.cloud.retail.v2alpha.Catalog.Builder builderForValue) {
      if (catalogsBuilder_ == null) {
        ensureCatalogsIsMutable();
        catalogs_.add(builderForValue.build());
        onChanged();
      } else {
        catalogsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public Builder addCatalogs(
        int index, com.google.cloud.retail.v2alpha.Catalog.Builder builderForValue) {
      if (catalogsBuilder_ == null) {
        ensureCatalogsIsMutable();
        catalogs_.add(index, builderForValue.build());
        onChanged();
      } else {
        catalogsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public Builder addAllCatalogs(
        java.lang.Iterable<? extends com.google.cloud.retail.v2alpha.Catalog> values) {
      if (catalogsBuilder_ == null) {
        ensureCatalogsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, catalogs_);
        onChanged();
      } else {
        catalogsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public Builder clearCatalogs() {
      if (catalogsBuilder_ == null) {
        catalogs_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        catalogsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public Builder removeCatalogs(int index) {
      if (catalogsBuilder_ == null) {
        ensureCatalogsIsMutable();
        catalogs_.remove(index);
        onChanged();
      } else {
        catalogsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public com.google.cloud.retail.v2alpha.Catalog.Builder getCatalogsBuilder(int index) {
      return getCatalogsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public com.google.cloud.retail.v2alpha.CatalogOrBuilder getCatalogsOrBuilder(int index) {
      if (catalogsBuilder_ == null) {
        return catalogs_.get(index);
      } else {
        return catalogsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.retail.v2alpha.CatalogOrBuilder>
        getCatalogsOrBuilderList() {
      if (catalogsBuilder_ != null) {
        return catalogsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(catalogs_);
      }
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public com.google.cloud.retail.v2alpha.Catalog.Builder addCatalogsBuilder() {
      return getCatalogsFieldBuilder()
          .addBuilder(com.google.cloud.retail.v2alpha.Catalog.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public com.google.cloud.retail.v2alpha.Catalog.Builder addCatalogsBuilder(int index) {
      return getCatalogsFieldBuilder()
          .addBuilder(index, com.google.cloud.retail.v2alpha.Catalog.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * All the customer's [Catalog][google.cloud.retail.v2alpha.Catalog]s.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.Catalog catalogs = 1;</code>
     */
    public java.util.List<com.google.cloud.retail.v2alpha.Catalog.Builder>
        getCatalogsBuilderList() {
      return getCatalogsFieldBuilder().getBuilderList();
    }

    // Lazily switches the repeated field into field-builder mode; from then on
    // catalogs_ is null and all access goes through catalogsBuilder_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.retail.v2alpha.Catalog,
            com.google.cloud.retail.v2alpha.Catalog.Builder,
            com.google.cloud.retail.v2alpha.CatalogOrBuilder>
        getCatalogsFieldBuilder() {
      if (catalogsBuilder_ == null) {
        catalogsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.retail.v2alpha.Catalog,
                com.google.cloud.retail.v2alpha.Catalog.Builder,
                com.google.cloud.retail.v2alpha.CatalogOrBuilder>(
                catalogs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        catalogs_ = null;
      }
      return catalogsBuilder_;
    }

    // -----------------------------------------------------------------------
    // string next_page_token = 2;
    // Stored as Object: either a String or a ByteString, converted lazily.
    // -----------------------------------------------------------------------
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token that can be sent as
     * [ListCatalogsRequest.page_token][google.cloud.retail.v2alpha.ListCatalogsRequest.page_token]
     * to retrieve the next page. If this field is omitted, there are no
     * subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and memoize the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token that can be sent as
     * [ListCatalogsRequest.page_token][google.cloud.retail.v2alpha.ListCatalogsRequest.page_token]
     * to retrieve the next page. If this field is omitted, there are no
     * subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Encode the cached String once and memoize the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token that can be sent as
     * [ListCatalogsRequest.page_token][google.cloud.retail.v2alpha.ListCatalogsRequest.page_token]
     * to retrieve the next page. If this field is omitted, there are no
     * subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token that can be sent as
     * [ListCatalogsRequest.page_token][google.cloud.retail.v2alpha.ListCatalogsRequest.page_token]
     * to retrieve the next page. If this field is omitted, there are no
     * subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token that can be sent as
     * [ListCatalogsRequest.page_token][google.cloud.retail.v2alpha.ListCatalogsRequest.page_token]
     * to retrieve the next page. If this field is omitted, there are no
     * subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject invalid bytes eagerly.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2alpha.ListCatalogsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.retail.v2alpha.ListCatalogsResponse)
  // Singleton default instance: the canonical all-defaults message, shared by
  // every getDefaultInstance()/getDefaultInstanceForType() call.
  private static final com.google.cloud.retail.v2alpha.ListCatalogsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2alpha.ListCatalogsResponse();
  }

  public static com.google.cloud.retail.v2alpha.ListCatalogsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser instance backing all parseFrom(...) overloads. Parsing is
  // implemented by round-tripping through a Builder so that partial input
  // still yields an "unfinished message" attached to the thrown exception.
  private static final com.google.protobuf.Parser<ListCatalogsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListCatalogsResponse>() {
        @java.lang.Override
        public ListCatalogsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Translate plain I/O failures into protobuf parse failures,
            // preserving whatever was parsed so far.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListCatalogsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListCatalogsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.retail.v2alpha.ListCatalogsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/tez | 37,583 | tez-runtime-library/src/main/java/org/apache/tez/runtime/library/common/sort/impl/IFile.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.runtime.library.common.sort.impl;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BoundedByteArrayOutputStream;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.serializer.Serialization;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.tez.common.counters.TezCounter;
import org.apache.tez.runtime.library.common.task.local.output.TezTaskOutput;
import org.apache.tez.runtime.library.utils.BufferUtils;
import org.apache.tez.runtime.library.utils.CodecUtils;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <code>IFile</code> is the simple <key-len, value-len, key, value> format
* for the intermediate map-outputs in Map-Reduce.
*
* There is a <code>Writer</code> to write out map-outputs in this format and
* a <code>Reader</code> to read files of this format.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public final class IFile {
  private static final Logger LOG = LoggerFactory.getLogger(IFile.class);
  // Sentinel key-length values written into the stream; real key lengths are
  // always >= 0, so negative markers are unambiguous on read.
  public static final int EOF_MARKER = -1; // End of File Marker
  public static final int RLE_MARKER = -2; // Repeat same key marker
  public static final int V_END_MARKER = -3; // End of values marker
  // Shared sentinel buffer handed to readers to signal "key repeats previous".
  public static final DataInputBuffer REPEAT_KEY = new DataInputBuffer();
  // File magic: ASCII "TIF" followed by a version byte of 0.
  static final byte[] HEADER = new byte[] { (byte) 'T', (byte) 'I',
      (byte) 'F' , (byte) 0};
  // Error-message format strings (filled in with String.format at use sites).
  private static final String INCOMPLETE_READ = "Requested to read %d got %d";
  private static final String REQ_BUFFER_SIZE_TOO_LARGE = "Size of data %d is greater than the max allowed of %d";

  // Utility holder class; never instantiated.
  private IFile() {}
/**
* IFileWriter which stores data in memory for specified limit, beyond
* which it falls back to file based writer. It creates files lazily on
* need basis and avoids any disk hit (in cases, where data fits entirely in mem).
* <p>
* This class should not make any changes to IFile logic and should just flip streams
* from mem to disk on need basis.
*
* During write, it verifies whether uncompressed payload can fit in memory. If so, it would
* store in buffer. Otherwise, it falls back to file based writer. Note that data stored
* internally would be in compressed format (if codec is provided). However, for easier
* comparison and spill over, uncompressed payload check is done. This is
* done intentionally, as it is not possible to know compressed data length
* upfront.
*/
public static class FileBackedInMemIFileWriter extends Writer {
private final FileSystem fs;
private boolean bufferFull;
// For lazy creation of file
private final TezTaskOutput taskOutput;
private int totalSize;
private Path outputPath;
private final CompressionCodec fileCodec;
private final BoundedByteArrayOutputStream cacheStream;
private static final int checksumSize = IFileOutputStream.getCheckSumSize();
/**
* Note that we do not allow compression in in-mem stream.
* When spilled over to file, compression gets enabled.
*/
public FileBackedInMemIFileWriter(Serialization<?> keySerialization,
Serialization<?> valSerialization, FileSystem fs, TezTaskOutput taskOutput,
Class<?> keyClass, Class<?> valueClass, CompressionCodec codec, TezCounter writesCounter,
TezCounter serializedBytesCounter, int cacheSize) throws IOException {
super(keySerialization, valSerialization, new FSDataOutputStream(createBoundedBuffer(cacheSize), null),
keyClass, valueClass, null, writesCounter, serializedBytesCounter);
this.fs = fs;
this.cacheStream = (BoundedByteArrayOutputStream) this.rawOut.getWrappedStream();
this.taskOutput = taskOutput;
this.bufferFull = (cacheStream == null);
this.totalSize = getBaseCacheSize();
this.fileCodec = codec;
}
/**
* For basic cache size checks: header + checksum + EOF marker
*
* @return size of the base cache needed
*/
static int getBaseCacheSize() {
return (HEADER.length + checksumSize
+ (2 * WritableUtils.getVIntSize(EOF_MARKER)));
}
boolean shouldWriteToDisk() {
return totalSize >= cacheStream.getLimit();
}
/**
* Create in mem stream. In it is too small, adjust it's size
*
* @return in memory stream
*/
public static BoundedByteArrayOutputStream createBoundedBuffer(int size) {
int resize = Math.max(getBaseCacheSize(), size);
return new BoundedByteArrayOutputStream(resize);
}
/**
* Flip over from memory to file based writer.
*
* 1. Content format: HEADER + real data + CHECKSUM. Checksum is for real
* data.
* 2. Before flipping, close checksum stream, so that checksum is written
* out.
* 3. Create relevant file based writer.
* 4. Write header and then real data.
*/
private void resetToFileBasedWriter() throws IOException {
// Close out stream, so that data checksums are written.
// Buf contents = HEADER + real data + CHECKSUM
this.out.close();
// Get the buffer which contains data in memory
BoundedByteArrayOutputStream bout =
(BoundedByteArrayOutputStream) this.rawOut.getWrappedStream();
// Create new file based writer
if (outputPath == null) {
outputPath = taskOutput.getOutputFileForWrite();
}
LOG.info("Switching from mem stream to disk stream. File: " + outputPath);
FSDataOutputStream newRawOut = fs.create(outputPath);
this.rawOut = newRawOut;
this.ownOutputStream = true;
setupOutputStream(fileCodec);
// Write header to file
headerWritten = false;
writeHeader(newRawOut);
// write real data
int sPos = HEADER.length;
int len = (bout.size() - checksumSize - HEADER.length);
this.out.write(bout.getBuffer(), sPos, len);
bufferFull = true;
bout.reset();
}
@Override
protected void writeKVPair(byte[] keyData, int keyPos, int keyLength,
byte[] valueData, int valPos, int valueLength) throws IOException {
if (!bufferFull) {
// Compute actual payload size: write RLE marker, length info and then entire data.
totalSize += ((prevKey == REPEAT_KEY) ? V_END_MARKER_SIZE : 0)
+ WritableUtils.getVIntSize(keyLength) + keyLength
+ WritableUtils.getVIntSize(valueLength) + valueLength;
if (shouldWriteToDisk()) {
resetToFileBasedWriter();
}
}
super.writeKVPair(keyData, keyPos, keyLength, valueData, valPos, valueLength);
}
@Override
protected void writeValue(byte[] data, int offset, int length) throws IOException {
if (!bufferFull) {
totalSize += ((prevKey != REPEAT_KEY) ? RLE_MARKER_SIZE : 0)
+ WritableUtils.getVIntSize(length) + length;
if (shouldWriteToDisk()) {
resetToFileBasedWriter();
}
}
super.writeValue(data, offset, length);
}
/**
 * Check if data was flushed to disk.
 *
 * @return whether data is flushed to disk or not
 */
public boolean isDataFlushedToDisk() {
  return bufferFull;
}
/**
 * Get cached data if any.
 *
 * @return the in-memory contents when nothing has been flushed to disk;
 *         {@code null} once the writer has switched to a file-backed stream
 */
public ByteBuffer getData() {
  if (isDataFlushedToDisk()) {
    return null;
  }
  return ByteBuffer.wrap(cacheStream.getBuffer(), 0, cacheStream.size());
}
@VisibleForTesting
void setOutputPath(Path outputPath) {
  // Test hook: pre-seed the spill path so resetToFileBasedWriter() does not
  // need to ask taskOutput for one.
  this.outputPath = outputPath;
}
/** @return the spill file path, or null if no spill path has been assigned yet. */
public Path getOutputPath() {
  return this.outputPath;
}
}
/**
 * <code>IFile.Writer</code> to write out intermediate map-outputs.
 *
 * Stream layering (outermost first): {@code out} (possibly compressing via
 * {@code compressedOut}) -> {@code checksumOut} -> {@code rawOut}. Records are
 * written as VInt-length-prefixed key/value bytes; when {@code rle} is enabled,
 * repeated keys are collapsed into RLE runs (see writeRLE/writeValueMarker).
 * Not thread-safe.
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
@SuppressWarnings({"unchecked", "rawtypes"})
public static class Writer {
  protected DataOutputStream out;
  // True when this Writer created rawOut itself and must close it.
  boolean ownOutputStream = false;
  // Position of rawOut when this writer started; used to compute compressedBytesWritten.
  long start = 0;
  FSDataOutputStream rawOut;
  // Guards against double-close.
  final AtomicBoolean closed = new AtomicBoolean(false);

  CompressionOutputStream compressedOut;
  Compressor compressor;
  boolean compressOutput = false;

  long decompressedBytesWritten = 0;
  long compressedBytesWritten = 0;

  // Count records written to disk
  private long numRecordsWritten = 0;
  private long rleWritten = 0; //number of RLE markers written
  private long totalKeySaving = 0; //number of keys saved due to multi KV writes + RLE
  private final TezCounter writtenRecordsCounter;
  private final TezCounter serializedUncompressedBytes;

  IFileOutputStream checksumOut;

  boolean closeSerializers = false;
  Serializer keySerializer = null;
  Serializer valueSerializer = null;

  // Scratch buffer into which key/value serializers write.
  final DataOutputBuffer buffer = new DataOutputBuffer();
  // Copy of the previously written key, used for RLE key comparison.
  final DataOutputBuffer previous = new DataOutputBuffer();
  // Previous key object; REPEAT_KEY sentinel means "inside an RLE run".
  Object prevKey = null;
  boolean headerWritten = false;
  @VisibleForTesting
  boolean sameKey = false;

  final int RLE_MARKER_SIZE = WritableUtils.getVIntSize(RLE_MARKER);
  final int V_END_MARKER_SIZE = WritableUtils.getVIntSize(V_END_MARKER);

  // de-dup keys or not
  protected final boolean rle;

  /** Creates a writer over a newly created file; the stream is owned by this writer. */
  public Writer(Serialization keySerialization, Serialization valSerialization, FileSystem fs, Path file,
      Class keyClass, Class valueClass,
      CompressionCodec codec,
      TezCounter writesCounter,
      TezCounter serializedBytesCounter) throws IOException {
    this(keySerialization, valSerialization, fs.create(file), keyClass, valueClass, codec,
        writesCounter, serializedBytesCounter);
    ownOutputStream = true;
  }

  // Minimal constructor for subclasses that set up their own streams.
  protected Writer(TezCounter writesCounter, TezCounter serializedBytesCounter, boolean rle) {
    writtenRecordsCounter = writesCounter;
    serializedUncompressedBytes = serializedBytesCounter;
    this.rle = rle;
  }

  public Writer(Serialization keySerialization, Serialization valSerialization, FSDataOutputStream outputStream,
      Class keyClass, Class valueClass, CompressionCodec codec, TezCounter writesCounter,
      TezCounter serializedBytesCounter) throws IOException {
    this(keySerialization, valSerialization, outputStream, keyClass, valueClass, codec, writesCounter,
        serializedBytesCounter, false);
  }

  /**
   * Creates a writer over an existing stream (not owned by this writer unless
   * a delegating constructor says otherwise). Writes the IFile header
   * immediately. When keyClass is null (e.g. BackupStore usage), no
   * serializers are opened and callers must use the raw append APIs.
   */
  public Writer(Serialization keySerialization, Serialization valSerialization, FSDataOutputStream outputStream,
      Class keyClass, Class valueClass,
      CompressionCodec codec, TezCounter writesCounter, TezCounter serializedBytesCounter,
      boolean rle) throws IOException {
    this.rawOut = outputStream;
    this.writtenRecordsCounter = writesCounter;
    this.serializedUncompressedBytes = serializedBytesCounter;
    this.start = this.rawOut.getPos();
    this.rle = rle;

    setupOutputStream(codec);
    writeHeader(outputStream);

    if (keyClass != null) {
      this.closeSerializers = true;
      this.keySerializer = keySerialization.getSerializer(keyClass);
      this.keySerializer.open(buffer);
      this.valueSerializer = valSerialization.getSerializer(valueClass);
      this.valueSerializer.open(buffer);
    } else {
      this.closeSerializers = false;
    }
  }

  // Builds the out -> (compressedOut) -> checksumOut -> rawOut chain. Falls
  // back to uncompressed output if no compressor can be obtained from the pool.
  void setupOutputStream(CompressionCodec codec) throws IOException {
    this.checksumOut = new IFileOutputStream(this.rawOut);
    if (codec != null) {
      this.compressor = CodecUtils.getCompressor(codec);
      if (this.compressor != null) {
        this.compressor.reset();
        this.compressedOut = CodecUtils.createOutputStream(codec, checksumOut, compressor);
        this.out = new FSDataOutputStream(this.compressedOut, null);
        this.compressOutput = true;
      } else {
        LOG.warn("Could not obtain compressor from CodecPool");
        this.out = new FSDataOutputStream(checksumOut,null);
      }
    } else {
      LOG.warn("Could not obtain compressor from CodecPool");
      this.out = new FSDataOutputStream(checksumOut,null);
    }
  }

  public Writer(Serialization keySerialization, Serialization valSerialization, FileSystem fs, Path file) throws IOException {
    this(keySerialization, valSerialization, fs, file, null, null, null, null, null);
  }

  // Writes the magic bytes followed by a 1-byte compressed flag; idempotent.
  protected void writeHeader(OutputStream outputStream) throws IOException {
    if (!headerWritten) {
      outputStream.write(HEADER, 0, HEADER.length - 1);
      outputStream.write((compressOutput) ? (byte) 1 : (byte) 0);
      headerWritten = true;
    }
  }

  /**
   * Terminates any open RLE run, writes the EOF marker, flushes/finishes the
   * compression and checksum layers, and updates counters. Throws if called
   * more than once.
   */
  public void close() throws IOException {
    if (closed.getAndSet(true)) {
      throw new IOException("Writer was already closed earlier");
    }

    // When IFile writer is created by BackupStore, we do not have
    // Key and Value classes set. So, check before closing the
    // serializers
    if (closeSerializers) {
      keySerializer.close();
      valueSerializer.close();
    }

    // write V_END_MARKER as needed
    writeValueMarker(out);

    // Write EOF_MARKER for key/value length
    WritableUtils.writeVInt(out, EOF_MARKER);
    WritableUtils.writeVInt(out, EOF_MARKER);
    decompressedBytesWritten += 2L * WritableUtils.getVIntSize(EOF_MARKER);
    //account for header bytes
    decompressedBytesWritten += HEADER.length;

    // Close the underlying stream iff we own it...
    if (ownOutputStream) {
      out.close();
    } else {
      if (compressOutput) {
        // Flush
        compressedOut.finish();
        compressedOut.resetState();
      }

      // Write the checksum and flush the buffer
      checksumOut.finish();
    }
    //header bytes are already included in rawOut
    compressedBytesWritten = rawOut.getPos() - start;

    if (compressOutput) {
      // Return back the compressor
      CodecPool.returnCompressor(compressor);
      compressor = null;
    }

    out = null;
    if (writtenRecordsCounter != null) {
      writtenRecordsCounter.increment(numRecordsWritten);
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Total keys written=" + numRecordsWritten + "; rleEnabled=" + rle + "; Savings" +
          "(due to multi-kv/rle)=" + totalKeySaving + "; number of RLEs written=" +
          rleWritten + "; compressedLen=" + compressedBytesWritten + "; rawLen="
          + decompressedBytesWritten);
    }
  }

  /**
   * Send key/value to be appended to IFile. To represent same key as previous
   * one, send IFile.REPEAT_KEY as key parameter. Should not call this method with
   * IFile.REPEAT_KEY as the first key. It is caller's responsibility to ensure that correct
   * key/value type checks and key/value length (non-negative) checks are done properly.
   */
  public void append(Object key, Object value) throws IOException {
    int keyLength = 0;
    sameKey = (key == REPEAT_KEY);
    if (!sameKey) {
      keySerializer.serialize(key);
      keyLength = buffer.getLength();
      assert(keyLength >= 0);
      // With RLE, compare serialized bytes against the previous key to detect
      // repeats even when the caller did not pass REPEAT_KEY.
      if (rle && (keyLength == previous.getLength())) {
        sameKey = (BufferUtils.compare(previous, buffer) == 0);
      }
    }

    // Append the 'value'
    valueSerializer.serialize(value);
    int valueLength = buffer.getLength() - keyLength;
    assert(valueLength >= 0);
    if (!sameKey) {
      //dump entire key value pair
      writeKVPair(buffer.getData(), 0, keyLength, buffer.getData(),
          keyLength, buffer.getLength() - keyLength);
      if (rle) {
        previous.reset();
        previous.write(buffer.getData(), 0, keyLength); //store the key
      }
    } else {
      writeValue(buffer.getData(), keyLength, valueLength);
    }

    prevKey = (sameKey) ? REPEAT_KEY : key;
    // Reset
    buffer.reset();
    ++numRecordsWritten;
  }

  /**
   * Appends the value to previous key. Assumes that the caller has already done relevant checks
   * for identical keys. Also, no validations are done in this method
   */
  public void appendValue(Object value) throws IOException {
    valueSerializer.serialize(value);
    int valueLength = buffer.getLength();
    writeValue(buffer.getData(), 0, valueLength);
    buffer.reset();
    ++numRecordsWritten;
    prevKey = REPEAT_KEY;
  }

  /**
   * Appends the value to previous key. Assumes that the caller has already done relevant checks
   * for identical keys. Also, no validations are done in this method. It is caller's responsibility
   * to pass non-negative key/value lengths. Otherwise,IndexOutOfBoundsException could be
   * thrown at runtime.
   */
  public void appendValue(DataInputBuffer value) throws IOException {
    int valueLength = value.getLength() - value.getPosition();
    assert(valueLength >= 0);
    writeValue(value.getData(), value.getPosition(), valueLength);
    buffer.reset();
    ++numRecordsWritten;
    prevKey = REPEAT_KEY;
  }

  /**
   * Appends the value to previous key. Assumes that the caller has already done relevant checks
   * for identical keys. Also, no validations are done in this method
   */
  public <V> void appendValues(Iterator<V> valuesItr) throws IOException {
    while(valuesItr.hasNext()) {
      appendValue(valuesItr.next());
    }
  }

  /**
   * Append key and its associated set of values.
   */
  public <K, V> void appendKeyValues(K key, Iterator<V> valuesItr) throws IOException {
    if (valuesItr.hasNext()) {
      append(key, valuesItr.next()); //append first KV pair
    }
    //append the remaining values
    while(valuesItr.hasNext()) {
      appendValue(valuesItr.next());
    }
  }

  /**
   * Send key/value to be appended to IFile. To represent same key as previous
   * one, send IFile.REPEAT_KEY as key parameter. Should not call this method with
   * IFile.REPEAT_KEY as the first key. It is caller's responsibility to pass non-negative
   * key/value lengths. Otherwise,IndexOutOfBoundsException could be thrown at runtime.
   */
  public void append(DataInputBuffer key, DataInputBuffer value) throws IOException {
    int keyLength = key.getLength() - key.getPosition();
    assert(key == REPEAT_KEY || keyLength >=0);

    int valueLength = value.getLength() - value.getPosition();
    assert(valueLength >= 0);

    sameKey = (key == REPEAT_KEY);
    if (!sameKey && rle) {
      sameKey = (keyLength != 0) && (BufferUtils.compare(previous, key) == 0);
    }

    if (!sameKey) {
      writeKVPair(key.getData(), key.getPosition(), keyLength,
          value.getData(), value.getPosition(), valueLength);
      if (rle) {
        BufferUtils.copy(key, previous);
      }
    } else {
      writeValue(value.getData(), value.getPosition(), valueLength);
    }
    prevKey = (sameKey) ? REPEAT_KEY : key;
    ++numRecordsWritten;
  }

  // Writes a value belonging to the current RLE run (emitting the RLE marker
  // first if this is the run's second value).
  protected void writeValue(byte[] data, int offset, int length) throws IOException {
    writeRLE(out);
    WritableUtils.writeVInt(out, length); // value length
    out.write(data, offset, length);
    // Update bytes written
    decompressedBytesWritten +=
        length + WritableUtils.getVIntSize(length);
    if (serializedUncompressedBytes != null) {
      serializedUncompressedBytes.increment(length);
    }
    totalKeySaving++;
  }

  // Writes a full {keyLen, valLen, key, value} record, terminating any open
  // RLE run with V_END_MARKER first.
  protected void writeKVPair(byte[] keyData, int keyPos, int keyLength,
      byte[] valueData, int valPos, int valueLength) throws IOException {
    writeValueMarker(out);
    WritableUtils.writeVInt(out, keyLength);
    WritableUtils.writeVInt(out, valueLength);
    out.write(keyData, keyPos, keyLength);
    out.write(valueData, valPos, valueLength);

    // Update bytes written
    decompressedBytesWritten +=
        keyLength + valueLength + WritableUtils.getVIntSize(keyLength)
            + WritableUtils.getVIntSize(valueLength);
    if (serializedUncompressedBytes != null) {
      serializedUncompressedBytes.increment(keyLength + valueLength);
    }
  }

  protected void writeRLE(DataOutputStream out) throws IOException {
    /*
     * To strike a balance between 2 use cases (lots of unique KV in stream
     * vs lots of identical KV in stream), we start off by writing KV pair.
     * If subsequent KV is identical, we write RLE marker along with V_END_MARKER
     * {KL1, VL1, K1, V1}
     * {RLE, VL2, V2, VL3, V3, ...V_END_MARKER}
     */
    if (prevKey != REPEAT_KEY) {
      WritableUtils.writeVInt(out, RLE_MARKER);
      decompressedBytesWritten += RLE_MARKER_SIZE;
      rleWritten++;
    }
  }

  protected void writeValueMarker(DataOutputStream out) throws IOException {
    /*
     * Write V_END_MARKER only in RLE scenario. This will
     * save space in conditions where lots of unique KV pairs are found in the
     * stream.
     */
    if (prevKey == REPEAT_KEY) {
      WritableUtils.writeVInt(out, V_END_MARKER);
      decompressedBytesWritten += V_END_MARKER_SIZE;
    }
  }

  // Required for mark/reset
  public DataOutputStream getOutputStream () {
    return out;
  }

  // Required for mark/reset
  public void updateCountersForExternalAppend(long length) {
    ++numRecordsWritten;
    decompressedBytesWritten += length;
  }

  /** @return number of uncompressed payload bytes written (including header and markers). */
  public long getRawLength() {
    return decompressedBytesWritten;
  }

  /** @return number of bytes actually written to the raw output stream; valid after close(). */
  public long getCompressedLength() {
    return compressedBytesWritten;
  }
}
/**
 * <code>IFile.Reader</code> to read intermediate map-outputs.
 *
 * Decodes the record stream written by {@link Writer}: VInt-length-prefixed
 * key/value pairs, RLE runs (RLE_MARKER key length, values terminated by
 * V_END_MARKER) and a trailing EOF_MARKER pair. Not thread-safe.
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
public static class Reader {

  // NO_KEY: end of stream; NEW_KEY: a fresh key was read; SAME_KEY: RLE repeat.
  public enum KeyState {NO_KEY, NEW_KEY, SAME_KEY}

  private static final int DEFAULT_BUFFER_SIZE = 128*1024;

  @VisibleForTesting
  // Not final for testing
  protected static int MAX_BUFFER_SIZE
      = Integer.MAX_VALUE - 8; // The maximum array size is a little less than the
                               // max integer value. Trying to create a larger array
                               // will result in an OOM exception. The exact value
                               // is JVM dependent so setting it to max int - 8 to be safe.

  // Count records read from disk
  private long numRecordsRead = 0;
  private final TezCounter readRecordsCounter;
  private final TezCounter bytesReadCounter;

  final InputStream in;        // Possibly decompressed stream that we read
  Decompressor decompressor;
  public long bytesRead = 0;
  final long fileLength;
  protected boolean eof = false;
  IFileInputStream checksumIn;

  protected byte[] buffer = null;
  protected int bufferSize = DEFAULT_BUFFER_SIZE;
  protected DataInputStream dataIn = null;

  protected int recNo = 1;
  // Length of the key that opened the current RLE run (served for SAME_KEY).
  protected int originalKeyLength;
  protected int prevKeyLength;
  private byte[] keyBytes = new byte[0];

  protected int currentKeyLength;
  protected int currentValueLength;
  long startPos;

  /**
   * Construct an IFile Reader.
   *
   * @param fs  FileSystem
   * @param file Path of the file to be opened. This file should have
   *             checksum bytes for the data at the end of the file.
   * @param codec codec
   * @param readsCounter Counter for records read from disk
   */
  public Reader(FileSystem fs, Path file,
      CompressionCodec codec,
      TezCounter readsCounter, TezCounter bytesReadCounter, boolean ifileReadAhead,
      int ifileReadAheadLength, int bufferSize) throws IOException {
    this(fs.open(file), fs.getFileStatus(file).getLen(), codec,
        readsCounter, bytesReadCounter, ifileReadAhead,
        ifileReadAheadLength, bufferSize);
  }

  /**
   * Construct an IFile Reader.
   *
   * @param in   The input stream
   * @param length Length of the data in the stream, including the checksum
   *               bytes.
   * @param codec codec
   * @param readsCounter Counter for records read from disk
   */
  public Reader(InputStream in, long length,
      CompressionCodec codec,
      TezCounter readsCounter, TezCounter bytesReadCounter,
      boolean readAhead, int readAheadLength,
      int bufferSize) throws IOException {
    // isCompressedFlagEnabled consumes the header bytes from the stream, so
    // the delegated constructor sees a stream positioned past the header.
    this(in, ((in != null) ? (length - HEADER.length) : length), codec,
        readsCounter, bytesReadCounter, readAhead, readAheadLength,
        bufferSize, (in != null && isCompressedFlagEnabled(in)));
    if (in != null && bytesReadCounter != null) {
      bytesReadCounter.increment(IFile.HEADER.length);
    }
  }

  /**
   * Construct an IFile Reader.
   *
   * @param in   The input stream
   * @param length Length of the data in the stream, including the checksum
   *               bytes.
   * @param codec codec
   * @param readsCounter Counter for records read from disk
   */
  public Reader(InputStream in, long length,
      CompressionCodec codec,
      TezCounter readsCounter, TezCounter bytesReadCounter,
      boolean readAhead, int readAheadLength,
      int bufferSize, boolean isCompressed) throws IOException {
    if (in != null) {
      checksumIn = new IFileInputStream(in, length, readAhead,
          readAheadLength/* , isCompressed */);
      if (isCompressed && codec != null) {
        decompressor = CodecUtils.getDecompressor(codec);
        if (decompressor != null) {
          this.in = CodecUtils.createInputStream(codec, checksumIn, decompressor);
        } else {
          LOG.warn("Could not obtain decompressor from CodecPool");
          this.in = checksumIn;
        }
      } else {
        this.in = checksumIn;
      }
      startPos = checksumIn.getPosition();
    } else {
      this.in = null;
    }
    if (in != null) {
      this.dataIn = new DataInputStream(this.in);
    }
    this.readRecordsCounter = readsCounter;
    this.bytesReadCounter = bytesReadCounter;
    this.fileLength = length;
    this.bufferSize = Math.max(0, bufferSize);
  }

  /**
   * Read entire ifile content to memory.
   */
  public static void readToMemory(byte[] buffer, InputStream in, int compressedLength,
      CompressionCodec codec, boolean ifileReadAhead, int ifileReadAheadLength)
      throws IOException {
    boolean isCompressed = IFile.Reader.isCompressedFlagEnabled(in);
    IFileInputStream checksumIn = new IFileInputStream(in,
        compressedLength - IFile.HEADER.length, ifileReadAhead,
        ifileReadAheadLength);
    in = checksumIn;
    Decompressor decompressor = null;
    if (isCompressed && codec != null) {
      decompressor = CodecUtils.getDecompressor(codec);
      if (decompressor != null) {
        decompressor.reset();
        in = CodecUtils.getDecompressedInputStreamWithBufferSize(codec, checksumIn, decompressor,
            compressedLength);
      } else {
        LOG.warn("Could not obtain decompressor from CodecPool");
      }
    }
    try {
      IOUtils.readFully(in, buffer, 0, buffer.length - IFile.HEADER.length);
      /*
       * We've gotten the amount of data we were expecting. Verify the
       * decompressor has nothing more to offer. This action also forces the
       * decompressor to read any trailing bytes that weren't critical for
       * decompression, which is necessary to keep the stream in sync.
       */
      if (in.read() >= 0) {
        throw new IOException("Unexpected extra bytes from input stream");
      }
    } catch (IOException ioe) {
      if(in != null) {
        try {
          in.close();
        } catch(IOException e) {
          LOG.debug("Exception in closing {}", in, e);
        }
      }
      throw ioe;
    } finally {
      if (decompressor != null) {
        decompressor.reset();
        CodecPool.returnDecompressor(decompressor);
      }
    }
  }

  /**
   * Read entire IFile content to disk.
   *
   * @param out the output stream that will receive the data
   * @param in the input stream containing the IFile data
   * @param length the amount of data to read from the input
   * @return the number of bytes copied
   */
  public static long readToDisk(OutputStream out, InputStream in, long length,
      boolean ifileReadAhead, int ifileReadAheadLength)
      throws IOException {
    final int BYTES_TO_READ = 64 * 1024;
    byte[] buf = new byte[BYTES_TO_READ];

    // copy the IFile header
    if (length < HEADER.length) {
      throw new IOException("Missing IFile header");
    }
    IOUtils.readFully(in, buf, 0, HEADER.length);
    verifyHeaderMagic(buf);
    out.write(buf, 0, HEADER.length);

    long bytesLeft = length - HEADER.length;
    @SuppressWarnings("resource")
    IFileInputStream ifInput = new IFileInputStream(in, bytesLeft,
        ifileReadAhead, ifileReadAheadLength);
    while (bytesLeft > 0) {
      int n = ifInput.readWithChecksum(buf, 0, (int) Math.min(bytesLeft, BYTES_TO_READ));
      if (n < 0) {
        throw new IOException("read past end of stream");
      }
      out.write(buf, 0, n);
      bytesLeft -= n;
    }
    return length - bytesLeft;
  }

  /** @return payload length, excluding the trailing checksum bytes. */
  public long getLength() {
    return fileLength - checksumIn.getSize();
  }

  public long getPosition() throws IOException {
    return checksumIn.getPosition();
  }

  /**
   * Read up to len bytes into buf starting at offset off.
   *
   * @param buf buffer
   * @param off offset
   * @param len length of buffer
   * @return the no. of bytes read
   */
  private int readData(byte[] buf, int off, int len) throws IOException {
    int bytesRead = 0;
    while (bytesRead < len) {
      int n = IOUtils.wrappedReadForCompressedData(in, buf, off + bytesRead,
          len - bytesRead);
      if (n < 0) {
        return bytesRead;
      }
      bytesRead += n;
    }
    return len;
  }

  // Reads the next value length inside an RLE run; V_END_MARKER means the run
  // ended and the next record's key/value lengths follow.
  protected void readValueLength(DataInput dIn) throws IOException {
    currentValueLength = WritableUtils.readVInt(dIn);
    bytesRead += WritableUtils.getVIntSize(currentValueLength);
    if (currentValueLength == V_END_MARKER) {
      readKeyValueLength(dIn);
    }
  }

  protected void readKeyValueLength(DataInput dIn) throws IOException {
    currentKeyLength = WritableUtils.readVInt(dIn);
    currentValueLength = WritableUtils.readVInt(dIn);
    if (currentKeyLength != RLE_MARKER) {
      // original key length
      originalKeyLength = currentKeyLength;
    }
    bytesRead +=
        WritableUtils.getVIntSize(currentKeyLength)
            + WritableUtils.getVIntSize(currentValueLength);
  }

  /**
   * Reset key length and value length for next record in the file
   *
   * @param dIn
   * @return true if key length and value length were set to the next
   *         false if end of file (EOF) marker was reached
   */
  protected boolean positionToNextRecord(DataInput dIn) throws IOException {
    // Sanity check
    if (eof) {
      throw new IOException(String.format("Reached EOF. Completed reading %d", bytesRead));
    }
    prevKeyLength = currentKeyLength;

    if (prevKeyLength == RLE_MARKER) {
      // Same key as previous one. Just read value length alone
      readValueLength(dIn);
    } else {
      readKeyValueLength(dIn);
    }

    // Check for EOF
    if (currentKeyLength == EOF_MARKER && currentValueLength == EOF_MARKER) {
      eof = true;
      return false;
    }

    // Sanity check
    if (currentKeyLength != RLE_MARKER && currentKeyLength < 0) {
      throw new IOException("Rec# " + recNo + ": Negative key-length: " +
          currentKeyLength + " PreviousKeyLen: " + prevKeyLength);
    }
    if (currentValueLength < 0) {
      throw new IOException("Rec# " + recNo + ": Negative value-length: " +
          currentValueLength);
    }
    return true;
  }

  public final boolean nextRawKey(DataInputBuffer key) throws IOException {
    return readRawKey(key) != KeyState.NO_KEY;
  }

  // Doubles the buffer size (capped at MAX_BUFFER_SIZE) to fit currentLength.
  private static byte[] createLargerArray(int currentLength) {
    if (currentLength > MAX_BUFFER_SIZE) {
      throw new IllegalArgumentException(
          String.format(REQ_BUFFER_SIZE_TOO_LARGE, currentLength, MAX_BUFFER_SIZE));
    }
    int newLength;
    if (currentLength > (MAX_BUFFER_SIZE - currentLength)) {
      // possible overflow: if (2*currentLength > MAX_BUFFER_SIZE)
      newLength = currentLength;
    } else {
      newLength = currentLength << 1;
    }
    return new byte[newLength];
  }

  public KeyState readRawKey(DataInputBuffer key) throws IOException {
    if (!positionToNextRecord(dataIn)) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("currentKeyLength=" + currentKeyLength +
            ", currentValueLength=" + currentValueLength +
            ", bytesRead=" + bytesRead +
            ", length=" + fileLength);
      }
      return KeyState.NO_KEY;
    }
    if(currentKeyLength == RLE_MARKER) {
      // get key length from original key
      key.reset(keyBytes, originalKeyLength);
      return KeyState.SAME_KEY;
    }
    if (keyBytes.length < currentKeyLength) {
      keyBytes = createLargerArray(currentKeyLength);
    }
    int i = readData(keyBytes, 0, currentKeyLength);
    if (i != currentKeyLength) {
      throw new IOException(String.format(INCOMPLETE_READ, currentKeyLength, i));
    }
    key.reset(keyBytes, currentKeyLength);
    bytesRead += currentKeyLength;
    return KeyState.NEW_KEY;
  }

  public void nextRawValue(DataInputBuffer value) throws IOException {
    final byte[] valBytes;
    // Never reuse keyBytes as the value buffer: the key must stay intact for
    // RLE (SAME_KEY) reads.
    if ((value.getData().length < currentValueLength) || (value.getData() == keyBytes)) {
      valBytes = createLargerArray(currentValueLength);
    } else {
      valBytes = value.getData();
    }

    int i = readData(valBytes, 0, currentValueLength);
    if (i != currentValueLength) {
      throw new IOException(String.format(INCOMPLETE_READ, currentValueLength, i));
    }
    value.reset(valBytes, currentValueLength);

    // Record the bytes read
    bytesRead += currentValueLength;
    ++recNo;
    ++numRecordsRead;
  }

  private static void verifyHeaderMagic(byte[] header) throws IOException {
    if (!(header[0] == 'T' && header[1] == 'I'
        && header[2] == 'F')) {
      throw new IOException("Not a valid ifile header");
    }
  }

  // Consumes the 4-byte header from the stream and returns its compressed flag.
  public static boolean isCompressedFlagEnabled(InputStream in) throws IOException {
    byte[] header = new byte[HEADER.length];
    IOUtils.readFully(in, header, 0, HEADER.length);
    verifyHeaderMagic(header);
    return (header[3] == 1);
  }

  public void close() throws IOException {
    // Close the underlying stream
    in.close();

    // Release the buffer
    dataIn = null;
    buffer = null;
    if (readRecordsCounter != null) {
      readRecordsCounter.increment(numRecordsRead);
    }

    if (bytesReadCounter != null) {
      bytesReadCounter.increment(checksumIn.getPosition() - startPos + checksumIn.getSize());
    }

    // Return the decompressor
    if (decompressor != null) {
      decompressor.reset();
      CodecPool.returnDecompressor(decompressor);
      decompressor = null;
    }
  }

  // No-op: this reader does not support repositioning.
  public void reset(int offset) {}

  public void disableChecksumValidation() {
    checksumIn.disableChecksumValidation();
  }
}
}
|
googleapis/google-cloud-java | 37,368 | java-functions/proto-google-cloud-functions-v2alpha/src/main/java/com/google/cloud/functions/v2alpha/SecretEnvVar.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/functions/v2alpha/functions.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.functions.v2alpha;
/**
*
*
* <pre>
* Configuration for a secret environment variable. It has the information
* necessary to fetch the secret value from secret manager and expose it as an
* environment variable.
* </pre>
*
* Protobuf type {@code google.cloud.functions.v2alpha.SecretEnvVar}
*/
public final class SecretEnvVar extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.functions.v2alpha.SecretEnvVar)
SecretEnvVarOrBuilder {
private static final long serialVersionUID = 0L;

// Use SecretEnvVar.newBuilder() to construct.
// Generated code: the constructor stays private; instances are created via the Builder.
private SecretEnvVar(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default-instance constructor: initializes all string fields to "",
// the proto3 default value.
private SecretEnvVar() {
  key_ = "";
  projectId_ = "";
  secret_ = "";
  version_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Hook used by the protobuf runtime to create fresh empty instances.
  return new SecretEnvVar();
}
/** Returns the message descriptor for {@code SecretEnvVar}, shared by all instances. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.functions.v2alpha.FunctionsProto
      .internal_static_google_cloud_functions_v2alpha_SecretEnvVar_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Maps descriptor fields to the generated message/builder classes via reflection.
  return com.google.cloud.functions.v2alpha.FunctionsProto
      .internal_static_google_cloud_functions_v2alpha_SecretEnvVar_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.functions.v2alpha.SecretEnvVar.class,
          com.google.cloud.functions.v2alpha.SecretEnvVar.Builder.class);
}
public static final int KEY_FIELD_NUMBER = 1;

// Holds either a java.lang.String or a com.google.protobuf.ByteString;
// each representation is computed lazily on first access and cached.
@SuppressWarnings("serial")
private volatile java.lang.Object key_ = "";

/**
 *
 *
 * <pre>
 * Name of the environment variable.
 * </pre>
 *
 * <code>string key = 1;</code>
 *
 * @return The key.
 */
@java.lang.Override
public java.lang.String getKey() {
  java.lang.Object ref = key_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // First string access: cache the decoded form.
    key_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Name of the environment variable.
 * </pre>
 *
 * <code>string key = 1;</code>
 *
 * @return The bytes for key.
 */
@java.lang.Override
public com.google.protobuf.ByteString getKeyBytes() {
  java.lang.Object ref = key_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the ByteString form so later byte accesses skip re-encoding.
    key_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int PROJECT_ID_FIELD_NUMBER = 2;

// Holds either a java.lang.String or a com.google.protobuf.ByteString;
// each representation is computed lazily on first access and cached.
@SuppressWarnings("serial")
private volatile java.lang.Object projectId_ = "";

/**
 *
 *
 * <pre>
 * Project identifier (preferably project number but can also be the
 * project ID) of the project that contains the secret. If not set, it is
 * assumed that the secret is in the same project as the function.
 * </pre>
 *
 * <code>string project_id = 2;</code>
 *
 * @return The projectId.
 */
@java.lang.Override
public java.lang.String getProjectId() {
  java.lang.Object ref = projectId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // First string access: cache the decoded form.
    projectId_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Project identifier (preferably project number but can also be the
 * project ID) of the project that contains the secret. If not set, it is
 * assumed that the secret is in the same project as the function.
 * </pre>
 *
 * <code>string project_id = 2;</code>
 *
 * @return The bytes for projectId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getProjectIdBytes() {
  java.lang.Object ref = projectId_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the ByteString form so later byte accesses skip re-encoding.
    projectId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int SECRET_FIELD_NUMBER = 3;

// Holds either a java.lang.String or a com.google.protobuf.ByteString;
// each representation is computed lazily on first access and cached.
@SuppressWarnings("serial")
private volatile java.lang.Object secret_ = "";

/**
 *
 *
 * <pre>
 * Name of the secret in secret manager (not the full resource name).
 * </pre>
 *
 * <code>string secret = 3;</code>
 *
 * @return The secret.
 */
@java.lang.Override
public java.lang.String getSecret() {
  java.lang.Object ref = secret_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // First string access: cache the decoded form.
    secret_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Name of the secret in secret manager (not the full resource name).
 * </pre>
 *
 * <code>string secret = 3;</code>
 *
 * @return The bytes for secret.
 */
@java.lang.Override
public com.google.protobuf.ByteString getSecretBytes() {
  java.lang.Object ref = secret_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the ByteString form so later byte accesses skip re-encoding.
    secret_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int VERSION_FIELD_NUMBER = 4;

// Holds either a java.lang.String or a com.google.protobuf.ByteString;
// each representation is computed lazily on first access and cached.
@SuppressWarnings("serial")
private volatile java.lang.Object version_ = "";

/**
 *
 *
 * <pre>
 * Version of the secret (version number or the string 'latest'). It is
 * recommended to use a numeric version for secret environment variables as
 * any updates to the secret value is not reflected until new instances
 * start.
 * </pre>
 *
 * <code>string version = 4;</code>
 *
 * @return The version.
 */
@java.lang.Override
public java.lang.String getVersion() {
  java.lang.Object ref = version_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // First string access: cache the decoded form.
    version_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Version of the secret (version number or the string 'latest'). It is
 * recommended to use a numeric version for secret environment variables as
 * any updates to the secret value is not reflected until new instances
 * start.
 * </pre>
 *
 * <code>string version = 4;</code>
 *
 * @return The bytes for version.
 */
@java.lang.Override
public com.google.protobuf.ByteString getVersionBytes() {
  java.lang.Object ref = version_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the ByteString form so later byte accesses skip re-encoding.
    version_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  // proto3 message with no required fields: always initialized; memoize.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // proto3 serialization: fields holding their default (empty) value are skipped.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, projectId_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(secret_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, secret_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, version_);
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, projectId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(secret_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, secret_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, version_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
  // Value equality over all four string fields plus unknown fields; other
  // Message types fall back to the reflective super.equals comparison.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.functions.v2alpha.SecretEnvVar)) {
      return super.equals(obj);
    }
    com.google.cloud.functions.v2alpha.SecretEnvVar other =
        (com.google.cloud.functions.v2alpha.SecretEnvVar) obj;
    if (!getKey().equals(other.getKey())) return false;
    if (!getProjectId().equals(other.getProjectId())) return false;
    if (!getSecret().equals(other.getSecret())) return false;
    if (!getVersion().equals(other.getVersion())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over descriptor, each field number/value pair, and unknown fields;
  // memoized (0 is treated as "not yet computed"), consistent with equals().
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + KEY_FIELD_NUMBER;
    hash = (53 * hash) + getKey().hashCode();
    hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getProjectId().hashCode();
    hash = (37 * hash) + SECRET_FIELD_NUMBER;
    hash = (53 * hash) + getSecret().hashCode();
    hash = (37 * hash) + VERSION_FIELD_NUMBER;
    hash = (53 * hash) + getVersion().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Boilerplate parse entry points: each overload delegates to the shared
  // PARSER (or the GeneratedMessageV3 IO helpers for stream-based inputs).
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.functions.v2alpha.SecretEnvVar parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: fresh builders start from the default instance;
  // newBuilder(prototype) pre-populates the builder by merging the prototype.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.functions.v2alpha.SecretEnvVar prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // Avoid a needless mergeFrom when this is already the default instance.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Configuration for a secret environment variable. It has the information
   * necessary to fetch the secret value from secret manager and expose it as an
   * environment variable.
   * </pre>
   *
   * Protobuf type {@code google.cloud.functions.v2alpha.SecretEnvVar}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.functions.v2alpha.SecretEnvVar)
      com.google.cloud.functions.v2alpha.SecretEnvVarOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.functions.v2alpha.FunctionsProto
          .internal_static_google_cloud_functions_v2alpha_SecretEnvVar_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.functions.v2alpha.FunctionsProto
          .internal_static_google_cloud_functions_v2alpha_SecretEnvVar_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.functions.v2alpha.SecretEnvVar.class,
              com.google.cloud.functions.v2alpha.SecretEnvVar.Builder.class);
    }
    // Construct using com.google.cloud.functions.v2alpha.SecretEnvVar.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its proto3 default ("" for all four string fields)
    // and clears all has-been-set bits in bitField0_.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      key_ = "";
      projectId_ = "";
      secret_ = "";
      version_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.functions.v2alpha.FunctionsProto
          .internal_static_google_cloud_functions_v2alpha_SecretEnvVar_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.functions.v2alpha.SecretEnvVar getDefaultInstanceForType() {
      return com.google.cloud.functions.v2alpha.SecretEnvVar.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.functions.v2alpha.SecretEnvVar build() {
      com.google.cloud.functions.v2alpha.SecretEnvVar result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.functions.v2alpha.SecretEnvVar buildPartial() {
      com.google.cloud.functions.v2alpha.SecretEnvVar result =
          new com.google.cloud.functions.v2alpha.SecretEnvVar(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose bit in bitField0_ is set; untouched fields
    // keep their defaults in the newly built message.
    private void buildPartial0(com.google.cloud.functions.v2alpha.SecretEnvVar result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.key_ = key_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.projectId_ = projectId_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.secret_ = secret_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.version_ = version_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    // Reflective field mutators simply delegate to the generated superclass.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.functions.v2alpha.SecretEnvVar) {
        return mergeFrom((com.google.cloud.functions.v2alpha.SecretEnvVar) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 merge semantics: only non-empty string fields in `other` overwrite
    // this builder's values; unknown fields from `other` are merged in as well.
    public Builder mergeFrom(com.google.cloud.functions.v2alpha.SecretEnvVar other) {
      if (other == com.google.cloud.functions.v2alpha.SecretEnvVar.getDefaultInstance())
        return this;
      if (!other.getKey().isEmpty()) {
        key_ = other.key_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getProjectId().isEmpty()) {
        projectId_ = other.projectId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getSecret().isEmpty()) {
        secret_ = other.secret_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getVersion().isEmpty()) {
        version_ = other.version_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming parse loop: reads wire tags until EOF (tag 0) or an end-group
    // tag; tags 10/18/26/34 are the length-delimited encodings of fields 1-4.
    // onChanged() runs in `finally` so listeners see even a partial parse.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                key_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                projectId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                secret_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                version_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Tracks which fields have been explicitly set (bits 0-3 = fields 1-4).
    private int bitField0_;
    // key_/projectId_/secret_/version_ each hold either a String or a
    // ByteString; getters convert and cache lazily, same as on the message.
    private java.lang.Object key_ = "";
    /**
     *
     *
     * <pre>
     * Name of the environment variable.
     * </pre>
     *
     * <code>string key = 1;</code>
     *
     * @return The key.
     */
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        key_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the environment variable.
     * </pre>
     *
     * <code>string key = 1;</code>
     *
     * @return The bytes for key.
     */
    public com.google.protobuf.ByteString getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the environment variable.
     * </pre>
     *
     * <code>string key = 1;</code>
     *
     * @param value The key to set.
     * @return This builder for chaining.
     */
    public Builder setKey(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      key_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the environment variable.
     * </pre>
     *
     * <code>string key = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearKey() {
      key_ = getDefaultInstance().getKey();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the environment variable.
     * </pre>
     *
     * <code>string key = 1;</code>
     *
     * @param value The bytes for key to set.
     * @return This builder for chaining.
     */
    public Builder setKeyBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      key_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object projectId_ = "";
    /**
     *
     *
     * <pre>
     * Project identifier (preferably project number but can also be the
     * project ID) of the project that contains the secret. If not set, it is
     * assumed that the secret is in the same project as the function.
     * </pre>
     *
     * <code>string project_id = 2;</code>
     *
     * @return The projectId.
     */
    public java.lang.String getProjectId() {
      java.lang.Object ref = projectId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        projectId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Project identifier (preferably project number but can also be the
     * project ID) of the project that contains the secret. If not set, it is
     * assumed that the secret is in the same project as the function.
     * </pre>
     *
     * <code>string project_id = 2;</code>
     *
     * @return The bytes for projectId.
     */
    public com.google.protobuf.ByteString getProjectIdBytes() {
      java.lang.Object ref = projectId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        projectId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Project identifier (preferably project number but can also be the
     * project ID) of the project that contains the secret. If not set, it is
     * assumed that the secret is in the same project as the function.
     * </pre>
     *
     * <code>string project_id = 2;</code>
     *
     * @param value The projectId to set.
     * @return This builder for chaining.
     */
    public Builder setProjectId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      projectId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Project identifier (preferably project number but can also be the
     * project ID) of the project that contains the secret. If not set, it is
     * assumed that the secret is in the same project as the function.
     * </pre>
     *
     * <code>string project_id = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearProjectId() {
      projectId_ = getDefaultInstance().getProjectId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Project identifier (preferably project number but can also be the
     * project ID) of the project that contains the secret. If not set, it is
     * assumed that the secret is in the same project as the function.
     * </pre>
     *
     * <code>string project_id = 2;</code>
     *
     * @param value The bytes for projectId to set.
     * @return This builder for chaining.
     */
    public Builder setProjectIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      projectId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object secret_ = "";
    /**
     *
     *
     * <pre>
     * Name of the secret in secret manager (not the full resource name).
     * </pre>
     *
     * <code>string secret = 3;</code>
     *
     * @return The secret.
     */
    public java.lang.String getSecret() {
      java.lang.Object ref = secret_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        secret_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the secret in secret manager (not the full resource name).
     * </pre>
     *
     * <code>string secret = 3;</code>
     *
     * @return The bytes for secret.
     */
    public com.google.protobuf.ByteString getSecretBytes() {
      java.lang.Object ref = secret_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        secret_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the secret in secret manager (not the full resource name).
     * </pre>
     *
     * <code>string secret = 3;</code>
     *
     * @param value The secret to set.
     * @return This builder for chaining.
     */
    public Builder setSecret(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      secret_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the secret in secret manager (not the full resource name).
     * </pre>
     *
     * <code>string secret = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSecret() {
      secret_ = getDefaultInstance().getSecret();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the secret in secret manager (not the full resource name).
     * </pre>
     *
     * <code>string secret = 3;</code>
     *
     * @param value The bytes for secret to set.
     * @return This builder for chaining.
     */
    public Builder setSecretBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      secret_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    private java.lang.Object version_ = "";
    /**
     *
     *
     * <pre>
     * Version of the secret (version number or the string 'latest'). It is
     * recommended to use a numeric version for secret environment variables as
     * any updates to the secret value is not reflected until new instances
     * start.
     * </pre>
     *
     * <code>string version = 4;</code>
     *
     * @return The version.
     */
    public java.lang.String getVersion() {
      java.lang.Object ref = version_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        version_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Version of the secret (version number or the string 'latest'). It is
     * recommended to use a numeric version for secret environment variables as
     * any updates to the secret value is not reflected until new instances
     * start.
     * </pre>
     *
     * <code>string version = 4;</code>
     *
     * @return The bytes for version.
     */
    public com.google.protobuf.ByteString getVersionBytes() {
      java.lang.Object ref = version_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        version_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Version of the secret (version number or the string 'latest'). It is
     * recommended to use a numeric version for secret environment variables as
     * any updates to the secret value is not reflected until new instances
     * start.
     * </pre>
     *
     * <code>string version = 4;</code>
     *
     * @param value The version to set.
     * @return This builder for chaining.
     */
    public Builder setVersion(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      version_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Version of the secret (version number or the string 'latest'). It is
     * recommended to use a numeric version for secret environment variables as
     * any updates to the secret value is not reflected until new instances
     * start.
     * </pre>
     *
     * <code>string version = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearVersion() {
      version_ = getDefaultInstance().getVersion();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Version of the secret (version number or the string 'latest'). It is
     * recommended to use a numeric version for secret environment variables as
     * any updates to the secret value is not reflected until new instances
     * start.
     * </pre>
     *
     * <code>string version = 4;</code>
     *
     * @param value The bytes for version to set.
     * @return This builder for chaining.
     */
    public Builder setVersionBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      version_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.functions.v2alpha.SecretEnvVar)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.functions.v2alpha.SecretEnvVar)
  // Singleton default (all-fields-empty) instance, created eagerly at class load.
  private static final com.google.cloud.functions.v2alpha.SecretEnvVar DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.functions.v2alpha.SecretEnvVar();
  }
  public static com.google.cloud.functions.v2alpha.SecretEnvVar getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser; on failure it attaches the partially built message to the
  // thrown InvalidProtocolBufferException for diagnostics.
  private static final com.google.protobuf.Parser<SecretEnvVar> PARSER =
      new com.google.protobuf.AbstractParser<SecretEnvVar>() {
        @java.lang.Override
        public SecretEnvVar parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<SecretEnvVar> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<SecretEnvVar> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.functions.v2alpha.SecretEnvVar getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2alpha/serving_config_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2alpha;
/**
*
*
* <pre>
* Response for ListServingConfigs method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2alpha.ListServingConfigsResponse}
*/
public final class ListServingConfigsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2alpha.ListServingConfigsResponse)
ListServingConfigsResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ListServingConfigsResponse.newBuilder() to construct.
  private ListServingConfigsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: empty repeated field and empty page token.
  private ListServingConfigsResponse() {
    servingConfigs_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Reflection hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListServingConfigsResponse();
  }
  // Descriptor and field-accessor-table plumbing backing reflective access.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.retail.v2alpha.ServingConfigServiceProto
        .internal_static_google_cloud_retail_v2alpha_ListServingConfigsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.retail.v2alpha.ServingConfigServiceProto
        .internal_static_google_cloud_retail_v2alpha_ListServingConfigsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.retail.v2alpha.ListServingConfigsResponse.class,
            com.google.cloud.retail.v2alpha.ListServingConfigsResponse.Builder.class);
  }
  public static final int SERVING_CONFIGS_FIELD_NUMBER = 1;
  // Backing list for repeated field 1; returned directly by the accessors
  // below (the message is immutable, so no defensive copy is made).
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.retail.v2alpha.ServingConfig> servingConfigs_;
  /**
   *
   *
   * <pre>
   * All the ServingConfigs for a given catalog.
   * </pre>
   *
   * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.retail.v2alpha.ServingConfig> getServingConfigsList() {
    return servingConfigs_;
  }
  /**
   *
   *
   * <pre>
   * All the ServingConfigs for a given catalog.
   * </pre>
   *
   * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.retail.v2alpha.ServingConfigOrBuilder>
      getServingConfigsOrBuilderList() {
    return servingConfigs_;
  }
  /**
   *
   *
   * <pre>
   * All the ServingConfigs for a given catalog.
   * </pre>
   *
   * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
   */
  @java.lang.Override
  public int getServingConfigsCount() {
    return servingConfigs_.size();
  }
  /**
   *
   *
   * <pre>
   * All the ServingConfigs for a given catalog.
   * </pre>
   *
   * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.retail.v2alpha.ServingConfig getServingConfigs(int index) {
    return servingConfigs_.get(index);
  }
  /**
   *
   *
   * <pre>
   * All the ServingConfigs for a given catalog.
   * </pre>
   *
   * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.retail.v2alpha.ServingConfigOrBuilder getServingConfigsOrBuilder(
      int index) {
    return servingConfigs_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; the getters below convert between
  // the two representations lazily and cache the result.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * Pagination token, if not returned indicates the last page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Pagination token, if not returned indicates the last page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  // No required fields in this message, so it is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes every serving config (field 1), then the page token (field 2,
  // only when non-empty per proto3 rules), then any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < servingConfigs_.size(); i++) {
      output.writeMessage(1, servingConfigs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized size with the same rules as writeTo, caching the
  // result in memoizedSize (-1 means not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < servingConfigs_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, servingConfigs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over the repeated field, the page token, and unknown
  // fields; other Message types fall back to the reflective super.equals.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2alpha.ListServingConfigsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2alpha.ListServingConfigsResponse other =
        (com.google.cloud.retail.v2alpha.ListServingConfigsResponse) obj;
    if (!getServingConfigsList().equals(other.getServingConfigsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(); the repeated field contributes
  // only when non-empty, matching the generated hashing convention.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getServingConfigsCount() > 0) {
      hash = (37 * hash) + SERVING_CONFIGS_FIELD_NUMBER;
      hash = (53 * hash) + getServingConfigsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Boilerplate parse entry points: each overload delegates to the shared
  // PARSER (or the GeneratedMessageV3 IO helpers for stream-based inputs).
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Obtain a fresh builder from the default instance's cached state.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated with the fields of {@code prototype}.
  public static Builder newBuilder(
      com.google.cloud.retail.v2alpha.ListServingConfigsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when this is the (empty) default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response for ListServingConfigs method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.retail.v2alpha.ListServingConfigsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2alpha.ListServingConfigsResponse)
      com.google.cloud.retail.v2alpha.ListServingConfigsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.retail.v2alpha.ServingConfigServiceProto
          .internal_static_google_cloud_retail_v2alpha_ListServingConfigsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.retail.v2alpha.ServingConfigServiceProto
          .internal_static_google_cloud_retail_v2alpha_ListServingConfigsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.retail.v2alpha.ListServingConfigsResponse.class,
              com.google.cloud.retail.v2alpha.ListServingConfigsResponse.Builder.class);
    }
    // Construct using com.google.cloud.retail.v2alpha.ListServingConfigsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Reset the repeated field either directly or through its field builder,
      // depending on which representation is currently active.
      if (servingConfigsBuilder_ == null) {
        servingConfigs_ = java.util.Collections.emptyList();
      } else {
        servingConfigs_ = null;
        servingConfigsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.retail.v2alpha.ServingConfigServiceProto
          .internal_static_google_cloud_retail_v2alpha_ListServingConfigsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.retail.v2alpha.ListServingConfigsResponse getDefaultInstanceForType() {
      return com.google.cloud.retail.v2alpha.ListServingConfigsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.retail.v2alpha.ListServingConfigsResponse build() {
      com.google.cloud.retail.v2alpha.ListServingConfigsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.retail.v2alpha.ListServingConfigsResponse buildPartial() {
      com.google.cloud.retail.v2alpha.ListServingConfigsResponse result =
          new com.google.cloud.retail.v2alpha.ListServingConfigsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated serving_configs field into the result message,
    // freezing the list when the builder (not a field builder) owns it.
    private void buildPartialRepeatedFields(
        com.google.cloud.retail.v2alpha.ListServingConfigsResponse result) {
      if (servingConfigsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          servingConfigs_ = java.util.Collections.unmodifiableList(servingConfigs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.servingConfigs_ = servingConfigs_;
      } else {
        result.servingConfigs_ = servingConfigsBuilder_.build();
      }
    }
    // Transfers the scalar fields guarded by bitField0_ into the result.
    private void buildPartial0(com.google.cloud.retail.v2alpha.ListServingConfigsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.retail.v2alpha.ListServingConfigsResponse) {
        return mergeFrom((com.google.cloud.retail.v2alpha.ListServingConfigsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.retail.v2alpha.ListServingConfigsResponse other) {
      if (other == com.google.cloud.retail.v2alpha.ListServingConfigsResponse.getDefaultInstance())
        return this;
      if (servingConfigsBuilder_ == null) {
        if (!other.servingConfigs_.isEmpty()) {
          if (servingConfigs_.isEmpty()) {
            // Share the other message's immutable list instead of copying.
            servingConfigs_ = other.servingConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureServingConfigsIsMutable();
            servingConfigs_.addAll(other.servingConfigs_);
          }
          onChanged();
        }
      } else {
        if (!other.servingConfigs_.isEmpty()) {
          if (servingConfigsBuilder_.isEmpty()) {
            servingConfigsBuilder_.dispose();
            servingConfigsBuilder_ = null;
            servingConfigs_ = other.servingConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            servingConfigsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getServingConfigsFieldBuilder()
                    : null;
          } else {
            servingConfigsBuilder_.addAllMessages(other.servingConfigs_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields in this message, so it is always initialized.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.retail.v2alpha.ServingConfig m =
                    input.readMessage(
                        com.google.cloud.retail.v2alpha.ServingConfig.parser(), extensionRegistry);
                if (servingConfigsBuilder_ == null) {
                  ensureServingConfigsIsMutable();
                  servingConfigs_.add(m);
                } else {
                  servingConfigsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001: servingConfigs_ list is builder-owned and mutable.
    // Bit 0x00000002: nextPageToken_ has been explicitly set.
    private int bitField0_;
    private java.util.List<com.google.cloud.retail.v2alpha.ServingConfig> servingConfigs_ =
        java.util.Collections.emptyList();
    private void ensureServingConfigsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        servingConfigs_ =
            new java.util.ArrayList<com.google.cloud.retail.v2alpha.ServingConfig>(servingConfigs_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.retail.v2alpha.ServingConfig,
            com.google.cloud.retail.v2alpha.ServingConfig.Builder,
            com.google.cloud.retail.v2alpha.ServingConfigOrBuilder>
        servingConfigsBuilder_;
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public java.util.List<com.google.cloud.retail.v2alpha.ServingConfig> getServingConfigsList() {
      if (servingConfigsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(servingConfigs_);
      } else {
        return servingConfigsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public int getServingConfigsCount() {
      if (servingConfigsBuilder_ == null) {
        return servingConfigs_.size();
      } else {
        return servingConfigsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public com.google.cloud.retail.v2alpha.ServingConfig getServingConfigs(int index) {
      if (servingConfigsBuilder_ == null) {
        return servingConfigs_.get(index);
      } else {
        return servingConfigsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public Builder setServingConfigs(
        int index, com.google.cloud.retail.v2alpha.ServingConfig value) {
      if (servingConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureServingConfigsIsMutable();
        servingConfigs_.set(index, value);
        onChanged();
      } else {
        servingConfigsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public Builder setServingConfigs(
        int index, com.google.cloud.retail.v2alpha.ServingConfig.Builder builderForValue) {
      if (servingConfigsBuilder_ == null) {
        ensureServingConfigsIsMutable();
        servingConfigs_.set(index, builderForValue.build());
        onChanged();
      } else {
        servingConfigsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public Builder addServingConfigs(com.google.cloud.retail.v2alpha.ServingConfig value) {
      if (servingConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureServingConfigsIsMutable();
        servingConfigs_.add(value);
        onChanged();
      } else {
        servingConfigsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public Builder addServingConfigs(
        int index, com.google.cloud.retail.v2alpha.ServingConfig value) {
      if (servingConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureServingConfigsIsMutable();
        servingConfigs_.add(index, value);
        onChanged();
      } else {
        servingConfigsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public Builder addServingConfigs(
        com.google.cloud.retail.v2alpha.ServingConfig.Builder builderForValue) {
      if (servingConfigsBuilder_ == null) {
        ensureServingConfigsIsMutable();
        servingConfigs_.add(builderForValue.build());
        onChanged();
      } else {
        servingConfigsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public Builder addServingConfigs(
        int index, com.google.cloud.retail.v2alpha.ServingConfig.Builder builderForValue) {
      if (servingConfigsBuilder_ == null) {
        ensureServingConfigsIsMutable();
        servingConfigs_.add(index, builderForValue.build());
        onChanged();
      } else {
        servingConfigsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public Builder addAllServingConfigs(
        java.lang.Iterable<? extends com.google.cloud.retail.v2alpha.ServingConfig> values) {
      if (servingConfigsBuilder_ == null) {
        ensureServingConfigsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, servingConfigs_);
        onChanged();
      } else {
        servingConfigsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public Builder clearServingConfigs() {
      if (servingConfigsBuilder_ == null) {
        servingConfigs_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        servingConfigsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public Builder removeServingConfigs(int index) {
      if (servingConfigsBuilder_ == null) {
        ensureServingConfigsIsMutable();
        servingConfigs_.remove(index);
        onChanged();
      } else {
        servingConfigsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public com.google.cloud.retail.v2alpha.ServingConfig.Builder getServingConfigsBuilder(
        int index) {
      return getServingConfigsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public com.google.cloud.retail.v2alpha.ServingConfigOrBuilder getServingConfigsOrBuilder(
        int index) {
      if (servingConfigsBuilder_ == null) {
        return servingConfigs_.get(index);
      } else {
        return servingConfigsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.retail.v2alpha.ServingConfigOrBuilder>
        getServingConfigsOrBuilderList() {
      if (servingConfigsBuilder_ != null) {
        return servingConfigsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(servingConfigs_);
      }
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public com.google.cloud.retail.v2alpha.ServingConfig.Builder addServingConfigsBuilder() {
      return getServingConfigsFieldBuilder()
          .addBuilder(com.google.cloud.retail.v2alpha.ServingConfig.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public com.google.cloud.retail.v2alpha.ServingConfig.Builder addServingConfigsBuilder(
        int index) {
      return getServingConfigsFieldBuilder()
          .addBuilder(index, com.google.cloud.retail.v2alpha.ServingConfig.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * All the ServingConfigs for a given catalog.
     * </pre>
     *
     * <code>repeated .google.cloud.retail.v2alpha.ServingConfig serving_configs = 1;</code>
     */
    public java.util.List<com.google.cloud.retail.v2alpha.ServingConfig.Builder>
        getServingConfigsBuilderList() {
      return getServingConfigsFieldBuilder().getBuilderList();
    }
    // Lazily creates the field builder; once created, the plain list
    // representation (servingConfigs_) is released.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.retail.v2alpha.ServingConfig,
            com.google.cloud.retail.v2alpha.ServingConfig.Builder,
            com.google.cloud.retail.v2alpha.ServingConfigOrBuilder>
        getServingConfigsFieldBuilder() {
      if (servingConfigsBuilder_ == null) {
        servingConfigsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.retail.v2alpha.ServingConfig,
                com.google.cloud.retail.v2alpha.ServingConfig.Builder,
                com.google.cloud.retail.v2alpha.ServingConfigOrBuilder>(
                servingConfigs_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        servingConfigs_ = null;
      }
      return servingConfigsBuilder_;
    }
    // Stored as String or ByteString; converted lazily on access.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2alpha.ListServingConfigsResponse)
  }
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2alpha.ListServingConfigsResponse)
  // Singleton default (all-fields-unset) instance, shared by all callers.
  private static final com.google.cloud.retail.v2alpha.ListServingConfigsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2alpha.ListServingConfigsResponse();
  }
  public static com.google.cloud.retail.v2alpha.ListServingConfigsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that delegates to Builder.mergeFrom, attaching the partially-built
  // message to any parse failure for diagnostics.
  private static final com.google.protobuf.Parser<ListServingConfigsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListServingConfigsResponse>() {
        @java.lang.Override
        public ListServingConfigsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListServingConfigsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListServingConfigsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.retail.v2alpha.ListServingConfigsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hadoop-common | 37,428 | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.compress;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.RandomDatum;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.hadoop.io.compress.bzip2.Bzip2Factory;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestCodec {
  private static final Log LOG= LogFactory.getLog(TestCodec.class);
  // Shared configuration used to instantiate codecs; some tests clone or
  // replace it locally to tweak codec-specific settings.
  private Configuration conf = new Configuration();
  // Number of random key/value records per round-trip test.
  private int count = 10000;
  // Fresh seed per test run so generated data varies between runs.
  private int seed = new Random().nextInt();
@Test
public void testDefaultCodec() throws IOException {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.DefaultCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.DefaultCodec");
}
@Test
public void testGzipCodec() throws IOException {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.GzipCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.GzipCodec");
}
@Test(timeout=20000)
public void testBZip2Codec() throws IOException {
Configuration conf = new Configuration();
conf.set("io.compression.codec.bzip2.library", "java-builtin");
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.BZip2Codec");
}
@Test(timeout=20000)
public void testBZip2NativeCodec() throws IOException {
Configuration conf = new Configuration();
conf.set("io.compression.codec.bzip2.library", "system-native");
if (NativeCodeLoader.isNativeCodeLoaded()) {
if (Bzip2Factory.isNativeBzip2Loaded(conf)) {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
codecTest(conf, seed, count,
"org.apache.hadoop.io.compress.BZip2Codec");
conf.set("io.compression.codec.bzip2.library", "java-builtin");
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
codecTest(conf, seed, count,
"org.apache.hadoop.io.compress.BZip2Codec");
} else {
LOG.warn("Native hadoop library available but native bzip2 is not");
}
}
}
@Test
public void testSnappyCodec() throws IOException {
if (SnappyCodec.isNativeCodeLoaded()) {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.SnappyCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.SnappyCodec");
}
}
@Test
public void testLz4Codec() throws IOException {
if (NativeCodeLoader.isNativeCodeLoaded()) {
if (Lz4Codec.isNativeCodeLoaded()) {
conf.setBoolean(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
false);
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.Lz4Codec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.Lz4Codec");
conf.setBoolean(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
true);
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.Lz4Codec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.Lz4Codec");
} else {
Assert.fail("Native hadoop library available but lz4 not");
}
}
}
@Test
public void testDeflateCodec() throws IOException {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.DeflateCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.DeflateCodec");
}
@Test
public void testGzipCodecWithParam() throws IOException {
Configuration conf = new Configuration(this.conf);
ZlibFactory.setCompressionLevel(conf, CompressionLevel.BEST_COMPRESSION);
ZlibFactory.setCompressionStrategy(conf, CompressionStrategy.HUFFMAN_ONLY);
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.GzipCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.GzipCodec");
}
private static void codecTest(Configuration conf, int seed, int count,
String codecClass)
throws IOException {
// Create the codec
CompressionCodec codec = null;
try {
codec = (CompressionCodec)
ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf);
} catch (ClassNotFoundException cnfe) {
throw new IOException("Illegal codec!");
}
LOG.info("Created a Codec object of type: " + codecClass);
// Generate data
DataOutputBuffer data = new DataOutputBuffer();
RandomDatum.Generator generator = new RandomDatum.Generator(seed);
for(int i=0; i < count; ++i) {
generator.next();
RandomDatum key = generator.getKey();
RandomDatum value = generator.getValue();
key.write(data);
value.write(data);
}
LOG.info("Generated " + count + " records");
// Compress data
DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
CompressionOutputStream deflateFilter =
codec.createOutputStream(compressedDataBuffer);
DataOutputStream deflateOut =
new DataOutputStream(new BufferedOutputStream(deflateFilter));
deflateOut.write(data.getData(), 0, data.getLength());
deflateOut.flush();
deflateFilter.finish();
LOG.info("Finished compressing data");
// De-compress data
DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
compressedDataBuffer.getLength());
CompressionInputStream inflateFilter =
codec.createInputStream(deCompressedDataBuffer);
DataInputStream inflateIn =
new DataInputStream(new BufferedInputStream(inflateFilter));
// Check
DataInputBuffer originalData = new DataInputBuffer();
originalData.reset(data.getData(), 0, data.getLength());
DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
for(int i=0; i < count; ++i) {
RandomDatum k1 = new RandomDatum();
RandomDatum v1 = new RandomDatum();
k1.readFields(originalIn);
v1.readFields(originalIn);
RandomDatum k2 = new RandomDatum();
RandomDatum v2 = new RandomDatum();
k2.readFields(inflateIn);
v2.readFields(inflateIn);
assertTrue("original and compressed-then-decompressed-output not equal",
k1.equals(k2) && v1.equals(v2));
// original and compressed-then-decompressed-output have the same hashCode
Map<RandomDatum, String> m = new HashMap<RandomDatum, String>();
m.put(k1, k1.toString());
m.put(v1, v1.toString());
String result = m.get(k2);
assertEquals("k1 and k2 hashcode not equal", result, k1.toString());
result = m.get(v2);
assertEquals("v1 and v2 hashcode not equal", result, v1.toString());
}
// De-compress data byte-at-a-time
originalData.reset(data.getData(), 0, data.getLength());
deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
compressedDataBuffer.getLength());
inflateFilter =
codec.createInputStream(deCompressedDataBuffer);
// Check
originalIn = new DataInputStream(new BufferedInputStream(originalData));
int expected;
do {
expected = originalIn.read();
assertEquals("Inflated stream read by byte does not match",
expected, inflateFilter.read());
} while (expected != -1);
LOG.info("SUCCESS! Completed checking " + count + " records");
}
/** Splittable-codec split/seek round trip; BZip2 is the codec under test. */
@Test
public void testSplitableCodecs() throws Exception {
  testSplitableCodec(BZip2Codec.class);
}
/**
 * Writes a ~2MB compressed file of sequence-numbered records via
 * writeSplitTestFile, then opens SplitCompressionInputStreams at random
 * byte offsets and verifies that two consecutively read lines carry
 * consecutive sequence numbers, i.e. split adjustment lands on a record
 * boundary.
 *
 * @param codecClass the splittable codec implementation to exercise
 * @throws IOException on any file system or codec failure
 */
private void testSplitableCodec(
    Class<? extends SplittableCompressionCodec> codecClass)
    throws IOException {
  final long DEFLBYTES = 2 * 1024 * 1024;
  final Configuration conf = new Configuration();
  final Random rand = new Random();
  final long seed = rand.nextLong();
  // Log the seed so a failing run can be reproduced deterministically.
  LOG.info("seed: " + seed);
  rand.setSeed(seed);
  SplittableCompressionCodec codec =
    ReflectionUtils.newInstance(codecClass, conf);
  final FileSystem fs = FileSystem.getLocal(conf);
  final FileStatus infile =
    fs.getFileStatus(writeSplitTestFile(fs, rand, codec, DEFLBYTES));
  if (infile.getLen() > Integer.MAX_VALUE) {
    fail("Unexpected compression: " + DEFLBYTES + " -> " + infile.getLen());
  }
  final int flen = (int) infile.getLen();
  final Text line = new Text();
  final Decompressor dcmp = CodecPool.getDecompressor(codec);
  try {
    // Probe the file at random positions; BYBLOCK mode adjusts each split's
    // start, as reported by getAdjustedStart().
    for (int pos = 0; pos < infile.getLen(); pos += rand.nextInt(flen / 8)) {
      // read from random positions, verifying that there exist two sequential
      // lines as written in writeSplitTestFile
      final SplitCompressionInputStream in =
        codec.createInputStream(fs.open(infile.getPath()), dcmp,
            pos, flen, SplittableCompressionCodec.READ_MODE.BYBLOCK);
      if (in.getAdjustedStart() >= flen) {
        break;
      }
      LOG.info("SAMPLE " + in.getAdjustedStart() + "," + in.getAdjustedEnd());
      final LineReader lreader = new LineReader(in);
      lreader.readLine(line); // ignore; likely partial
      if (in.getPos() >= flen) {
        break;
      }
      lreader.readLine(line);
      final int seq1 = readLeadingInt(line);
      lreader.readLine(line);
      if (in.getPos() >= flen) {
        break;
      }
      final int seq2 = readLeadingInt(line);
      // Consecutive records must carry consecutive sequence numbers.
      assertEquals("Mismatched lines", seq1 + 1, seq2);
    }
  } finally {
    // Always return the pooled decompressor, pass or fail.
    CodecPool.returnDecompressor(dcmp);
  }
  // remove on success
  fs.delete(infile.getPath().getParent(), true);
}
/** Decodes the 4-byte big-endian int with which each test record begins. */
private static int readLeadingInt(Text txt) throws IOException {
  try (DataInputStream din =
      new DataInputStream(new ByteArrayInputStream(txt.getBytes()))) {
    return din.readInt();
  }
}
/** Write infLen bytes (deflated) to file in test dir using codec.
 * Records are of the form
 * <i><b64 rand><i+i><b64 rand>
 * i.e. each line starts with a 4-byte big-endian sequence number stamped
 * over the head of a base64 block (so readLeadingInt can recover it),
 * terminated by '\n'.
 *
 * @return the path of the file written
 */
private static Path writeSplitTestFile(FileSystem fs, Random rand,
    CompressionCodec codec, long infLen) throws IOException {
  final int REC_SIZE = 1024;
  final Path wd = new Path(new Path(
      System.getProperty("test.build.data", "/tmp")).makeQualified(fs),
      codec.getClass().getSimpleName());
  final Path file = new Path(wd, "test" + codec.getDefaultExtension());
  final byte[] b = new byte[REC_SIZE];
  final Base64 b64 = new Base64(0, null);
  DataOutputStream fout = null;
  Compressor cmp = CodecPool.getCompressor(codec);
  try {
    fout = new DataOutputStream(codec.createOutputStream(
        fs.create(file, true), cmp));
    final DataOutputBuffer dob = new DataOutputBuffer(REC_SIZE * 4 / 3 + 4);
    int seq = 0;
    while (infLen > 0) {
      rand.nextBytes(b);
      final byte[] b64enc = b64.encode(b); // ensures rand printable, no LF
      dob.reset();
      dob.writeInt(seq);
      // Stamp the sequence number over the first bytes of the encoded block.
      System.arraycopy(dob.getData(), 0, b64enc, 0, dob.getLength());
      fout.write(b64enc);
      fout.write('\n');
      ++seq;
      infLen -= b64enc.length;
    }
    LOG.info("Wrote " + seq + " records to " + file);
  } finally {
    // Close the stream and return the pooled compressor even on failure.
    IOUtils.cleanup(LOG, fout);
    CodecPool.returnCompressor(cmp);
  }
  return file;
}
/**
 * Verifies the CodecPool does not hand a compressor returned for
 * DefaultCodec back out for GzipCodec. Requires native zlib; skipped (with
 * a warning) otherwise.
 */
@Test
public void testCodecPoolGzipReuse() throws Exception {
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
  if (!ZlibFactory.isNativeZlibLoaded(conf)) {
    LOG.warn("testCodecPoolGzipReuse skipped: native libs not loaded");
    return;
  }
  GzipCodec gzc = ReflectionUtils.newInstance(GzipCodec.class, conf);
  DefaultCodec dfc = ReflectionUtils.newInstance(DefaultCodec.class, conf);
  Compressor c1 = CodecPool.getCompressor(gzc);
  Compressor c2 = CodecPool.getCompressor(dfc);
  CodecPool.returnCompressor(c1);
  CodecPool.returnCompressor(c2);
  // Requesting a gzip compressor must not return the one pooled for dfc.
  assertTrue("Got mismatched ZlibCompressor", c2 != CodecPool.getCompressor(gzc));
}
/**
 * Verifies that a compressor taken back from the CodecPool is reinitialized
 * with the current Configuration: pooled at BEST_COMPRESSION, reacquired
 * with NO_COMPRESSION, after which the output must be no smaller than the
 * input.
 *
 * @param conf configuration whose zlib compression level is mutated
 * @param codec the zlib-backed codec under test
 */
private static void gzipReinitTest(Configuration conf, CompressionCodec codec)
    throws IOException {
  // Add codec to cache
  ZlibFactory.setCompressionLevel(conf, CompressionLevel.BEST_COMPRESSION);
  ZlibFactory.setCompressionStrategy(conf,
      CompressionStrategy.DEFAULT_STRATEGY);
  Compressor c1 = CodecPool.getCompressor(codec);
  CodecPool.returnCompressor(c1);
  // reset compressor's compression level to perform no compression
  ZlibFactory.setCompressionLevel(conf, CompressionLevel.NO_COMPRESSION);
  Compressor c2 = CodecPool.getCompressor(codec, conf);
  // ensure the pool handed back the same compressor placed earlier
  assertTrue("Got mismatched ZlibCompressor", c1 == c2);
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  CompressionOutputStream cos = null;
  // write trivially compressable data
  byte[] b = new byte[1 << 15];
  Arrays.fill(b, (byte) 43);
  try {
    cos = codec.createOutputStream(bos, c2);
    cos.write(b);
  } finally {
    if (cos != null) {
      cos.close();
    }
    CodecPool.returnCompressor(c2);
  }
  byte[] outbytes = bos.toByteArray();
  // verify data were not compressed (NO_COMPRESSION took effect on reinit)
  assertTrue("Compressed bytes contrary to configuration",
      outbytes.length >= b.length);
}
/**
 * Creates a compressor for {@code codecClass} with NO_COMPRESSION configured
 * and verifies the output is not smaller than the input, i.e. that the
 * compression level from the Configuration is honored.
 *
 * @param conf configuration used to instantiate the codec (zlib level mutated)
 * @param codecClass fully-qualified codec class name
 * @throws IOException if the codec class cannot be loaded or on stream errors
 */
private static void codecTestWithNOCompression (Configuration conf,
    String codecClass) throws IOException {
  // Create a compressor with NO_COMPRESSION and make sure that
  // output is not compressed by comparing the size with the
  // original input
  CompressionCodec codec = null;
  ZlibFactory.setCompressionLevel(conf, CompressionLevel.NO_COMPRESSION);
  try {
    codec = (CompressionCodec)
      ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf);
  } catch (ClassNotFoundException cnfe) {
    // Chain the cause so the offending class name / classpath problem
    // is visible in the failure, instead of being silently dropped.
    throw new IOException("Illegal codec!", cnfe);
  }
  Compressor c = codec.createCompressor();
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  CompressionOutputStream cos = null;
  // write trivially compressable data
  byte[] b = new byte[1 << 15];
  Arrays.fill(b, (byte) 43);
  try {
    cos = codec.createOutputStream(bos, c);
    cos.write(b);
  } finally {
    if (cos != null) {
      cos.close();
    }
  }
  byte[] outbytes = bos.toByteArray();
  // verify data were not compressed
  assertTrue("Compressed bytes contrary to configuration(NO_COMPRESSION)",
      outbytes.length >= b.length);
}
/**
 * NO_COMPRESSION round trip through both the native (when available) and the
 * pure-Java zlib implementations.
 */
@Test
public void testCodecInitWithCompressionLevel() throws Exception {
  // First pass: native zlib, if it is loadable.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
  if (!ZlibFactory.isNativeZlibLoaded(conf)) {
    LOG.warn("testCodecInitWithCompressionLevel for native skipped"
        + ": native libs not loaded");
  } else {
    LOG.info("testCodecInitWithCompressionLevel with native");
    codecTestWithNOCompression(conf,
        "org.apache.hadoop.io.compress.GzipCodec");
    codecTestWithNOCompression(conf,
        "org.apache.hadoop.io.compress.DefaultCodec");
  }
  // Second pass: pure-Java zlib, always runs.
  conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  codecTestWithNOCompression(conf,
      "org.apache.hadoop.io.compress.DefaultCodec");
}
/**
 * Pool-reinit check (see gzipReinitTest) against both the native gzip codec
 * (when available) and the pure-Java DefaultCodec.
 */
@Test
public void testCodecPoolCompressorReinit() throws Exception {
  Configuration conf = new Configuration();
  // Native pass, only if native zlib is present.
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
  if (!ZlibFactory.isNativeZlibLoaded(conf)) {
    LOG.warn("testCodecPoolCompressorReinit skipped: native libs not loaded");
  } else {
    gzipReinitTest(conf, ReflectionUtils.newInstance(GzipCodec.class, conf));
  }
  // Pure-Java pass, always runs.
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  gzipReinitTest(conf, ReflectionUtils.newInstance(DefaultCodec.class, conf));
}
/** SequenceFile round trip through DefaultCodec, small and large runs. */
@Test
public void testSequenceFileDefaultCodec() throws IOException, ClassNotFoundException,
    InstantiationException, IllegalAccessException {
  final String codecClass = "org.apache.hadoop.io.compress.DefaultCodec";
  sequenceFileCodecTest(conf, 100, codecClass, 100);
  sequenceFileCodecTest(conf, 200000, codecClass, 1000000);
}
/** SequenceFile round trip through the pure-Java bzip2 implementation. */
@Test(timeout=20000)
public void testSequenceFileBZip2Codec() throws IOException, ClassNotFoundException,
    InstantiationException, IllegalAccessException {
  final String codecClass = "org.apache.hadoop.io.compress.BZip2Codec";
  Configuration conf = new Configuration();
  // Force the java-builtin bzip2 library regardless of native availability.
  conf.set("io.compression.codec.bzip2.library", "java-builtin");
  // {record count, block size} pairs: empty, small, and large files.
  for (int[] dims : new int[][] {{0, 100}, {100, 100}, {200000, 1000000}}) {
    sequenceFileCodecTest(conf, dims[0], codecClass, dims[1]);
  }
}
/**
 * SequenceFile round trip through system-native bzip2; silently a no-op
 * when the native hadoop library (or native bzip2) is unavailable.
 */
@Test(timeout=20000)
public void testSequenceFileBZip2NativeCodec() throws IOException,
    ClassNotFoundException, InstantiationException,
    IllegalAccessException {
  Configuration conf = new Configuration();
  conf.set("io.compression.codec.bzip2.library", "system-native");
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    return; // nothing to test without the native hadoop library
  }
  if (!Bzip2Factory.isNativeBzip2Loaded(conf)) {
    LOG.warn("Native hadoop library available but native bzip2 is not");
    return;
  }
  final String codecClass = "org.apache.hadoop.io.compress.BZip2Codec";
  sequenceFileCodecTest(conf, 0, codecClass, 100);
  sequenceFileCodecTest(conf, 100, codecClass, 100);
  sequenceFileCodecTest(conf, 200000, codecClass, 1000000);
}
/** SequenceFile round trip through DeflateCodec, small and large runs. */
@Test
public void testSequenceFileDeflateCodec() throws IOException, ClassNotFoundException,
    InstantiationException, IllegalAccessException {
  final String codecClass = "org.apache.hadoop.io.compress.DeflateCodec";
  sequenceFileCodecTest(conf, 100, codecClass, 100);
  sequenceFileCodecTest(conf, 200000, codecClass, 1000000);
}
/**
 * Round-trips {@code lines} Text key/value pairs through a block-compressed
 * SequenceFile using {@code codecClass}, then reads everything back and
 * verifies contents and record count. The temporary file is deleted on
 * success.
 *
 * @param conf configuration (mutated: io.seqfile.compress.blocksize is set)
 * @param lines number of records to write
 * @param codecClass fully-qualified compression codec class name
 * @param blockSize SequenceFile compression block size in bytes
 */
private static void sequenceFileCodecTest(Configuration conf, int lines,
                              String codecClass, int blockSize)
  throws IOException, ClassNotFoundException, InstantiationException, IllegalAccessException {
  Path filePath = new Path("SequenceFileCodecTest." + codecClass);
  // Configuration
  conf.setInt("io.seqfile.compress.blocksize", blockSize);
  // Create the SequenceFile
  FileSystem fs = FileSystem.get(conf);
  LOG.info("Creating SequenceFile with codec \"" + codecClass + "\"");
  SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, filePath,
      Text.class, Text.class, CompressionType.BLOCK,
      (CompressionCodec)Class.forName(codecClass).newInstance());
  // Write some data
  LOG.info("Writing to SequenceFile...");
  try {
    for (int i=0; i<lines; i++) {
      Text key = new Text("key" + i);
      Text value = new Text("value" + i);
      writer.append(key, value);
    }
  } finally {
    // Close in a finally block so a failed append cannot leak the writer.
    writer.close();
  }
  // Read the data back and check
  LOG.info("Reading from the SequenceFile...");
  SequenceFile.Reader reader = new SequenceFile.Reader(fs, filePath, conf);
  Writable key = (Writable)reader.getKeyClass().newInstance();
  Writable value = (Writable)reader.getValueClass().newInstance();
  int lc = 0;
  try {
    while (reader.next(key, value)) {
      assertEquals("key" + lc, key.toString());
      assertEquals("value" + lc, value.toString());
      lc ++;
    }
  } finally {
    reader.close();
  }
  assertEquals(lines, lc);
  // Delete temporary files
  fs.delete(filePath, false);
  LOG.info("SUCCESS! Completed SequenceFileCodecTest with codec \"" + codecClass + "\"");
}
/**
 * Regression test for HADOOP-8423: seeking in a block-compressed
 * stream would not properly reset the block decompressor state.
 */
@Test
public void testSnappyMapFile() throws Exception {
  // Snappy is JNI-backed; skip (not fail) when its native code is absent.
  Assume.assumeTrue(SnappyCodec.isNativeCodeLoaded());
  codecTestMapFile(SnappyCodec.class, CompressionType.BLOCK, 100);
}
/**
 * Writes a MapFile with the given codec/compression type, then performs two
 * keyed lookups ("002", "004") to exercise seeking within the compressed
 * index/data files.
 *
 * @param clazz codec class to instantiate via no-arg constructor
 * @param type compression type for the MapFile
 * @param records number of records to write
 */
private void codecTestMapFile(Class<? extends CompressionCodec> clazz,
    CompressionType type, int records) throws Exception {
  FileSystem fs = FileSystem.get(conf);
  LOG.info("Creating MapFiles with " + records +
          " records using codec " + clazz.getSimpleName());
  Path path = new Path(new Path(
      System.getProperty("test.build.data", "/tmp")),
    clazz.getSimpleName() + "-" + type + "-" + records);
  LOG.info("Writing " + path);
  createMapFile(conf, fs, path, clazz.newInstance(), type, records);
  MapFile.Reader reader = new MapFile.Reader(path, conf);
  try {
    Text key1 = new Text("002");
    assertNotNull(reader.get(key1, new Text()));
    Text key2 = new Text("004");
    assertNotNull(reader.get(key2, new Text()));
  } finally {
    // The reader was previously leaked; close it even if a lookup fails.
    reader.close();
  }
}
/**
 * Writes {@code records} Text entries (key == value, zero-padded sequence
 * numbers "000", "001", ...) to a MapFile at {@code path} using the given
 * codec and compression type.
 */
private static void createMapFile(Configuration conf, FileSystem fs, Path path,
    CompressionCodec codec, CompressionType type, int records) throws IOException {
  MapFile.Writer writer =
      new MapFile.Writer(conf, path,
          MapFile.Writer.keyClass(Text.class),
          MapFile.Writer.valueClass(Text.class),
          MapFile.Writer.compression(type, codec));
  try {
    Text key = new Text();
    for (int j = 0; j < records; j++) {
      key.set(String.format("%03d", j));
      writer.append(key, key);
    }
  } finally {
    // Close in a finally block so a failed append cannot leak the writer.
    writer.close();
  }
}
/**
 * Command-line entry point: runs codecTest over N generated records with
 * the requested codec. Usage: TestCodec [-count N] [-codec &lt;codec class&gt;]
 */
public static void main(String[] args) throws IOException {
  final String usage = "TestCodec [-count N] [-codec <codec class>]";
  if (args.length == 0) {
    System.err.println(usage);
    System.exit(-1);
  }
  int count = 10000;
  String codecClass = "org.apache.hadoop.io.compress.DefaultCodec";
  // Parse the command line; null entries and unknown flags are skipped.
  for (int idx = 0; idx < args.length; ++idx) {
    final String arg = args[idx];
    if ("-count".equals(arg)) {
      count = Integer.parseInt(args[++idx]);
    } else if ("-codec".equals(arg)) {
      codecClass = args[++idx];
    }
  }
  Configuration conf = new Configuration();
  int seed = 0;
  // Note that exceptions will propagate out.
  codecTest(conf, seed, count, codecClass);
}
/**
 * Compresses random bytes with java.util.zip.GZIPOutputStream and verifies
 * Hadoop's pure-Java GzipCodec (BuiltInGzipDecompressor) decodes them back
 * to the original payload.
 */
@Test
public void testGzipCompatibility() throws IOException {
  Random r = new Random();
  long seed = r.nextLong();
  r.setSeed(seed);
  // Log the seed so failures are reproducible.
  LOG.info("seed: " + seed);
  DataOutputBuffer dflbuf = new DataOutputBuffer();
  GZIPOutputStream gzout = new GZIPOutputStream(dflbuf);
  byte[] b = new byte[r.nextInt(128 * 1024 + 1)];
  r.nextBytes(b);
  gzout.write(b);
  gzout.close();
  DataInputBuffer gzbuf = new DataInputBuffer();
  gzbuf.reset(dflbuf.getData(), dflbuf.getLength());
  Configuration conf = new Configuration();
  // Force the pure-Java gzip decompressor.
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
  Decompressor decom = codec.createDecompressor();
  assertNotNull(decom);
  assertEquals(BuiltInGzipDecompressor.class, decom.getClass());
  InputStream gzin = codec.createInputStream(gzbuf, decom);
  // Reuse dflbuf to collect the round-tripped bytes.
  dflbuf.reset();
  IOUtils.copyBytes(gzin, dflbuf, 4096);
  final byte[] dflchk = Arrays.copyOf(dflbuf.getData(), dflbuf.getLength());
  assertArrayEquals(b, dflchk);
}
/**
 * Writes CONCAT (3..6) independently gzipped members back-to-back into one
 * buffer and verifies the codec's decompressor — which must be of
 * {@code decomClass} — decodes the entire concatenation, per the gzip
 * multi-member format.
 */
// NOTE(review): name violates lowerCamelCase; kept as-is since callers use it.
void GzipConcatTest(Configuration conf,
    Class<? extends Decompressor> decomClass) throws IOException {
  Random r = new Random();
  long seed = r.nextLong();
  r.setSeed(seed);
  // Log the seed so failures are reproducible.
  LOG.info(decomClass + " seed: " + seed);
  final int CONCAT = r.nextInt(4) + 3;
  final int BUFLEN = 128 * 1024;
  DataOutputBuffer dflbuf = new DataOutputBuffer();
  DataOutputBuffer chkbuf = new DataOutputBuffer();
  byte[] b = new byte[BUFLEN];
  for (int i = 0; i < CONCAT; ++i) {
    // Append one complete gzip member to dflbuf and the corresponding
    // plaintext slice to chkbuf.
    GZIPOutputStream gzout = new GZIPOutputStream(dflbuf);
    r.nextBytes(b);
    int len = r.nextInt(BUFLEN);
    int off = r.nextInt(BUFLEN - len);
    chkbuf.write(b, off, len);
    gzout.write(b, off, len);
    gzout.close();
  }
  final byte[] chk = Arrays.copyOf(chkbuf.getData(), chkbuf.getLength());
  CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
  Decompressor decom = codec.createDecompressor();
  assertNotNull(decom);
  assertEquals(decomClass, decom.getClass());
  DataInputBuffer gzbuf = new DataInputBuffer();
  gzbuf.reset(dflbuf.getData(), dflbuf.getLength());
  InputStream gzin = codec.createInputStream(gzbuf, decom);
  // Reuse dflbuf to collect the decompressed bytes.
  dflbuf.reset();
  IOUtils.copyBytes(gzin, dflbuf, 4096);
  final byte[] dflchk = Arrays.copyOf(dflbuf.getData(), dflbuf.getLength());
  assertArrayEquals(chk, dflchk);
}
/** Concatenated-gzip decode via the pure-Java BuiltInGzipDecompressor. */
@Test
public void testBuiltInGzipConcat() throws IOException {
  final Configuration conf = new Configuration();
  // Disable native libs to force the pure-Java gzip path.
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  GzipConcatTest(conf, BuiltInGzipDecompressor.class);
}
/** Concatenated-gzip decode via the native GzipZlibDecompressor. */
@Test
public void testNativeGzipConcat() throws IOException {
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
  if (ZlibFactory.isNativeZlibLoaded(conf)) {
    GzipConcatTest(conf, GzipCodec.GzipZlibDecompressor.class);
  } else {
    LOG.warn("skipped: native libs not loaded");
  }
}
/**
 * Writes a gzip text file with java.util.zip, then reads it back through a
 * decompressor obtained from the CodecPool and checks the content survives.
 * Uses the pure-Java zlib path only.
 */
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
    new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  try {
    String line = br.readLine();
    assertEquals("Didn't get the same message back!", msg, line);
  } finally {
    // Close the reader and return the pooled decompressor even on failure;
    // both were previously leaked when the assertion failed.
    br.close();
    CodecPool.returnDecompressor(decompressor);
  }
}
/**
 * Reads {@code filename} back with java.util.zip and checks its single line
 * equals {@code msg}; the file is always deleted afterwards, pass or fail.
 */
private void verifyGzipFile(String filename, String msg) throws IOException {
  try (BufferedReader r = new BufferedReader(new InputStreamReader(
      new GZIPInputStream(new FileInputStream(filename))))) {
    assertEquals("Got invalid line back from " + filename, msg, r.readLine());
  } finally {
    // Remove the scratch file regardless of the outcome.
    new File(filename).delete();
  }
}
/**
 * Streams just over 2^32 characters (4097 buffers of 1MiB NULs) through the
 * pure-Java gzip path to catch 32-bit length-counter overflow, then reads
 * every buffer back and checks length and content.
 */
@Test
public void testGzipLongOverflow() throws IOException {
  LOG.info("testGzipLongOverflow");
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
    new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final int NBUF = 1024 * 4 + 1;
  final char[] buf = new char[1024 * 1024];
  for (int i = 0; i < buf.length; i++) buf[i] = '\0';
  for (int i = 0; i < NBUF; i++) {
    bw.write(buf);
  }
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  for (int j = 0; j < NBUF; j++) {
    int n = br.read(buf);
    // Every 1MiB read must complete fully and contain only NULs.
    assertEquals("got wrong read length!", n, buf.length);
    for (int i = 0; i < buf.length; i++)
      assertEquals("got wrong byte!", buf[i], '\0');
  }
  br.close();
}
/**
 * Creates a gzip file via a pooled Compressor (native or pure-Java per
 * {@code useNative}) and verifies java.util.zip can read it back; then
 * repeats the round trip via codec.createOutputStream().
 *
 * @param useNative whether to require the native zlib implementation
 */
public void testGzipCodecWrite(boolean useNative) throws IOException {
  // Create a gzipped file using a compressor from the CodecPool,
  // and try to read it back via the regular GZIPInputStream.
  // Use native libs per the parameter
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, useNative);
  if (useNative) {
    if (!ZlibFactory.isNativeZlibLoaded(conf)) {
      LOG.warn("testGzipCodecWrite skipped: native libs not loaded");
      return;
    }
  } else {
    assertFalse("ZlibFactory is using native libs against request",
        ZlibFactory.isNativeZlibLoaded(conf));
  }
  // Ensure that the CodecPool has a BuiltInZlibDeflater in it.
  Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
  assertNotNull("zlibCompressor is null!", zlibCompressor);
  assertTrue("ZlibFactory returned unexpected deflator",
      useNative ? zlibCompressor instanceof ZlibCompressor
          : zlibCompressor instanceof BuiltInZlibDeflater);
  CodecPool.returnCompressor(zlibCompressor);
  // Create a GZIP text file via the Compressor interface.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec",
      codec instanceof GzipCodec);
  final String msg = "This is the message we are going to compress.";
  final String tmpDir = System.getProperty("test.build.data", "/tmp/");
  final String fileName = new Path(new Path(tmpDir),
      "testGzipCodecWrite.txt.gz").toString();
  BufferedWriter w = null;
  Compressor gzipCompressor = CodecPool.getCompressor(codec);
  if (null != gzipCompressor) {
    // If it gives us back a Compressor, we should be able to use this
    // to write files we can then read back with Java's gzip tools.
    OutputStream os = new CompressorStream(new FileOutputStream(fileName),
        gzipCompressor);
    w = new BufferedWriter(new OutputStreamWriter(os));
    w.write(msg);
    w.close();
    CodecPool.returnCompressor(gzipCompressor);
    // verifyGzipFile deletes the file after checking it.
    verifyGzipFile(fileName, msg);
  }
  // Create a gzip text file via codec.getOutputStream().
  w = new BufferedWriter(new OutputStreamWriter(
      codec.createOutputStream(new FileOutputStream(fileName))));
  w.write(msg);
  w.close();
  verifyGzipFile(fileName, msg);
}
/** Runs the gzip write round trip with the pure-Java zlib implementation. */
@Test
public void testGzipCodecWriteJava() throws IOException {
  testGzipCodecWrite(false);
}
/** Runs the gzip write round trip with the native zlib implementation. */
@Test
public void testGzipNativeCodecWrite() throws IOException {
  testGzipCodecWrite(true);
}
/**
 * CodecPool semantics for gzip decompressors: a pure-Java
 * BuiltInGzipDecompressor must not be reused from the pool, while a native
 * gzip decompressor should be.
 */
@Test  // annotation was missing, so JUnit 4 silently never ran this test
public void testCodecPoolAndGzipDecompressor() {
  // BuiltInZlibInflater should not be used as the GzipCodec decompressor.
  // Assert that this is the case.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
      false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // This should give us a BuiltInZlibInflater.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  // its createOutputStream() just wraps the existing stream in a
  // java.util.zip.GZIPOutputStream.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec",
      codec instanceof GzipCodec);
  // make sure we don't get a null decompressor
  Decompressor codecDecompressor = codec.createDecompressor();
  if (null == codecDecompressor) {
    fail("Got null codecDecompressor");
  }
  // Asking the CodecPool for a decompressor for GzipCodec
  // should not return null
  Decompressor poolDecompressor = CodecPool.getDecompressor(codec);
  if (null == poolDecompressor) {
    fail("Got null poolDecompressor");
  }
  // return a couple decompressors
  CodecPool.returnDecompressor(zlibDecompressor);
  CodecPool.returnDecompressor(poolDecompressor);
  Decompressor poolDecompressor2 = CodecPool.getDecompressor(codec);
  if (poolDecompressor.getClass() == BuiltInGzipDecompressor.class) {
    // The pure-Java gzip decompressor must NOT be pooled and handed back.
    if (poolDecompressor == poolDecompressor2) {
      fail("Reused java gzip decompressor in pool");
    }
  } else {
    // A native gzip decompressor SHOULD be pooled and reused.
    if (poolDecompressor != poolDecompressor2) {
      fail("Did not reuse native gzip decompressor in pool");
    }
  }
}
}
|
apache/impala | 37,596 | fe/src/main/java/org/apache/impala/analysis/TableDef.java | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.impala.analysis;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.impala.authorization.Privilege;
import org.apache.impala.catalog.Column;
import org.apache.impala.catalog.FeFsTable;
import org.apache.impala.catalog.FeTable;
import org.apache.impala.catalog.HdfsStorageDescriptor;
import org.apache.impala.catalog.RowFormat;
import org.apache.impala.catalog.Type;
import org.apache.impala.common.AnalysisException;
import org.apache.impala.common.FileSystemUtil;
import org.apache.impala.common.Pair;
import org.apache.impala.thrift.TAccessEvent;
import org.apache.impala.thrift.TBucketInfo;
import org.apache.impala.thrift.TBucketType;
import org.apache.impala.thrift.TCatalogObjectType;
import org.apache.impala.thrift.THdfsFileFormat;
import org.apache.impala.thrift.TQueryOptions;
import org.apache.impala.thrift.TSortingOrder;
import org.apache.impala.util.AcidUtils;
import org.apache.impala.util.KuduUtil;
import org.apache.impala.util.MetaStoreUtil;
import org.apache.thrift.TException;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
/**
* Represents the table parameters in a CREATE TABLE statement. These parameters
* correspond to the following clauses in a CREATE TABLE statement:
* - EXTERNAL
* - IF NOT EXISTS
* - PARTITIONED BY
* - PARTITION BY
* - ROWFORMAT
* - FILEFORMAT
* - COMMENT
* - SERDEPROPERTIES
* - TBLPROPERTIES
* - LOCATION
* - CACHED IN
* - SORT BY
*/
class TableDef {
  // Name of the new table, as given in the CREATE TABLE statement.
  private final TableName tableName_;

  // List of column definitions
  private final List<ColumnDef> columnDefs_ = new ArrayList<>();

  // Names of primary key columns. Populated by the parser. An empty value doesn't
  // mean no primary keys were specified as the columnDefs_ could contain primary keys.
  private final List<String> primaryKeyColNames_ = new ArrayList<>();

  // If true, the primary key is unique. If not, and the table is a Kudu table then an
  // auto-incrementing column will be added automatically by Kudu engine. This extra key
  // column helps produce a unique composite primary key (primary keys +
  // auto-incrementing construct).
  // This is also used for Iceberg table and set to false if "NOT ENFORCED" is provided
  // for the primary key.
  private boolean isPrimaryKeyUnique_;

  // If true (EXTERNAL), the table's data will be preserved if dropped.
  private final boolean isExternal_;

  // If true (IF NOT EXISTS), no errors are thrown if the table already exists.
  private final boolean ifNotExists_;

  // Partitioning parameters (PARTITIONED BY / PARTITION BY clauses).
  private final TableDataLayout dataLayout_;

  /////////////////////////////////////////
  // BEGIN: Members that need to be reset()

  // Authoritative list of primary key column definitions populated during analysis.
  private final List<ColumnDef> primaryKeyColDefs_ = new ArrayList<>();

  // Hive primary keys and foreign keys structures populated during analysis.
  List<SQLPrimaryKey> sqlPrimaryKeys_ = new ArrayList<>();
  List<SQLForeignKey> sqlForeignKeys_ = new ArrayList<>();

  // Accessor for the Hive-format primary key list built during analysis.
  public List<SQLPrimaryKey> getSqlPrimaryKeys() {
    return sqlPrimaryKeys_;
  }

  // Accessor for the Hive-format foreign key list built during analysis.
  public List<SQLForeignKey> getSqlForeignKeys() {
    return sqlForeignKeys_;
  }

  // True if analyze() has been called.
  private boolean isAnalyzed_ = false;

  // Generated properties set during analysis. Currently used by Kudu and Iceberg.
  private Map<String, String> generatedProperties_ = new HashMap<>();

  // END: Members that need to be reset()
  /////////////////////////////////////////
/**
* Set of table options. These options are grouped together for convenience while
* parsing CREATE TABLE statements. They are typically found at the end of CREATE
* TABLE statements.
*/
static class Options {
// Optional list of columns to sort data by when inserting into this table.
final List<String> sortCols;
// Comment to attach to the table
final String comment;
// Custom row format of the table. Leave null to specify default row format.
final RowFormat rowFormat;
// Key/values to persist with table serde metadata.
final Map<String, String> serdeProperties;
// File format of the table
final THdfsFileFormat fileFormat;
// The HDFS location of where the table data will stored.
final HdfsUri location;
// The HDFS caching op that should be applied to this table.
final HdfsCachingOp cachingOp;
// Key/values to persist with table metadata.
final Map<String, String> tblProperties;
// Sorting order for SORT BY queries.
final TSortingOrder sortingOrder;
// Bucket desc for CLUSTERED BY
final TBucketInfo bucketInfo;
Options(TBucketInfo bucketInfo, Pair<List<String>, TSortingOrder> sortProperties,
String comment, RowFormat rowFormat, Map<String, String> serdeProperties,
THdfsFileFormat fileFormat, HdfsUri location, HdfsCachingOp cachingOp,
Map<String, String> tblProperties, TQueryOptions queryOptions) {
this.sortCols = sortProperties.first;
this.sortingOrder = sortProperties.second;
this.comment = comment;
this.rowFormat = rowFormat;
Preconditions.checkNotNull(serdeProperties);
this.serdeProperties = serdeProperties;
// The file format passed via STORED AS <file format> has a higher precedence than
// the one set in query options.
this.fileFormat = (fileFormat != null) ?
fileFormat : queryOptions.getDefault_file_format();
this.location = location;
this.cachingOp = cachingOp;
Preconditions.checkNotNull(tblProperties);
this.tblProperties = tblProperties;
this.bucketInfo = bucketInfo;
}
public Options(String comment, TQueryOptions queryOptions) {
// Passing null to file format so that it uses the file format from the query option
// if specified, otherwise it will use the default file format, which is TEXT.
this(null, new Pair<>(ImmutableList.of(), TSortingOrder.LEXICAL), comment,
RowFormat.DEFAULT_ROW_FORMAT, new HashMap<>(), /* file format */null, null,
null, new HashMap<>(), queryOptions);
}
}
private Options options_;
/**
* Primary Key attributes grouped together to be populated by the parser.
* Currently only defined for HDFS tables.
*/
static class PrimaryKey {
// Primary key table name
final TableName pkTableName;
// Primary Key columns
final List<String> primaryKeyColNames;
// Primary Key constraint name
final String pkConstraintName;
// Constraints
final boolean relyCstr;
final boolean validateCstr;
final boolean enableCstr;
public PrimaryKey(TableName pkTableName, List<String> primaryKeyColNames,
String pkConstraintName, boolean relyCstr,
boolean validateCstr, boolean enableCstr) {
this.pkTableName = pkTableName;
this.primaryKeyColNames = primaryKeyColNames;
this.pkConstraintName = pkConstraintName;
this.relyCstr = relyCstr;
this.validateCstr = validateCstr;
this.enableCstr = enableCstr;
}
public TableName getPkTableName() {
return pkTableName;
}
public List<String> getPrimaryKeyColNames() {
return primaryKeyColNames;
}
public String getPkConstraintName() {
return pkConstraintName;
}
public boolean isRelyCstr() {
return relyCstr;
}
public boolean isValidateCstr() {
return validateCstr;
}
public boolean isEnableCstr() {
return enableCstr;
}
}
/**
* Foreign Key attributes grouped together to be populated by the parser.
* Currently only defined for HDFS tables. An FK definition is of the form
* "foreign key(col1, col2) references pk_tbl(col3, col4)"
*/
static class ForeignKey {
// Primary key table
final TableName pkTableName;
// Primary key cols
final List<String> primaryKeyColNames;
// Foreign key cols
final List<String> foreignKeyColNames;
// Name of fk
String fkConstraintName;
// Fully qualified pk name. Set during analysis.
TableName fullyQualifiedPkTableName;
// Constraints
final boolean relyCstr;
final boolean validateCstr;
final boolean enableCstr;
ForeignKey(TableName pkTableName, List<String> primaryKeyColNames,
List<String> foreignKeyColNames, String fkName, boolean relyCstr,
boolean validateCstr, boolean enableCstr) {
this.pkTableName = pkTableName;
this.primaryKeyColNames = primaryKeyColNames;
this.foreignKeyColNames = foreignKeyColNames;
this.relyCstr = relyCstr;
this.validateCstr = validateCstr;
this.enableCstr = enableCstr;
this.fkConstraintName = fkName;
}
public TableName getPkTableName() {
return pkTableName;
}
public List<String> getPrimaryKeyColNames() {
return primaryKeyColNames;
}
public List<String> getForeignKeyColNames() {
return foreignKeyColNames;
}
public String getFkConstraintName() {
return fkConstraintName;
}
public void setConstraintName(String constraintName) {
fkConstraintName = constraintName;
}
public TableName getFullyQualifiedPkTableName() {
return fullyQualifiedPkTableName;
}
public boolean isRelyCstr() {
return relyCstr;
}
public boolean isValidateCstr() {
return validateCstr;
}
public boolean isEnableCstr() {
return enableCstr;
}
}
  // A TableDef will have only one primary key.
  private PrimaryKey primaryKey_;
  // There may be multiple foreign keys for a TableDef forming multiple PK-FK
  // relationships.
  private List<ForeignKey> foreignKeysList_ = new ArrayList<>();
  // Fully-qualified table name; set as a result of analysis.
  private TableName fqTableName_;
  /**
   * Creates an unanalyzed TableDef for 'tableName'. 'isExternal' marks a
   * CREATE EXTERNAL TABLE statement, 'ifNotExists' an IF NOT EXISTS clause.
   */
  TableDef(TableName tableName, boolean isExternal, boolean ifNotExists) {
    tableName_ = tableName;
    isExternal_ = isExternal;
    ifNotExists_ = ifNotExists;
    dataLayout_ = TableDataLayout.createEmptyLayout();
  }
  /**
   * Clears column definitions, collected primary key columns, generated
   * properties and the analyzed flag so the definition can be re-analyzed.
   * NOTE(review): primaryKey_, foreignKeysList_ and fqTableName_ are not reset
   * here — confirm whether that is intentional.
   */
  public void reset() {
    primaryKeyColDefs_.clear();
    columnDefs_.clear();
    isAnalyzed_ = false;
    generatedProperties_.clear();
  }
  /** Returns the fully-qualified name once analysis has resolved it, else the parsed name. */
  public TableName getTblName() {
    return fqTableName_ != null ? fqTableName_ : tableName_;
  }
  /** Returns the unqualified table name. */
  public String getTbl() { return tableName_.getTbl(); }
  public boolean isAnalyzed() { return isAnalyzed_; }
  List<ColumnDef> getColumnDefs() { return columnDefs_; }
  List<String> getColumnNames() { return ColumnDef.toColumnNames(columnDefs_); }
  /** Returns the column types in column-definition order. */
  List<Type> getColumnTypes() {
    return columnDefs_.stream().map(col -> col.getType()).collect(Collectors.toList());
  }
  public void setPrimaryKey(TableDef.PrimaryKey primaryKey) {
    this.primaryKey_ = primaryKey;
  }
  public void setPrimaryKeyUnique(boolean isKeyUnique) {
    this.isPrimaryKeyUnique_ = isKeyUnique;
  }
  List<String> getPartitionColumnNames() {
    return ColumnDef.toColumnNames(getPartitionColumnDefs());
  }
  List<ColumnDef> getPartitionColumnDefs() {
    return dataLayout_.getPartitionColumnDefs();
  }
  boolean isKuduTable() { return options_.fileFormat == THdfsFileFormat.KUDU; }
  boolean isIcebergTable() { return options_.fileFormat == THdfsFileFormat.ICEBERG; }
  List<String> getPrimaryKeyColumnNames() { return primaryKeyColNames_; }
  List<ColumnDef> getPrimaryKeyColumnDefs() { return primaryKeyColDefs_; }
  boolean isPrimaryKeyUnique() { return isPrimaryKeyUnique_; }
  boolean isExternal() { return isExternal_; }
  boolean getIfNotExists() { return ifNotExists_; }
  Map<String, String> getGeneratedProperties() { return generatedProperties_; }
  /** Records a table property generated during analysis (key must be non-null). */
  void putGeneratedProperty(String key, String value) {
    Preconditions.checkNotNull(key);
    generatedProperties_.put(key, value);
  }
  List<KuduPartitionParam> getKuduPartitionParams() {
    return dataLayout_.getKuduPartitionParams();
  }
  List<IcebergPartitionSpec> getIcebergPartitionSpecs() {
    return dataLayout_.getIcebergPartitionSpecs();
  }
  /** Sets the table options; must be called before analyze(), which requires them. */
  void setOptions(Options options) {
    Preconditions.checkNotNull(options);
    options_ = options;
  }
  List<String> getSortColumns() { return options_.sortCols; }
  String getComment() { return options_.comment; }
  Map<String, String> getTblProperties() { return options_.tblProperties; }
  HdfsCachingOp getCachingOp() { return options_.cachingOp; }
  HdfsUri getLocation() { return options_.location; }
  Map<String, String> getSerdeProperties() { return options_.serdeProperties; }
  THdfsFileFormat getFileFormat() { return options_.fileFormat; }
  RowFormat getRowFormat() { return options_.rowFormat; }
  TSortingOrder getSortingOrder() { return options_.sortingOrder; }
  List<ForeignKey> getForeignKeysList() { return foreignKeysList_; }
  // NOTE(review): method name has a typo ("geTBucketInfo"); kept as-is to avoid
  // breaking callers.
  TBucketInfo geTBucketInfo() { return options_.bucketInfo; }
  /**
   * Returns true if the chosen file format supports CLUSTERED BY bucketing,
   * i.e. it is none of Kudu, Iceberg, Hudi-Parquet or JDBC.
   */
  boolean isBucketableFormat() {
    return options_.fileFormat != THdfsFileFormat.KUDU
        && options_.fileFormat != THdfsFileFormat.ICEBERG
        && options_.fileFormat != THdfsFileFormat.HUDI_PARQUET
        && options_.fileFormat != THdfsFileFormat.JDBC;
  }
  /**
   * Analyzes the parameters of a CREATE TABLE statement.
   *
   * Resolves and validates the fully-qualified table name, then analyzes ACID
   * properties, column definitions, primary/foreign keys and the table options
   * (in that order). Throws AnalysisException when the table already exists
   * (unless IF NOT EXISTS was given) or any validation fails. Idempotent: a
   * second call returns immediately.
   */
  void analyze(Analyzer analyzer) throws AnalysisException {
    if (isAnalyzed_) return;
    Preconditions.checkState(tableName_ != null && !tableName_.isEmpty());
    fqTableName_ = analyzer.getFqTableName(getTblName());
    fqTableName_.analyze();
    analyzeAcidProperties(analyzer);
    analyzeColumnDefs(analyzer);
    analyzePrimaryKeys(analyzer);
    analyzeForeignKeys(analyzer);
    // Reject if the table already exists, unless IF NOT EXISTS was specified.
    if (analyzer.dbContainsTable(getTblName().getDb(), getTbl(), Privilege.CREATE)
        && !getIfNotExists()) {
      throw new AnalysisException(Analyzer.TBL_ALREADY_EXISTS_ERROR_MSG + getTblName());
    }
    analyzer.addAccessEvent(new TAccessEvent(fqTableName_.toString(),
        TCatalogObjectType.TABLE, Privilege.CREATE.toString()));
    // Options must have been supplied via setOptions() before analysis.
    Preconditions.checkNotNull(options_);
    analyzeOptions(analyzer);
    isAnalyzed_ = true;
  }
/**
* Analyzes table and partition column definitions, checking whether all column
* names are unique.
*/
private void analyzeColumnDefs(Analyzer analyzer) throws AnalysisException {
Set<String> colNames = new HashSet<>();
for (ColumnDef colDef: columnDefs_) {
colDef.analyze(analyzer);
if (!colNames.add(colDef.getColName().toLowerCase())) {
throw new AnalysisException("Duplicate column name: " + colDef.getColName());
}
if (!analyzeColumnOption(colDef)) {
throw new AnalysisException(String.format("Unsupported column options for " +
"file format '%s': '%s'", getFileFormat().name(), colDef.toString()));
}
}
for (ColumnDef colDef: getPartitionColumnDefs()) {
colDef.analyze(analyzer);
if (!colDef.getType().supportsTablePartitioning()) {
throw new AnalysisException(
String.format("Type '%s' is not supported as partition-column type " +
"in column: %s", colDef.getType().toSql(), colDef.getColName()));
}
if (!colNames.add(colDef.getColName().toLowerCase())) {
throw new AnalysisException("Duplicate column name: " + colDef.getColName());
}
}
}
/**
* Kudu and Iceberg tables have their own column options, this function will return
* false if we use column options incorrectly, e.g. primary key column option for an
* Iceberg table.
*/
private boolean analyzeColumnOption(ColumnDef columnDef) {
if (isKuduTable()) {
if (columnDef.hasIncompatibleKuduOptions()) return false;
} else if (isIcebergTable()) {
if (columnDef.hasIncompatibleIcebergOptions()) return false;
} else if (columnDef.hasKuduOptions() || columnDef.hasIcebergOptions()) {
// If the table is neither Kudu or Iceberg but has some incompatible column options.
return false;
}
return true;
}
  /**
   * Analyzes the primary key columns. Checks if the specified primary key columns exist
   * in the table column definitions and if composite primary keys are properly defined
   * using the PRIMARY KEY (col,..col) clause.
   *
   * Also handles Kudu-specific promotion of partition columns to a non unique
   * primary key, and builds the HMS SQLPrimaryKey entries when a PRIMARY KEY
   * clause (primaryKey_) was parsed.
   */
  private void analyzePrimaryKeys(Analyzer analyzer) throws AnalysisException {
    // Collect columns flagged PRIMARY KEY inline in their column definition.
    for (ColumnDef colDef: columnDefs_) {
      if (colDef.isPrimaryKey()) {
        primaryKeyColDefs_.add(colDef);
        if (!colDef.isPrimaryKeyUnique() && !isKuduTable()) {
          throw new AnalysisException(
              "Non unique primary key is only supported for Kudu.");
        }
      }
    }
    // At most one column may carry an inline key option; composite keys must
    // use the trailing PRIMARY KEY (col1, col2, ...) clause.
    if (primaryKeyColDefs_.size() > 1) {
      String primaryKeyString =
          KuduUtil.getPrimaryKeyString(primaryKeyColDefs_.get(0).isPrimaryKeyUnique());
      throw new AnalysisException(String.format(
          "Multiple %sS specified. Composite %s can be specified using the %s " +
          "(col1, col2, ...) syntax at the end of the column definition.",
          primaryKeyString, primaryKeyString, primaryKeyString));
    }
    if (primaryKeyColNames_.isEmpty()) {
      if (primaryKey_ == null || primaryKey_.getPrimaryKeyColNames().isEmpty()) {
        // No PRIMARY KEY clause at all; only Kudu tables need further handling.
        if (!isKuduTable()) return;
        if (!primaryKeyColDefs_.isEmpty()) {
          setPrimaryKeyUnique(primaryKeyColDefs_.get(0).isPrimaryKeyUnique());
          return;
        } else if (!getKuduPartitionParams().isEmpty()) {
          // Promote all partition columns as non unique primary key columns if primary
          // keys are not declared by the user for the Kudu table. Since key columns
          // must be as the first columns in the table, only all partition columns which
          // are the beginning columns of the table can be promoted as non unique primary
          // key columns.
          List<String> colNames = getColumnNames();
          // Maps each partition column's position in the table to its name,
          // ordered by position.
          TreeMap<Integer, String> partitionCols = new TreeMap<Integer, String>();
          for (KuduPartitionParam partitionParam: getKuduPartitionParams()) {
            for (String colName: partitionParam.getColumnNames()) {
              int index = colNames.indexOf(colName);
              Preconditions.checkState(index >= 0);
              partitionCols.put(index, colName);
            }
          }
          // Promotion is only valid when the partition columns form a prefix
          // of the table's columns (last index == count - 1).
          if (partitionCols.size() > 0
              && partitionCols.lastKey() == partitionCols.size() - 1) {
            primaryKeyColNames_.addAll(partitionCols.values());
            setPrimaryKeyUnique(false);
            analyzer.addWarning(String.format(
                "Partition columns (%s) are promoted as non unique primary key.",
                String.join(", ", partitionCols.values())));
          } else {
            throw new AnalysisException(
                "Specify primary key or non unique primary key for the Kudu table, " +
                "or create partitions with the beginning columns of the table.");
          }
        }
        if (primaryKeyColNames_.isEmpty()) return;
      } else {
        // Take the key columns from the parsed PRIMARY KEY (...) clause.
        primaryKeyColNames_.addAll(primaryKey_.getPrimaryKeyColNames());
      }
    }
    String primaryKeyString = KuduUtil.getPrimaryKeyString(isPrimaryKeyUnique_);
    // Inline key options cannot be combined with a PRIMARY KEY (...) clause.
    if (!primaryKeyColDefs_.isEmpty()) {
      throw new AnalysisException(String.format(
          "Multiple %sS specified. Composite %s can be specified using the %s " +
          "(col1, col2, ...) syntax at the end of the column definition.",
          primaryKeyString, primaryKeyString, primaryKeyString));
    } else if (!primaryKeyColNames_.isEmpty() && !isPrimaryKeyUnique()
        && !isKuduTable() && !isIcebergTable()) {
      throw new AnalysisException(primaryKeyString +
          " is only supported for Kudu and Iceberg.");
    }
    if (isIcebergTable() && isPrimaryKeyUnique_) {
      throw new AnalysisException(
          "Iceberg tables only support NOT ENFORCED primary keys.");
    }
    // For Iceberg, every partition column must be part of the primary key.
    Set<String> hashedPKColNames = Sets.newHashSet(primaryKeyColNames_);
    List<IcebergPartitionSpec> icebergPartitionSpecs = getIcebergPartitionSpecs();
    if (!icebergPartitionSpecs.isEmpty()) {
      Preconditions.checkState(icebergPartitionSpecs.size() == 1);
      IcebergPartitionSpec partSpec = icebergPartitionSpecs.get(0);
      for (IcebergPartitionField partField : partSpec.getIcebergPartitionFields()) {
        if (!hashedPKColNames.contains(partField.getFieldName())) {
          throw new AnalysisException("Partition columns have to be part of the " +
              "primary key for Iceberg tables.");
        }
      }
    }
    // Resolve each named key column against the column definitions. Removal
    // from the map doubles as the duplicate detector.
    Map<String, ColumnDef> colDefsByColName = ColumnDef.mapByColumnNames(columnDefs_);
    int keySeq = 1;
    String constraintName = null;
    for (String colName: primaryKeyColNames_) {
      colName = colName.toLowerCase();
      ColumnDef colDef = colDefsByColName.remove(colName);
      if (colDef == null) {
        if (ColumnDef.toColumnNames(primaryKeyColDefs_).contains(colName)) {
          throw new AnalysisException(String.format("Column '%s' is listed multiple " +
              "times as a %s.", colName, primaryKeyString));
        }
        throw new AnalysisException(String.format("%s column '%s' does not exist in " +
            "the table", primaryKeyString, colName));
      }
      if (colDef.isExplicitNullable()) {
        throw new AnalysisException(primaryKeyString + " columns cannot be nullable: " +
            colDef.toString());
      }
      // HDFS Table specific analysis.
      if (primaryKey_ != null) {
        // We do not support enable and validate for primary keys.
        if (primaryKey_.isEnableCstr()) {
          throw new AnalysisException("ENABLE feature is not supported yet.");
        }
        if (primaryKey_.isValidateCstr()) {
          throw new AnalysisException("VALIDATE feature is not supported yet.");
        }
        // All primary keys in a composite key should have the same constraint name. This
        // is necessary because of HIVE-16603. See IMPALA-9188 for details.
        if (constraintName == null) {
          constraintName = generateConstraintName();
        }
        // Each column of a primary key definition will be an SQLPrimaryKey.
        sqlPrimaryKeys_.add(new SQLPrimaryKey(getTblName().getDb(), getTbl(),
            colDef.getColName(), keySeq++, constraintName, primaryKey_.enableCstr,
            primaryKey_.validateCstr, primaryKey_.relyCstr));
      }
      primaryKeyColDefs_.add(colDef);
    }
  }
  /**
   * Analyzes FOREIGN KEY constraints: verifies that FK/PK column counts match,
   * resolves and validates the parent (PK) table and its columns, rejects the
   * unsupported ENABLE/VALIDATE constraint modes, and builds the HMS
   * SQLForeignKey entries for each referencing column.
   */
  private void analyzeForeignKeys(Analyzer analyzer) throws AnalysisException {
    if (foreignKeysList_ == null || foreignKeysList_.size() == 0) return;
    for (ForeignKey fk: foreignKeysList_) {
      // Foreign Key and Primary Key columns don't match.
      if (fk.getForeignKeyColNames().size() != fk.getPrimaryKeyColNames().size()){
        throw new AnalysisException("The number of foreign key columns should be same" +
            " as the number of parent key columns.");
      }
      // A parent table without a db falls back to the session's default db.
      String parentDb = fk.getPkTableName().getDb();
      if (parentDb == null) {
        parentDb = analyzer.getDefaultDb();
      }
      fk.fullyQualifiedPkTableName = new TableName(parentDb, fk.pkTableName.getTbl());
      // Check if parent table exists.
      if (!analyzer.dbContainsTable(parentDb, fk.getPkTableName().getTbl(),
          Privilege.VIEW_METADATA)) {
        throw new AnalysisException("Parent table not found: "
            + analyzer.getFqTableName(fk.getPkTableName()));
      }
      // Check for primary key cols in parent table.
      FeTable parentTable = analyzer.getTable(fk.getPkTableName(),
          Privilege.VIEW_METADATA);
      if (!(parentTable instanceof FeFsTable)) {
        throw new AnalysisException("Foreign keys on non-HDFS parent tables are not "
            + "supported.");
      }
      for (String pkCol : fk.getPrimaryKeyColNames()) {
        // TODO: Check column types of parent table and child tables match. Currently HMS
        // API fails if they don't, it's good to fail early during analysis here.
        if (!parentTable.getColumnNames().contains(pkCol.toLowerCase())) {
          throw new AnalysisException("Parent column not found: " + pkCol.toLowerCase());
        }
        // Hive has a bug that prevents foreign keys from being added when pk column is
        // not part of primary key. This can be confusing. Till this bug is fixed, we
        // will not allow foreign keys definition on such columns.
        try {
          if (!((FeFsTable) parentTable).getPrimaryKeyColumnNames().contains(pkCol)) {
            throw new AnalysisException(String.format("Parent column %s is not part of "
                + "primary key.", pkCol));
          }
        } catch (TException e) {
          // In local catalog mode, we do not aggressively load PK/FK information, a
          // call to getPrimaryKeyColumnNames() will try to selectively load PK/FK
          // information. Hence, TException is thrown only in local catalog mode.
          throw new AnalysisException("Failed to get primary key columns for "
              + fk.pkTableName);
        }
      }
      // We do not support ENABLE and VALIDATE.
      if (fk.isEnableCstr()) {
        throw new AnalysisException("ENABLE feature is not supported yet.");
      }
      if (fk.isValidateCstr()) {
        throw new AnalysisException("VALIDATE feature is not supported yet.");
      }
      if (fk.getFkConstraintName() == null) {
        fk.setConstraintName(generateConstraintName());
      }
      // One SQLForeignKey per referencing column; all share the constraint name.
      for (int i = 0; i < fk.getForeignKeyColNames().size(); i++) {
        SQLForeignKey sqlForeignKey = new SQLForeignKey();
        sqlForeignKey.setPktable_db(parentDb);
        sqlForeignKey.setPktable_name(fk.getPkTableName().getTbl());
        sqlForeignKey.setFktable_db(getTblName().getDb());
        sqlForeignKey.setFktable_name(getTbl());
        sqlForeignKey.setPkcolumn_name(fk.getPrimaryKeyColNames().get(i).toLowerCase());
        sqlForeignKey.setFk_name(fk.getFkConstraintName());
        sqlForeignKey.setKey_seq(i+1);
        sqlForeignKey.setFkcolumn_name(fk.getForeignKeyColNames().get(i).toLowerCase());
        sqlForeignKey.setRely_cstr(fk.isRelyCstr());
        getSqlForeignKeys().add(sqlForeignKey);
      }
    }
  }
/**
* Utility method to generate a unique constraint name when user does not specify one.
* TODO: Collisions possible? HMS doesn't have an API to query existing constraint
* names.
*/
private String generateConstraintName() {
return UUID.randomUUID().toString();
}
/**
* Analyzes the list of columns in 'sortCols' against the columns of 'table' and
* returns their matching positions in the table's columns. Each column of 'sortCols'
* must occur in 'table' as a non-partitioning column. 'table' must be an HDFS table.
* If there are errors during the analysis, this will throw an AnalysisException.
*/
public static List<Integer> analyzeSortColumns(List<String> sortCols, FeTable table,
TSortingOrder sortingOrder) throws AnalysisException {
Preconditions.checkState(table instanceof FeFsTable);
List<Type> columnTypes = table.getNonClusteringColumns().stream().map(
col -> col.getType()).collect(Collectors.toList());
return analyzeSortColumns(sortCols,
Column.toColumnNames(table.getNonClusteringColumns()),
Column.toColumnNames(table.getClusteringColumns()), columnTypes, sortingOrder);
}
/**
* Analyzes the list of columns in 'sortCols' and returns their matching positions in
* 'tableCols'. Each column must occur in 'tableCols' and must not occur in
* 'partitionCols'. If there are errors during the analysis, this will throw an
* AnalysisException.
*/
public static List<Integer> analyzeSortColumns(List<String> sortCols,
List<String> tableCols, List<String> partitionCols, List<Type> columnTypes,
TSortingOrder sortingOrder) throws AnalysisException {
// The index of each sort column in the list of table columns.
Set<Integer> colIdxs = new LinkedHashSet<>();
int numColumns = 0;
for (String sortColName: sortCols) {
++numColumns;
// Make sure it's not a partition column.
if (partitionCols.contains(sortColName)) {
throw new AnalysisException(String.format("SORT BY column list must not " +
"contain partition column: '%s'", sortColName));
}
// Determine the index of each sort column in the list of table columns.
boolean foundColumn = false;
for (int j = 0; j < tableCols.size(); ++j) {
if (tableCols.get(j).equalsIgnoreCase(sortColName)) {
if (colIdxs.contains(j)) {
throw new AnalysisException(String.format("Duplicate column in SORT BY " +
"list: %s", sortColName));
}
colIdxs.add(j);
foundColumn = true;
break;
}
}
if (!foundColumn) {
throw new AnalysisException(String.format("Could not find SORT BY column " +
"'%s' in table.", sortColName));
}
}
// Analyzing Z-Order specific constraints
if (sortingOrder == TSortingOrder.ZORDER) {
if (numColumns == 1) {
throw new AnalysisException(String.format("SORT BY ZORDER with 1 column is " +
"equivalent to SORT BY. Please, use the latter, if that was your " +
"intention."));
}
}
Preconditions.checkState(numColumns == colIdxs.size());
return Lists.newArrayList(colIdxs);
}
  /**
   * Analyzes the table options: table/serde property sizes, location, HDFS
   * caching, row format, forbidden sort-related table properties, bucketing
   * and SORT BY columns. Assumes options_ has already been set.
   */
  private void analyzeOptions(Analyzer analyzer) throws AnalysisException {
    MetaStoreUtil.checkShortPropertyMap("Property", options_.tblProperties);
    MetaStoreUtil.checkShortPropertyMap("Serde property", options_.serdeProperties);
    if (options_.location != null) {
      options_.location.analyze(analyzer, Privilege.ALL, FsAction.READ_WRITE);
    }
    if (options_.cachingOp != null) {
      options_.cachingOp.analyze(analyzer);
      // A location can only be cached if its file system supports caching.
      if (options_.cachingOp.shouldCache() && options_.location != null &&
          !FileSystemUtil.isPathCacheable(options_.location.getPath())) {
        throw new AnalysisException(String.format("Location '%s' cannot be cached. " +
            "Please retry without caching: CREATE TABLE ... UNCACHED",
            options_.location));
      }
    }
    // Analyze 'skip.header.line.format' property.
    AlterTableSetTblProperties.analyzeSkipHeaderLineCount(options_.tblProperties);
    analyzeRowFormat(analyzer);
    // Sort columns/order must be expressed via SORT BY, never via raw table
    // properties.
    String sortByKey = AlterTableSortByStmt.TBL_PROP_SORT_COLUMNS;
    String sortOrderKey = AlterTableSortByStmt.TBL_PROP_SORT_ORDER;
    if (options_.tblProperties.containsKey(sortByKey)) {
      throw new AnalysisException(String.format("Table definition must not contain the " +
          "%s table property. Use SORT BY (...) instead.", sortByKey));
    }
    if (options_.tblProperties.containsKey(sortOrderKey)) {
      throw new AnalysisException(String.format("Table definition must not contain the " +
          "%s table property. Use SORT BY %s (...) instead.", sortOrderKey,
          options_.sortingOrder.toString()));
    }
    // analyze bucket columns
    analyzeBucketColumns(options_.bucketInfo, getColumnNames(),
        getPartitionColumnNames());
    // Analyze sort columns (not supported for Kudu).
    if (options_.sortCols == null) return;
    if (isKuduTable()) {
      throw new AnalysisException(String.format("SORT BY is not supported for Kudu "+
          "tables."));
    }
    analyzeSortColumns(options_.sortCols, getColumnNames(), getPartitionColumnNames(),
        getColumnTypes(), options_.sortingOrder);
  }
  /**
   * Validates CLUSTERED BY (bucketing) info: the file format must support
   * bucketing, the bucket count must be positive, and every bucket column must
   * be an existing non-partition column listed at most once. No-op when
   * bucketing is absent.
   */
  private void analyzeBucketColumns(TBucketInfo bucketInfo, List<String> tableCols,
      List<String> partitionCols) throws AnalysisException {
    if (bucketInfo == null || bucketInfo.getBucket_type() == TBucketType.NONE) {
      return;
    }
    // Bucketed Table only support hdfs fileformat
    if (!isBucketableFormat()) {
      throw new AnalysisException(String.format("CLUSTERED BY not support fileformat: " +
          "'%s'", options_.fileFormat));
    }
    if (bucketInfo.getNum_bucket() <= 0) {
      throw new AnalysisException(String.format(
          "Bucket's number must be greater than 0."));
    }
    if (bucketInfo.getBucket_columns() == null
        || bucketInfo.getBucket_columns().size() == 0) {
      throw new AnalysisException(String.format(
          "Bucket columns must be not null."));
    }
    // The index of each bucket column in the list of table columns.
    Set<Integer> colIdxs = new LinkedHashSet<>();
    for (String bucketCol : bucketInfo.getBucket_columns()) {
      // Make sure it's not a partition column.
      if (partitionCols.contains(bucketCol)) {
        throw new AnalysisException(String.format("CLUSTERED BY column list must not " +
            "contain partition column: '%s'", bucketCol));
      }
      // Determine the index of each bucket column in the list of table columns
      // (case-insensitive match); duplicates are rejected.
      boolean foundColumn = false;
      for (int j = 0; j < tableCols.size(); ++j) {
        if (tableCols.get(j).equalsIgnoreCase(bucketCol)) {
          if (colIdxs.contains(j)) {
            throw new AnalysisException(String.format("Duplicate column in CLUSTERED " +
                "BY list: %s", bucketCol));
          }
          colIdxs.add(j);
          foundColumn = true;
          break;
        }
      }
      if (!foundColumn) {
        throw new AnalysisException(String.format("Could not find CLUSTERED BY column " +
            "'%s' in table.", bucketCol));
      }
    }
  }
  /**
   * Validates ROW FORMAT DELIMITED clauses. Rejected for Kudu tables. For TEXT
   * tables, defaults are applied for missing delimiters, equal field and line
   * delimiters are an error, and an escape character equal to either delimiter
   * only produces a warning.
   */
  private void analyzeRowFormat(Analyzer analyzer) throws AnalysisException {
    if (options_.rowFormat == null) return;
    if (isKuduTable()) {
      throw new AnalysisException(String.format(
          "ROW FORMAT cannot be specified for file format %s.", options_.fileFormat));
    }
    Byte fieldDelim = analyzeRowFormatValue(options_.rowFormat.getFieldDelimiter());
    Byte lineDelim = analyzeRowFormatValue(options_.rowFormat.getLineDelimiter());
    Byte escapeChar = analyzeRowFormatValue(options_.rowFormat.getEscapeChar());
    if (options_.fileFormat == THdfsFileFormat.TEXT) {
      // Fill in defaults for any delimiter the user did not specify.
      if (fieldDelim == null) fieldDelim = HdfsStorageDescriptor.DEFAULT_FIELD_DELIM;
      if (lineDelim == null) lineDelim = HdfsStorageDescriptor.DEFAULT_LINE_DELIM;
      if (escapeChar == null) escapeChar = HdfsStorageDescriptor.DEFAULT_ESCAPE_CHAR;
      if (fieldDelim.equals(lineDelim)) {
        throw new AnalysisException("Field delimiter and line delimiter have same " +
            "value: byte " + fieldDelim);
      }
      if (fieldDelim.equals(escapeChar)) {
        analyzer.addWarning("Field delimiter and escape character have same value: " +
            "byte " + fieldDelim + ". Escape character will be ignored");
      }
      if (lineDelim.equals(escapeChar)) {
        analyzer.addWarning("Line delimiter and escape character have same value: " +
            "byte " + lineDelim + ". Escape character will be ignored");
      }
    }
  }
private Byte analyzeRowFormatValue(String value) throws AnalysisException {
if (value == null) return null;
Byte byteVal = HdfsStorageDescriptor.parseDelim(value);
if (byteVal == null) {
throw new AnalysisException("ESCAPED BY values and LINE/FIELD " +
"terminators must be specified as a single character or as a decimal " +
"value in the range [-128:127]: " + value);
}
return byteVal;
}
  /**
   * Analyzes Hive ACID related properties.
   * Can change table properties based on query options.
   *
   * Transactional table properties are rejected for EXTERNAL, Kudu and Iceberg
   * tables. For all other (managed) tables, the default transactional type from
   * the query options is applied to the table properties.
   */
  private void analyzeAcidProperties(Analyzer analyzer) throws AnalysisException {
    if (isExternal_) {
      if (AcidUtils.isTransactionalTable(options_.tblProperties)) {
        throw new AnalysisException("EXTERNAL tables cannot be transactional");
      }
      return;
    }
    if (options_.fileFormat == THdfsFileFormat.KUDU) {
      if (AcidUtils.isTransactionalTable(options_.tblProperties)) {
        throw new AnalysisException("Kudu tables cannot be transactional");
      }
      return;
    }
    if (options_.fileFormat == THdfsFileFormat.ICEBERG) {
      if (AcidUtils.isTransactionalTable(options_.tblProperties)) {
        throw new AnalysisException(
            "Iceberg tables cannot have Hive ACID table properties.");
      }
      return;
    }
    // Managed, non-Kudu/Iceberg table: apply the session's default
    // transactional type to the table properties.
    AcidUtils.setTransactionalProperties(options_.tblProperties,
        analyzer.getQueryOptions().getDefault_transactional_type());
  }
}
|
google/copybara | 37,655 | java/com/google/copybara/git/GitHubPrOrigin.java | /*
* Copyright (C) 2017 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.copybara.git;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.copybara.exception.ValidationException.checkCondition;
import static com.google.copybara.git.github.util.GitHubUtil.asHeadRef;
import static com.google.copybara.git.github.util.GitHubUtil.asMergeRef;
import static java.util.stream.Collectors.joining;
import com.google.auto.value.AutoValue;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.CharMatcher;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.Uninterruptibles;
import com.google.copybara.BaselinesWithoutLabelVisitor;
import com.google.copybara.Endpoint;
import com.google.copybara.GeneralOptions;
import com.google.copybara.Origin;
import com.google.copybara.approval.ApprovalsProvider;
import com.google.copybara.authoring.Authoring;
import com.google.copybara.checks.Checker;
import com.google.copybara.exception.CannotResolveRevisionException;
import com.google.copybara.exception.EmptyChangeException;
import com.google.copybara.exception.RepoException;
import com.google.copybara.exception.ValidationException;
import com.google.copybara.git.GitOrigin.ReaderImpl;
import com.google.copybara.git.GitOrigin.SubmoduleStrategy;
import com.google.copybara.git.GitRepository.GitLogEntry;
import com.google.copybara.git.github.api.AuthorAssociation;
import com.google.copybara.git.github.api.CheckRun;
import com.google.copybara.git.github.api.CombinedStatus;
import com.google.copybara.git.github.api.GitHubApi;
import com.google.copybara.git.github.api.GitHubApi.IssuesAndPullRequestsSearchRequestParams;
import com.google.copybara.git.github.api.Issue;
import com.google.copybara.git.github.api.IssuesAndPullRequestsSearchResults;
import com.google.copybara.git.github.api.Label;
import com.google.copybara.git.github.api.PullRequest;
import com.google.copybara.git.github.api.Review;
import com.google.copybara.git.github.api.Status;
import com.google.copybara.git.github.api.Status.State;
import com.google.copybara.git.github.api.User;
import com.google.copybara.git.github.util.GitHubHost;
import com.google.copybara.git.github.util.GitHubHost.GitHubPrUrl;
import com.google.copybara.git.github.util.GitHubUtil;
import com.google.copybara.profiler.Profiler.ProfilerTask;
import com.google.copybara.revision.Change;
import com.google.copybara.transform.patch.PatchTransformation;
import com.google.copybara.util.Glob;
import com.google.copybara.util.console.Console;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
/**
* A class for reading GitHub Pull Requests
*/
public class GitHubPrOrigin implements Origin<GitRevision> {
  // Retry count for retried operations (usage is outside this chunk — verify).
  static final int RETRY_COUNT = 3;
  // Label names populated from pull request metadata for use by the workflow.
  public static final String GITHUB_PR_NUMBER_LABEL = "GITHUB_PR_NUMBER";
  public static final String GITHUB_BASE_BRANCH = "GITHUB_BASE_BRANCH";
  public static final String GITHUB_BASE_BRANCH_SHA1 = "GITHUB_BASE_BRANCH_SHA1";
  public static final String GITHUB_PR_USE_MERGE = "GITHUB_PR_USE_MERGE";
  public static final String GITHUB_PR_TITLE = "GITHUB_PR_TITLE";
  public static final String GITHUB_PR_URL = "GITHUB_PR_URL";
  public static final String GITHUB_PR_BODY = "GITHUB_PR_BODY";
  public static final String GITHUB_PR_USER = "GITHUB_PR_USER";
  public static final String GITHUB_PR_ASSIGNEE = "GITHUB_PR_ASSIGNEE";
  public static final String GITHUB_PR_REVIEWER_APPROVER = "GITHUB_PR_REVIEWER_APPROVER";
  public static final String GITHUB_PR_REVIEWER_OTHER = "GITHUB_PR_REVIEWER_OTHER";
  public static final String GITHUB_PR_REQUESTED_REVIEWER = "GITHUB_PR_REQUESTED_REVIEWER";
  // Local ref names for the PR head/merge/base commits (consumers of these
  // refs are outside this chunk).
  private static final String LOCAL_PR_HEAD_REF = "refs/PR_HEAD";
  public static final String GITHUB_PR_HEAD_SHA = "GITHUB_PR_HEAD_SHA";
  private static final String LOCAL_PR_MERGE_REF = "refs/PR_MERGE";
  private static final String LOCAL_PR_BASE_BRANCH = "refs/PR_BASE_BRANCH";
  // Origin configuration, captured verbatim from the constructor arguments.
  private final String url;
  private final boolean useMerge;
  private final GeneralOptions generalOptions;
  private final GitOptions gitOptions;
  private final GitOriginOptions gitOriginOptions;
  private final GitHubOptions gitHubOptions;
  private final Set<String> requiredLabelsField;
  private final Set<String> requiredStatusContextNamesField;
  private final Set<String> requiredCheckRunsField;
  private final Set<String> retryableLabelsField;
  private final SubmoduleStrategy submoduleStrategy;
  private final List<String> excludedSubmodules;
  private final Console console;
  private final boolean baselineFromBranch;
  private final Boolean firstParent;
  private final Boolean partialFetch;
  private final StateFilter requiredState;
  @Nullable private final ReviewState reviewState;
  private final ImmutableSet<AuthorAssociation> reviewApprovers;
  @Nullable private final Checker endpointChecker;
  @Nullable private final PatchTransformation patchTransformation;
  @Nullable private final String branch;
  private final boolean describeVersion;
  private final GitHubHost ghHost;
  private final GitHubPrOriginOptions gitHubPrOriginOptions;
  private final ApprovalsProvider provider;
  @Nullable private final CredentialFileHandler credentials;
  @Nullable private final GitRepositoryHook gitRepositoryHook;
/**
 * Creates a GitHub PR origin.
 *
 * <p>Parameters annotated {@code @Nullable} ({@code reviewState}, {@code endpointChecker},
 * {@code patchTransformation}, {@code branch}, {@code credentials}, {@code gitRepositoryHook})
 * are optional features. {@code firstParent}/{@code partialClone} are boxed and may be null.
 * All other reference parameters are required and are now null-checked eagerly so a broken
 * caller fails here instead of at first use.
 */
GitHubPrOrigin(
    String url,
    boolean useMerge,
    GeneralOptions generalOptions,
    GitOptions gitOptions,
    GitOriginOptions gitOriginOptions,
    GitHubOptions gitHubOptions,
    GitHubPrOriginOptions gitHubPrOriginOptions,
    Set<String> requiredLabels,
    Set<String> requiredStatusContextNames,
    Set<String> requiredCheckRuns,
    Set<String> retryableLabels,
    SubmoduleStrategy submoduleStrategy,
    List<String> excludedSubmodules,
    boolean baselineFromBranch,
    Boolean firstParent,
    Boolean partialClone,
    StateFilter requiredState,
    @Nullable ReviewState reviewState,
    ImmutableSet<AuthorAssociation> reviewApprovers,
    @Nullable Checker endpointChecker,
    @Nullable PatchTransformation patchTransformation,
    @Nullable String branch,
    boolean describeVersion,
    GitHubHost ghHost,
    ApprovalsProvider provider,
    @Nullable CredentialFileHandler credentials,
    @Nullable GitRepositoryHook gitRepositoryHook) {
  this.url = checkNotNull(url);
  this.useMerge = useMerge;
  this.generalOptions = checkNotNull(generalOptions);
  this.gitOptions = checkNotNull(gitOptions);
  this.gitOriginOptions = checkNotNull(gitOriginOptions);
  // Previously unchecked; consistent with the sibling options fields.
  this.gitHubOptions = checkNotNull(gitHubOptions);
  // Normalized from Preconditions.checkNotNull to the statically imported form used elsewhere.
  this.gitHubPrOriginOptions = checkNotNull(gitHubPrOriginOptions);
  this.requiredLabelsField = checkNotNull(requiredLabels);
  this.requiredStatusContextNamesField = checkNotNull(requiredStatusContextNames);
  this.requiredCheckRunsField = checkNotNull(requiredCheckRuns);
  this.retryableLabelsField = checkNotNull(retryableLabels);
  this.submoduleStrategy = checkNotNull(submoduleStrategy);
  this.excludedSubmodules = checkNotNull(excludedSubmodules);
  this.console = generalOptions.console();
  this.baselineFromBranch = baselineFromBranch;
  this.firstParent = firstParent;
  this.partialFetch = partialClone;
  this.requiredState = checkNotNull(requiredState);
  this.reviewState = reviewState;
  this.reviewApprovers = checkNotNull(reviewApprovers);
  this.endpointChecker = endpointChecker;
  this.patchTransformation = patchTransformation;
  this.branch = branch;
  this.describeVersion = describeVersion;
  this.ghHost = checkNotNull(ghHost);
  this.provider = checkNotNull(provider);
  this.credentials = credentials;
  this.gitRepositoryHook = gitRepositoryHook;
}
/**
 * Resolves a command-line reference to the PR revision to migrate.
 *
 * <p>Accepted formats: a full commit SHA-1 (GitHub's 'status' webhook only provides the SHA),
 * a full PR https url, a bare PR number, or a {@code refs/pull/NNN/head} ref.
 *
 * @throws CannotResolveRevisionException if the reference matches none of the formats
 */
@Override
public GitRevision resolve(String reference) throws RepoException, ValidationException {
  checkCondition(reference != null, ""
      + "A pull request reference is expected as argument in the command line."
      + " Invoke copybara as:\n"
      + " copybara copy.bara.sky workflow_name 12345");
  console.progress("GitHub PR Origin: Resolving reference " + reference);
  String configProjectName = ghHost.getProjectNameFromUrl(url);
  // GitHub's commit 'status' webhook provides only the commit SHA
  if (GitRevision.COMPLETE_SHA1_PATTERN.matcher(reference).matches()) {
    PullRequest pr = getPrFromSha(configProjectName, reference);
    return getRevisionForPR(configProjectName, pr);
  }
  // A whole https pull request url
  Optional<GitHubPrUrl> githubPrUrl = ghHost.maybeParseGithubPrUrl(reference);
  if (githubPrUrl.isPresent()) {
    checkCondition(
        githubPrUrl.get().getProject().equals(configProjectName),
        "Project name should be '%s' but it is '%s' instead", configProjectName,
        githubPrUrl.get().getProject());
    return getRevisionForPR(
        configProjectName, getPrFromNumber(configProjectName, githubPrUrl.get().getPrNumber()));
  }
  // A Pull request number
  if (CharMatcher.digit().matchesAllOf(reference)) {
    // Reuse the project name computed above instead of re-parsing the url.
    return getRevisionForPR(
        configProjectName,
        getPrFromNumber(configProjectName, Integer.parseInt(reference)));
  }
  // refs/pull/12345/head
  Optional<Integer> prNumber = GitHubUtil.maybeParseGithubPrFromHeadRef(reference);
  if (prNumber.isPresent()) {
    return getRevisionForPR(
        configProjectName, getPrFromNumber(configProjectName, prNumber.get()));
  }
  throw new CannotResolveRevisionException(
      String.format(
          "'%s' is not a valid reference for a GitHub Pull Request. Valid formats:"
              + "'https://github.com/project/pull/1234', 'refs/pull/1234/head' or '1234'",
          reference));
}
/**
 * Resolves the last-migrated revision string back to a GitRevision. Only the leading
 * token (before any space) is expected to be a full SHA-1.
 */
@Override
public GitRevision resolveLastRev(String reference) throws RepoException, ValidationException {
  String candidate = Splitter.on(" ").split(reference).iterator().next();
  if (!GitRevision.COMPLETE_SHA1_PATTERN.matcher(candidate).matches()) {
    throw new CannotResolveRevisionException(String.format("'%s' is not a valid SHA.", reference));
  }
  // Note that this might not work if the PR is for a different branch than the imported to
  // the destination. But in this case we cannot do that much apart from --force.
  GitRepository repo = getRepository();
  return new GitRevision(repo, repo.parseRef(candidate));
}
/** Delegates ancestor-ref resolution to the shared GitOrigin implementation. */
@Override
public GitRevision resolveAncestorRef(String ancestorRef, GitRevision descendantRev)
    throws ValidationException, RepoException {
  GitRepository repo = getRepository();
  return GitOrigin.resolveAncestorRef(this, repo, ancestorRef, descendantRev);
}
/** Returns the textual diff between the two revisions, computed by the cached repository. */
@Override
@Nullable
public String showDiff(GitRevision revisionFrom, GitRevision revisionTo) throws RepoException {
  String fromSha = checkNotNull(revisionFrom, "revisionFrom should not be null").getSha1();
  String toSha = checkNotNull(revisionTo, "revisionTo should not be null").getSha1();
  return getRepository().showDiff(fromSha, toSha);
}
/**
 * Given a commit SHA, use the GitHub search API to (try to) find the corresponding PR.
 *
 * <p>Only a PR whose state is accepted by {@code requiredState} and whose head commit equals
 * the SHA qualifies. The search usually yields one PR, but several can match in rare cases;
 * the first acceptable one wins.
 */
private PullRequest getPrFromSha(String project, String sha)
    throws RepoException, ValidationException {
  GitHubApi gitHubApi = gitHubOptions.newGitHubRestApi(project, credentials);
  IssuesAndPullRequestsSearchResults searchResults =
      gitHubApi.getIssuesOrPullRequestsSearchResults(
          new IssuesAndPullRequestsSearchRequestParams(
              project,
              sha,
              IssuesAndPullRequestsSearchRequestParams.Type.PULL_REQUEST,
              IssuesAndPullRequestsSearchRequestParams.State.OPEN));
  ImmutableList<Long> candidateNumbers =
      searchResults.getItems().stream()
          .map(result -> result.getNumber())
          .collect(toImmutableList());
  for (Long candidate : candidateNumbers) {
    PullRequest pullRequest = gitHubApi.getPullRequest(project, candidate);
    if (!requiredState.accepts(pullRequest)) {
      continue;
    }
    if (pullRequest.getHead().getSha().equals(sha)) {
      return pullRequest;
    }
  }
  String stateClause;
  if (requiredState == StateFilter.ALL) {
    stateClause = "";
  } else {
    stateClause = requiredState + " state and ";
  }
  throw new EmptyChangeException(
      String.format("Could not find a pr with %shead being equal to sha %s", stateClause, sha));
}
/** Given a PR number, use the GitHub API to look up the PR info (timed by the profiler). */
private PullRequest getPrFromNumber(String project, long prNumber)
    throws RepoException, ValidationException {
  try (ProfilerTask ignore = generalOptions.profiler().start("github_api_get_pr")) {
    GitHubApi api = gitHubOptions.newGitHubRestApi(project, credentials);
    return api.getPullRequest(project, prNumber);
  }
}
/**
 * Validates the PR against all configured gates, fetches its head (and, if use_merge, its
 * merge ref) plus its base branch into local refs, and builds the GitRevision to migrate,
 * stamped with the GITHUB_PR_* labels.
 */
private GitRevision getRevisionForPR(String project, PullRequest prData)
    throws RepoException, ValidationException {
  GitHubApi api = gitHubOptions.newGitHubRestApi(project, credentials);
  int prNumber = (int) prData.getNumber();
  // May be downgraded to false below when the merge ref is unavailable and --force is set.
  boolean actuallyUseMerge = this.useMerge;
  ImmutableListMultimap.Builder<String, String> labels = ImmutableListMultimap.builder();
  // Pre-flight gates: each throws (usually EmptyChangeException) when the PR doesn't qualify,
  // unless forceImport() overrides.
  checkPrState(prData);
  checkPrBranch(project, prData);
  checkRequiredLabels(api, project, prData);
  checkRequiredStatusContextNames(api, project, prData);
  checkRequiredCheckRuns(api, project, prData);
  // Also populates the reviewer labels in 'labels'.
  checkReviewApprovers(api, project, prData, labels);
  // Fetch also the baseline branch. It is almost free and doing a roundtrip later would hurt
  // latency.
  console.progressFmt("Fetching Pull Request %d and branch '%s'",
      prNumber, prData.getBase().getRef());
  ImmutableList.Builder<String> refSpecBuilder = ImmutableList.<String>builder()
      .add(String.format("%s:%s", asHeadRef(prNumber), LOCAL_PR_HEAD_REF))
      // Prefix the branch name with 'refs/heads/' since some implementations of
      // GitRepository need the whole reference name.
      .add(String.format("refs/heads/%s:" + LOCAL_PR_BASE_BRANCH, prData.getBase().getRef()));
  if (actuallyUseMerge) {
    // isMergeable() is TRUE or null here (null = GitHub hasn't computed mergeability yet);
    // fetch the merge ref optimistically.
    if (!Boolean.FALSE.equals(prData.isMergeable())) {
      refSpecBuilder.add(String.format("%s:%s", asMergeRef(prNumber), LOCAL_PR_MERGE_REF));
    } else if (forceImport()) {
      console.warnFmt(
          "PR %d is not mergeable, but continuing with PR Head instead because of %s",
          prNumber,
          GeneralOptions.FORCE);
      actuallyUseMerge = false;
    } else {
      throw new CannotResolveRevisionException(
          String.format(
              "Cannot find a merge reference for Pull Request %d."
                  + " It might have a conflict with head.",
              prNumber));
    }
  }
  ImmutableList<String> refspec = refSpecBuilder.build();
  try (ProfilerTask ignore = generalOptions.profiler().start("fetch")) {
    getRepository()
        .fetch(
            ghHost.projectAsUrl(project),
            /* prune= */ false,
            /* force= */ true,
            refspec,
            partialFetch,
            Optional.empty(),
            false);
  } catch (CannotResolveRevisionException e) {
    // The optimistic merge-ref fetch (mergeability unknown) may be the cause; with --force,
    // retry once without the merge ref (it was appended last, so drop the final refspec).
    if (actuallyUseMerge && prData.isMergeable() == null && forceImport()) {
      // We can perhaps recover by fetching without the merge ref
      actuallyUseMerge = false;
      refspec = refspec.subList(0, refspec.size() - 1);
      try (ProfilerTask ignore = generalOptions.profiler().start("fetch")) {
        getRepository()
            .fetch(
                ghHost.projectAsUrl(project),
                /* prune= */ false,
                /* force= */ true,
                refspec,
                partialFetch,
                Optional.empty(),
                false);
        // Retry succeeded: clear the original failure so we fall through below.
        e = null;
      } catch (CannotResolveRevisionException e2) {
        // Report the error from the second fetch instead of the original fetch
        e = e2;
      }
    }
    if (e != null) {
      if (actuallyUseMerge) {
        String msg =
            String.format("Cannot find a merge reference for Pull Request %d.", prNumber);
        if (Boolean.TRUE.equals(prData.isMergeable())) {
          msg += " GitHub reported that this merge reference should exist.";
        }
        throw new CannotResolveRevisionException(msg, e);
      } else {
        throw new CannotResolveRevisionException(
            String.format("Cannot find Pull Request %d.", prNumber), e);
      }
    }
  }
  // Migrate the merge commit when requested and available; otherwise the PR head.
  String refForMigration = actuallyUseMerge ? LOCAL_PR_MERGE_REF : LOCAL_PR_HEAD_REF;
  GitRevision gitRevision = getRepository().resolveReference(refForMigration);
  String headPrSha1 = getRepository().resolveReference(LOCAL_PR_HEAD_REF).getSha1();
  String integrateLabel = new GitHubPrIntegrateLabel(getRepository(), generalOptions,
      project, prNumber,
      prData.getHead().getLabel(),
      // The integrate SHA has to be HEAD of the PR not the merge ref, even if use_merge = True
      headPrSha1)
      .toString();
  labels.putAll(
      GITHUB_PR_REQUESTED_REVIEWER,
      prData.getRequestedReviewers().stream().map(User::getLogin).collect(toImmutableList()));
  labels.put(GITHUB_PR_NUMBER_LABEL, Integer.toString(prNumber));
  labels.put(GitModule.DEFAULT_INTEGRATE_LABEL, integrateLabel);
  labels.put(GITHUB_BASE_BRANCH, prData.getBase().getRef());
  labels.put(GITHUB_PR_HEAD_SHA, headPrSha1);
  labels.put(GITHUB_PR_USE_MERGE, Boolean.toString(actuallyUseMerge));
  // Merge-base with the PR base branch; consumed by findBaselinesWithoutLabel() in newReader().
  String mergeBase = getRepository().mergeBase(refForMigration, LOCAL_PR_BASE_BRANCH);
  labels.put(GITHUB_BASE_BRANCH_SHA1, mergeBase);
  labels.put(GITHUB_PR_TITLE, prData.getTitle());
  labels.put(GITHUB_PR_BODY, prData.getBody());
  labels.put(GITHUB_PR_URL, prData.getHtmlUrl());
  labels.put(GITHUB_PR_USER, prData.getUser().getLogin());
  labels.putAll(GITHUB_PR_ASSIGNEE, prData.getAssignees().stream()
      .map(User::getLogin)
      .collect(Collectors.toList()));
  GitRevision result = new GitRevision(
      getRepository(),
      gitRevision.getSha1(),
      // TODO(malcon): Decide the format to use here:
      /*reviewReference=*/null,
      actuallyUseMerge ? asMergeRef(prNumber) : asHeadRef(prNumber),
      labels.build(),
      url);
  return describeVersion ? getRepository().addDescribeVersion(result) : result;
}
/**
 * Check that the state of the PR (i.e. {open,closed}) matches the provided value of the
 * `state` param. Skipped entirely when --force / force import is set.
 */
private void checkPrState(PullRequest prData) throws ValidationException {
  if (forceImport()) {
    return;
  }
  if (!requiredState.accepts(prData)) {
    throw new EmptyChangeException(
        String.format("Pull Request %d is %s", prData.getNumber(), prData.getState()));
  }
}
/**
 * Check that the base branch of the PR matches the `branch` param, when one was configured.
 * Skipped when --force / force import is set or no branch was configured.
 */
private void checkPrBranch(String project, PullRequest prData) throws ValidationException {
  if (forceImport() || branch == null) {
    return;
  }
  String baseRef = prData.getBase().getRef();
  if (Objects.equals(baseRef, branch)) {
    return;
  }
  throw new EmptyChangeException(
      String.format(
          "Cannot migrate http://github.com/%s/pull/%d because its base branch is '%s', but"
              + " the workflow is configured to only migrate changes for branch '%s'",
          project, prData.getNumber(), baseRef, branch));
}
/**
 * Check that the PR has all the labels provided in the `required_labels` param.
 *
 * <p>Labels listed in `retryable_labels` may be applied asynchronously, so while any missing
 * label is retryable the check polls the issue again (2s sleep, up to RETRY_COUNT attempts)
 * before giving up.
 */
private void checkRequiredLabels(GitHubApi api, String project, PullRequest prData)
    throws ValidationException, RepoException {
  Set<String> requiredLabels = getRequiredLabels();
  Set<String> retryableLabels = getRetryableLabels();
  if (forceImport() || requiredLabels.isEmpty()) {
    return;
  }
  int retryCount = 0;
  Set<String> requiredButNotPresent;
  do {
    Issue issue;
    try (ProfilerTask ignore = generalOptions.profiler().start("github_api_get_issue")) {
      // Labels live on the issue object, not the pull request object.
      issue = api.getIssue(project, prData.getNumber());
    }
    requiredButNotPresent = Sets.newHashSet(requiredLabels);
    requiredButNotPresent.removeAll(Collections2.transform(issue.getLabels(), Label::getName));
    // If we got all the labels we want or none of the ones we didn't get are retryable, return.
    if (requiredButNotPresent.isEmpty()
        || Collections.disjoint(requiredButNotPresent, retryableLabels)) {
      break;
    }
    Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS);
    retryCount++;
  } while (retryCount < RETRY_COUNT);
  if (!requiredButNotPresent.isEmpty()) {
    throw new EmptyChangeException(
        String.format(
            "Cannot migrate http://github.com/%s/pull/%d because it is missing the following"
                + " labels: %s",
            project, prData.getNumber(), requiredButNotPresent));
  }
}
/**
 * Check that the PR has a state of "success" for each status whose context is in the list
 * provided in the `required_status_context_names` param. Skipped when forced or unconfigured.
 */
private void checkRequiredStatusContextNames(GitHubApi api, String project, PullRequest prData)
    throws ValidationException, RepoException {
  Set<String> required = getRequiredStatusContextNames();
  if (forceImport() || required.isEmpty()) {
    return;
  }
  try (ProfilerTask ignore = generalOptions.profiler()
      .start("github_api_get_combined_status")) {
    CombinedStatus combinedStatus = api.getCombinedStatus(project, prData.getHead().getSha());
    // Start with every required context and cross off those with a SUCCESS status.
    Set<String> missing = Sets.newHashSet(required);
    for (Status status : combinedStatus.getStatuses()) {
      if (status.getState() == State.SUCCESS) {
        missing.remove(status.getContext());
      }
    }
    if (!missing.isEmpty()) {
      throw new EmptyChangeException(
          String.format(
              "Cannot migrate http://github.com/%s/pull/%d because the following ci labels "
                  + "have not been passed: %s",
              project, prData.getNumber(), missing));
    }
  }
}
/**
 * Check that the PR has a conclusion of "success" for each check_run whose name is in the list
 * provided in the `required_check_runs` param. Skipped when forced or unconfigured.
 */
private void checkRequiredCheckRuns(GitHubApi api, String project, PullRequest prData)
    throws ValidationException, RepoException {
  Set<String> requiredCheckRuns = getRequiredCheckRuns();
  if (forceImport() || requiredCheckRuns.isEmpty()) {
    return;
  }
  try (ProfilerTask ignore = generalOptions.profiler()
      .start("github_api_get_combined_status")) {
    ImmutableList<CheckRun> checkRuns =
        api.getCheckRuns(project, prData.getHead().getSha());
    Set<String> requiredButNotPresent = Sets.newHashSet(requiredCheckRuns);
    List<CheckRun> passedCheckRuns =
        checkRuns.stream()
            // Null-safe: a queued/in-progress check run has no conclusion yet, and the
            // previous e.getConclusion().equals("success") threw NPE in that case. Such a
            // run is simply "not passed".
            .filter(e -> "success".equals(e.getConclusion()))
            .collect(Collectors.toList());
    requiredButNotPresent.removeAll(Collections2.transform(passedCheckRuns, CheckRun::getName));
    if (!requiredButNotPresent.isEmpty()) {
      throw new EmptyChangeException(
          String.format(
              "Cannot migrate http://github.com/%s/pull/%d because the following check runs "
                  + "have not been passed: %s",
              project, prData.getNumber(), requiredButNotPresent));
    }
  }
}
/**
 * Check that the PR has been approved by sufficient reviewers of the correct types, in
 * accordance with the values provided in the `review_state` and `review_approvers` params.
 *
 * <p>Also populates {@code labelsBuilder} with GITHUB_PR_REVIEWER_APPROVER /
 * GITHUB_PR_REVIEWER_OTHER: reviewer logins split by whether their author association is an
 * accepted approver type. No-op when `review_state` is not configured.
 */
private void checkReviewApprovers(
    GitHubApi api,
    String project,
    PullRequest prData,
    ImmutableListMultimap.Builder<String, String> labelsBuilder)
    throws ValidationException, RepoException {
  if (reviewState == null) {
    return;
  }
  ImmutableList<Review> reviews = api.getReviews(project, prData.getNumber());
  ApproverState approverState =
      reviewState.shouldMigrate(reviews, reviewApprovers, prData.getHead().getSha());
  if (!forceImport() && !approverState.shouldMigrate()) {
    // Explain which reviews were discarded for not matching review_approvers, if any.
    String rejected = "";
    if (!approverState.rejectedReviews().isEmpty()) {
      rejected =
          String.format(
              "\nThe following reviews were ignored because they don't meet "
                  + "the association requirement of %s:\n%s",
              Joiner.on(", ").join(reviewApprovers),
              approverState.rejectedReviews().entries().stream()
                  .map(e -> String.format("User %s - Association: %s", e.getKey(), e.getValue()))
                  .collect(joining("\n")));
    }
    throw new EmptyChangeException(
        String.format(
            "Cannot migrate http://github.com/%s/pull/%d because it is missing the required"
                + " approvals (origin is configured as %s).%s",
            project, prData.getNumber(), reviewState, rejected));
  }
  // Label every reviewer, split by approver/other; runs even when forced so the labels exist.
  Set<String> approvers = new HashSet<>();
  Set<String> others = new HashSet<>();
  for (Review review : reviews) {
    if (reviewApprovers.contains(review.getAuthorAssociation())) {
      approvers.add(review.getUser().getLogin());
    } else {
      others.add(review.getUser().getLogin());
    }
  }
  labelsBuilder.putAll(GITHUB_PR_REVIEWER_APPROVER, approvers);
  labelsBuilder.putAll(GITHUB_PR_REVIEWER_OTHER, others);
}
/**
 * Returns the cached bare repository for this origin's url, installing the credential
 * handler into it first when one is configured.
 */
@VisibleForTesting
public GitRepository getRepository() throws RepoException {
  GitRepository repo = gitOptions.cachedBareRepoForUrl(url);
  if (credentials == null) {
    return repo;
  }
  try {
    credentials.install(repo, gitOptions.getConfigCredsFile(generalOptions));
  } catch (IOException e) {
    throw new RepoException("Unable to store credentials", e);
  }
  return repo;
}
/**
 * Returns a change reader for PR revisions: a ReaderImpl specialization that disables rebase
 * (use_merge controls merging instead), can take baselines from the PR base branch, and lists
 * changes correctly across the synthetic merge commit.
 */
@Override
public Reader<GitRevision> newReader(Glob originFiles, Authoring authoring)
    throws ValidationException {
  return new ReaderImpl(
      url,
      originFiles,
      authoring,
      gitOptions,
      gitOriginOptions,
      generalOptions,
      /* includeBranchCommitLogs= */ false,
      submoduleStrategy,
      excludedSubmodules,
      firstParent,
      partialFetch,
      patchTransformation,
      /* configPath= */ null,
      /* workflowName= */ null,
      credentials,
      gitRepositoryHook) {
    /** Disable rebase since this is controlled by useMerge field. */
    @Override
    protected void maybeRebase(GitRepository repo, GitRevision ref, Path workdir)
        throws RepoException, CannotResolveRevisionException {}

    @Override
    public Optional<Baseline<GitRevision>> findBaseline(GitRevision startRevision, String label)
        throws RepoException, ValidationException {
      if (!baselineFromBranch) {
        return super.findBaseline(startRevision, label);
      }
      // baseline_from_branch = True: derive the baseline from the base branch merge-base
      // instead of a label in commit messages.
      return findBaselinesWithoutLabel(startRevision, /* limit= */ 1).stream()
          .map(e -> new Baseline<>(e.getSha1(), e))
          .findFirst();
    }

    @Override
    public ImmutableList<GitRevision> findBaselinesWithoutLabel(
        GitRevision startRevision, int limit) throws RepoException, ValidationException {
      // GITHUB_BASE_BRANCH_SHA1 was stamped in getRevisionForPR() with the merge-base SHA.
      String baseline =
          Iterables.getLast(startRevision.associatedLabels().get(GITHUB_BASE_BRANCH_SHA1), null);
      checkNotNull(
          baseline, "%s label should be present in %s", GITHUB_BASE_BRANCH_SHA1, startRevision);
      GitRevision baselineRev = getRepository().resolveReference(baseline);
      // Don't skip the first change as it is already the baseline
      BaselinesWithoutLabelVisitor<GitRevision> visitor =
          new BaselinesWithoutLabelVisitor<>(
              originFiles, limit, Optional.empty(), /* skipFirst= */ false);
      visitChanges(baselineRev, visitor);
      return visitor.getResult();
    }

    @Override
    public Endpoint getFeedbackEndPoint(Console console) throws ValidationException {
      gitHubOptions.validateEndpointChecker(endpointChecker);
      return new GitHubEndPoint(
          gitHubOptions.newGitHubApiSupplier(url, endpointChecker, credentials, ghHost),
          url,
          console,
          ghHost,
          credentials);
    }

    /**
     * Deal with the case of useMerge. We have a new commit (the merge) and first-parent from
     * that commit doesn't work for this case.
     */
    @Override
    public ChangesResponse<GitRevision> changes(@Nullable GitRevision fromRef, GitRevision toRef)
        throws RepoException, ValidationException {
      checkCondition(
          toRef.associatedLabels().containsKey(GITHUB_PR_USE_MERGE),
          "Cannot determine whether 'use_merge' was set.");
      // use_merge effectively false for this revision: plain change listing works.
      if (toRef.associatedLabel(GITHUB_PR_USE_MERGE).contains("false")) {
        return super.changes(fromRef, toRef);
      }
      GitLogEntry merge =
          Iterables.getOnlyElement(getRepository().log(toRef.getSha1()).withLimit(1).run());
      // Fast-forward merge
      if (merge.parents().size() == 1) {
        return super.changes(fromRef, toRef);
      }
      // HEAD of the Pull Request (second parent of the merge commit)
      GitRevision gitRevision = merge.parents().get(1).withLabels(toRef.associatedLabels());
      ChangesResponse<GitRevision> prChanges = super.changes(fromRef, gitRevision);
      // Merge might have an effect, but we are not interested on it if the PR doesn't touch
      // origin_files
      if (prChanges.isEmpty()) {
        return prChanges;
      }
      try {
        return ChangesResponse.forChanges(
            ImmutableList.<Change<GitRevision>>builder()
                .addAll(prChanges.getChanges())
                // merge commit is sourced from git log which doesn't have url context
                .add(change(merge.commit().withUrl(url)))
                .build());
      } catch (EmptyChangeException e) {
        throw new RepoException("Error getting the merge commit information: " + merge, e);
      }
    }
  };
}
/** PR revisions reuse the same revision-id label as the plain git origin. */
@Override
public String getLabelName() {
  String labelName = GitRepository.GIT_ORIGIN_REV_ID;
  return labelName;
}
/** Origin type identifier used in describe() output. */
@Override
public String getType() {
  final String originType = "git.github_pr_origin";
  return originType;
}
/** Exposes the configured review_state policy (null when unconfigured) for tests. */
@VisibleForTesting
public ReviewState getReviewState() {
  return this.reviewState;
}
/** Required labels, resolved through GitHubPrOriginOptions from the configured field. */
@VisibleForTesting
public Set<String> getRequiredLabels() {
  return gitHubPrOriginOptions.getRequiredLabels(this.requiredLabelsField);
}
/** Required status contexts, resolved through GitHubPrOriginOptions. */
@VisibleForTesting
public Set<String> getRequiredStatusContextNames() {
  return gitHubPrOriginOptions.getRequiredStatusContextNames(this.requiredStatusContextNamesField);
}
/** Required check runs, resolved through GitHubPrOriginOptions. */
@VisibleForTesting
public Set<String> getRequiredCheckRuns() {
  return gitHubPrOriginOptions.getRequiredCheckRuns(this.requiredCheckRunsField);
}
/** Retryable labels, resolved through GitHubPrOriginOptions. */
@VisibleForTesting
public Set<String> getRetryableLabels() {
  return gitHubPrOriginOptions.getRetryableLabels(this.retryableLabelsField);
}
/** Describes this origin's configuration for doc/telemetry purposes (insertion-ordered). */
@Override
public ImmutableSetMultimap<String, String> describe(Glob originFiles) {
  ImmutableSetMultimap.Builder<String, String> description = ImmutableSetMultimap.builder();
  description.put("type", getType());
  description.put("url", url);
  if (branch != null) {
    description.put("branch", branch);
  }
  // Only report roots when the glob is narrower than "everything".
  if (!originFiles.roots().isEmpty() && !originFiles.roots().contains("")) {
    description.putAll("root", originFiles.roots());
  }
  if (reviewState != null) {
    description.put("review_state", reviewState.name());
    description.putAll(
        "review_approvers",
        reviewApprovers.stream().map(Enum::name).collect(toImmutableList()));
  }
  Set<String> requiredLabels = getRequiredLabels();
  if (!requiredLabels.isEmpty()) {
    description.putAll(GitHubUtil.REQUIRED_LABELS, requiredLabels);
  }
  Set<String> requiredStatusContextNames = getRequiredStatusContextNames();
  if (!requiredStatusContextNames.isEmpty()) {
    description.putAll(GitHubUtil.REQUIRED_STATUS_CONTEXT_NAMES, requiredStatusContextNames);
  }
  Set<String> requiredCheckRuns = getRequiredCheckRuns();
  if (!requiredCheckRuns.isEmpty()) {
    description.putAll(GitHubUtil.REQUIRED_CHECK_RUNS, requiredCheckRuns);
  }
  Set<String> retryableLabels = getRetryableLabels();
  if (!retryableLabels.isEmpty()) {
    description.putAll(GitHubUtil.RETRYABLE_LABELS, retryableLabels);
  }
  return description.build();
}
/** True when either the global --force flag or the PR-origin force-import option is set. */
private boolean forceImport() {
  if (generalOptions.isForced()) {
    return true;
  }
  return gitHubPrOriginOptions.forceImport;
}
/** Returns the approvals provider configured for this origin. */
@Override
public ApprovalsProvider getApprovalsProvider() {
  return this.provider;
}
/** Only migrate PR in one of the following states: */
public enum StateFilter {
OPEN {
@Override
public boolean accepts(PullRequest pr) {
return "open".equals(pr.getState());
}
},
CLOSED {
@Override
public boolean accepts(PullRequest pr) {
return "closed".equals(pr.getState());
}
},
ALL {
@Override
public boolean accepts(PullRequest pr) {
return true;
}
};
public abstract boolean accepts(PullRequest pr);
}
/**
 * Policies for deciding whether a PR's review status allows migration, configured via the
 * `review_state` param. Only reviews whose author association is in `review_approvers` are
 * considered by the per-constant checks (see the package-private shouldMigrate below).
 */
@VisibleForTesting
public enum ReviewState {
  /**
   * Requires that the current head commit has at least one valid approval
   */
  HEAD_COMMIT_APPROVED {
    @Override
    boolean shouldMigrate(ImmutableList<Review> reviews, String sha) {
      // Only approvals attached to the given head SHA count; older approvals are ignored.
      return reviews.stream()
          .filter(e -> e.getCommitId().equals(sha))
          .anyMatch(Review::isApproved);
    }
  },
  /**
   * Any valid approval, even for old commits is good.
   */
  ANY_COMMIT_APPROVED {
    @Override
    boolean shouldMigrate(ImmutableList<Review> reviews, String sha) {
      return reviews.stream().anyMatch(Review::isApproved);
    }
  },
  /**
   * There are reviewers in the change that have commented, asked for changes or approved
   */
  HAS_REVIEWERS {
    @Override
    boolean shouldMigrate(ImmutableList<Review> reviews, String sha) {
      return !reviews.isEmpty();
    }
  },
  /**
   * Import the change regardless of the review state. It will populate the appropriate
   * labels if found
   */
  ANY {
    @Override
    boolean shouldMigrate(ImmutableList<Review> reviews, String sha) {
      return true;
    }
  };

  /**
   * Splits reviews by whether the reviewer's author association is an accepted approver type,
   * then applies this constant's policy to the accepted subset only. The rejected subset is
   * returned so callers can report why a PR was not migrated.
   */
  ApproverState shouldMigrate(
      ImmutableList<Review> reviews, ImmutableSet<AuthorAssociation> approvers, String sha)
      throws RepoException {
    ImmutableList.Builder<Review> authorReviews = ImmutableList.builder();
    ImmutableList.Builder<Review> rejectedReviews = ImmutableList.builder();
    for (Review review : reviews) {
      // Only take into account reviews by valid approverTypes
      if (approvers.contains(review.getAuthorAssociation())) {
        authorReviews.add(review);
      } else {
        rejectedReviews.add(review);
      }
    }
    return ApproverState.create(
        shouldMigrate(authorReviews.build(), sha), rejectedReviews.build());
  }

  // Per-constant policy, applied to the already-filtered (approver-only) reviews.
  abstract boolean shouldMigrate(ImmutableList<Review> reviews, String sha);
}
/**
 * Result of a ReviewState evaluation: whether the PR may migrate, plus the reviews that were
 * ignored because their author association is not an accepted approver type.
 */
@AutoValue
abstract static class ApproverState {
  public abstract boolean shouldMigrate();

  // Keyed by reviewer login; values are the reviewer's author association as a string.
  public abstract ImmutableListMultimap<String, String> rejectedReviews();

  public static ApproverState create(boolean shouldMigrate, ImmutableList<Review> rejectedReviews)
      throws RepoException {
    ImmutableListMultimap.Builder<String, String> rejected = ImmutableListMultimap.builder();
    for (Review review : rejectedReviews) {
      rejected.put(review.getUser().getLogin(), review.getAuthorAssociation().toString());
    }
    return new AutoValue_GitHubPrOrigin_ApproverState(shouldMigrate, rejected.build());
  }
}
/** Describes the configured credential handler, or nothing when none is set. */
@Override
public ImmutableList<ImmutableSetMultimap<String, String>> describeCredentials() {
  return credentials == null ? ImmutableList.of() : credentials.describeCredentials();
}
} |
openjdk/jdk8 | 37,711 | jdk/src/windows/classes/sun/java2d/d3d/D3DSurfaceData.java | /*
* Copyright (c) 2007, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.java2d.d3d;
import java.awt.AlphaComposite;
import java.awt.BufferCapabilities;
import java.awt.Component;
import java.awt.GraphicsConfiguration;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
import java.awt.Image;
import java.awt.Rectangle;
import java.awt.Transparency;
import java.awt.image.ColorModel;
import java.awt.image.DataBuffer;
import java.awt.image.DirectColorModel;
import java.awt.image.Raster;
import java.awt.image.SampleModel;
import java.awt.image.SinglePixelPackedSampleModel;
import sun.awt.SunHints;
import sun.awt.image.DataBufferNative;
import sun.awt.image.PixelConverter;
import sun.awt.image.SurfaceManager;
import sun.awt.image.WritableRasterNative;
import sun.awt.windows.WComponentPeer;
import sun.java2d.pipe.hw.AccelSurface;
import sun.java2d.InvalidPipeException;
import sun.java2d.SunGraphics2D;
import sun.java2d.SurfaceData;
import sun.java2d.loops.GraphicsPrimitive;
import sun.java2d.loops.MaskFill;
import sun.java2d.loops.SurfaceType;
import sun.java2d.loops.CompositeType;
import sun.java2d.pipe.ParallelogramPipe;
import sun.java2d.pipe.PixelToParallelogramConverter;
import sun.java2d.pipe.RenderBuffer;
import sun.java2d.pipe.TextPipe;
import static sun.java2d.pipe.BufferedOpCodes.*;
import static sun.java2d.d3d.D3DContext.D3DContextCaps.*;
import static sun.java2d.pipe.hw.ExtendedBufferCapabilities.VSyncType.*;
import sun.java2d.pipe.hw.ExtendedBufferCapabilities.VSyncType;
import java.awt.BufferCapabilities.FlipContents;
import java.awt.Window;
import sun.awt.SunToolkit;
import sun.awt.image.SunVolatileImage;
import sun.java2d.ScreenUpdateManager;
import sun.java2d.StateTracker;
import sun.java2d.SurfaceDataProxy;
import sun.java2d.pipe.hw.ExtendedBufferCapabilities;
/**
* This class describes a D3D "surface", that is, a region of pixels
* managed via D3D. An D3DSurfaceData can be tagged with one of three
* different SurfaceType objects for the purpose of registering loops, etc.
* This diagram shows the hierarchy of D3D SurfaceTypes:
*
* Any
* / \
* D3DSurface D3DTexture
* |
* D3DSurfaceRTT
*
* D3DSurface
* This kind of surface can be rendered to using D3D APIs. It is also
* possible to copy a D3DSurface to another D3DSurface (or to itself).
*
* D3DTexture
* This kind of surface cannot be rendered to using D3D (in the same sense
* as in D3DSurface). However, it is possible to upload a region of pixels
* to a D3DTexture object via Lock/UnlockRect(). One can also copy a
* surface of type D3DTexture to a D3DSurface by binding the texture
* to a quad and then rendering it to the destination surface (this process
* is known as "texture mapping").
*
* D3DSurfaceRTT
* This kind of surface can be thought of as a sort of hybrid between
* D3DSurface and D3DTexture, in that one can render to this kind of
* surface as if it were of type D3DSurface, but the process of copying
* this kind of surface to another is more like a D3DTexture. (Note that
* "RTT" stands for "render-to-texture".)
*
* In addition to these SurfaceType variants, we have also defined some
* constants that describe in more detail the type of underlying D3D
* surface. This table helps explain the relationships between those
* "type" constants and their corresponding SurfaceType:
*
* D3D Type Corresponding SurfaceType
* -------- -------------------------
* RT_PLAIN D3DSurface
* TEXTURE D3DTexture
* FLIP_BACKBUFFER D3DSurface
* RT_TEXTURE D3DSurfaceRTT
*/
public class D3DSurfaceData extends SurfaceData implements AccelSurface {
/**
 * To be used with getNativeResource() only.
 * @see #getNativeResource()
 */
public static final int D3D_DEVICE_RESOURCE= 100;
/*
 * Surface types.
 * We use these surface types when copying from a sw surface
 * to a surface or texture.
 */
public static final int ST_INT_ARGB = 0;
public static final int ST_INT_ARGB_PRE = 1;
public static final int ST_INT_ARGB_BM = 2;
public static final int ST_INT_RGB = 3;
public static final int ST_INT_BGR = 4;
public static final int ST_USHORT_565_RGB = 5;
public static final int ST_USHORT_555_RGB = 6;
public static final int ST_BYTE_INDEXED = 7;
public static final int ST_BYTE_INDEXED_BM = 8;
public static final int ST_3BYTE_BGR = 9;
/** Equals to D3DSWAPEFFECT_DISCARD */
public static final int SWAP_DISCARD = 1;
/** Equals to D3DSWAPEFFECT_FLIP */
public static final int SWAP_FLIP = 2;
/** Equals to D3DSWAPEFFECT_COPY */
public static final int SWAP_COPY = 3;
/*
 * SurfaceTypes
 */
private static final String DESC_D3D_SURFACE = "D3D Surface";
private static final String DESC_D3D_SURFACE_RTT =
    "D3D Surface (render-to-texture)";
private static final String DESC_D3D_TEXTURE = "D3D Texture";
// REMIND: regarding ArgbPre??
static final SurfaceType D3DSurface =
    SurfaceType.Any.deriveSubType(DESC_D3D_SURFACE,
                                  PixelConverter.ArgbPre.instance);
static final SurfaceType D3DSurfaceRTT =
    D3DSurface.deriveSubType(DESC_D3D_SURFACE_RTT);
static final SurfaceType D3DTexture =
    SurfaceType.Any.deriveSubType(DESC_D3D_TEXTURE);
// one of the RT_PLAIN/TEXTURE/RT_TEXTURE/WINDOW/FLIP_BACKBUFFER constants
private int type;
// requested (Java-level) dimensions of this surface
private int width, height;
// these fields are set from the native code when the surface is
// initialized
private int nativeWidth, nativeHeight;
protected WComponentPeer peer;
// destination image for offscreen surfaces; null for on-screen ones
private Image offscreenImage;
protected D3DGraphicsDevice graphicsDevice;
// swap-chain parameters, only meaningful for WINDOW/FLIP_BACKBUFFER types
private int swapEffect;
private VSyncType syncType;
private int backBuffersNum;
// lazily created raster for single-pixel read/write access; reset on restore
private WritableRasterNative wrn;
// shared rendering pipelines, initialized once in the static block below
protected static D3DRenderer d3dRenderPipe;
protected static PixelToParallelogramConverter d3dTxRenderPipe;
protected static ParallelogramPipe d3dAAPgramPipe;
protected static D3DTextRenderer d3dTextPipe;
protected static D3DDrawImage d3dImagePipe;
// native entry points that create the underlying D3D resource for this
// surface; each returns false on failure
private native boolean initTexture(long pData, boolean isRTT,
                                   boolean isOpaque);
private native boolean initFlipBackbuffer(long pData, long pPeerData,
                                          int numbuffers,
                                          int swapEffect, int syncType);
private native boolean initRTSurface(long pData, boolean isOpaque);
private native void initOps(int screen, int width, int height);
// Wires up the shared D3D rendering pipelines and registers the
// accelerated loops. Runs once, when the class is first loaded.
static {
    D3DRenderQueue rq = D3DRenderQueue.getInstance();
    d3dImagePipe = new D3DDrawImage();
    d3dTextPipe = new D3DTextRenderer(rq);
    d3dRenderPipe = new D3DRenderer(rq);
    if (GraphicsPrimitive.tracingEnabled()) {
        d3dTextPipe = d3dTextPipe.traceWrap();
        d3dRenderPipe = d3dRenderPipe.traceWrap();
        //The wrapped d3dRenderPipe will wrap the AA pipe as well...
        //d3dAAPgramPipe = d3dRenderPipe.traceWrap();
    }
    // the AA pipe must be obtained after the (possibly trace-wrapped)
    // render pipe is in place
    d3dAAPgramPipe = d3dRenderPipe.getAAParallelogramPipe();
    d3dTxRenderPipe =
        new PixelToParallelogramConverter(d3dRenderPipe, d3dRenderPipe,
                                          1.0, 0.25, true);
    D3DBlitLoops.register();
    D3DMaskFill.register();
    D3DMaskBlit.register();
}
/**
 * Creates a surface of the given {@code type}, backed by the device of the
 * supplied graphics config.
 *
 * Note: WINDOW surfaces are deliberately left in the "lost" state here and
 * are only realized by the D3DScreenUpdateManager on first use, so that no
 * vram is consumed by windows that are never rendered to.
 */
protected D3DSurfaceData(WComponentPeer peer, D3DGraphicsConfig gc,
                         int width, int height, Image image,
                         ColorModel cm, int numBackBuffers,
                         int swapEffect, VSyncType vSyncType,
                         int type)
{
    super(getCustomSurfaceType(type), cm);
    this.graphicsDevice = gc.getD3DDevice();
    this.peer = peer;
    this.type = type;
    this.width = width;
    this.height = height;
    this.offscreenImage = image;
    this.backBuffersNum = numBackBuffers;
    this.swapEffect = swapEffect;
    this.syncType = vSyncType;
    // allocate the native D3DSDOps structure before any native init call
    initOps(graphicsDevice.getScreen(), width, height);
    if (type == WINDOW) {
        // we put the surface into the "lost"
        // state; it will be restored by the D3DScreenUpdateManager
        // prior to rendering to it for the first time. This is done
        // so that vram is not wasted for surfaces never rendered to
        setSurfaceLost(true);
    } else {
        initSurface();
    }
    setBlitProxyKey(gc.getProxyKey());
}
/**
 * Returns a proxy used to cache {@code srcData} in this device's
 * video memory for accelerated copies to this surface.
 */
@Override
public SurfaceDataProxy makeProxyFor(SurfaceData srcData) {
    D3DGraphicsConfig defaultConfig =
        (D3DGraphicsConfig) graphicsDevice.getDefaultConfiguration();
    return D3DSurfaceDataProxy.createProxy(srcData, defaultConfig);
}
/**
 * Creates a SurfaceData object representing the back buffer of a
 * double-buffered on-screen Window.
 *
 * Returns null if the peer's config is not D3D-capable or acceleration
 * is disabled for the peer.
 */
public static D3DSurfaceData createData(WComponentPeer peer, Image image) {
    D3DGraphicsConfig gc = getGC(peer);
    if (gc == null || !peer.isAccelCapable()) {
        return null;
    }
    BufferCapabilities caps = peer.getBackBufferCaps();
    VSyncType vsync = VSYNC_DEFAULT;
    if (caps instanceof ExtendedBufferCapabilities) {
        vsync = ((ExtendedBufferCapabilities) caps).getVSync();
    }
    Rectangle bounds = peer.getBounds();
    // map the requested flip behavior onto a D3D swap effect
    BufferCapabilities.FlipContents flip = caps.getFlipContents();
    final int swapEffect;
    if (flip == FlipContents.COPIED) {
        swapEffect = SWAP_COPY;
    } else if (flip == FlipContents.PRIOR) {
        swapEffect = SWAP_FLIP;
    } else {
        // FlipContents.UNDEFINED or FlipContents.BACKGROUND
        swapEffect = SWAP_DISCARD;
    }
    return new D3DSurfaceData(peer, gc, bounds.width, bounds.height,
                              image, peer.getColorModel(),
                              peer.getBackBuffersNum(),
                              swapEffect, vsync, FLIP_BACKBUFFER);
}
/**
 * Returns a WINDOW type of surface - a swap chain which serves as an
 * on-screen surface, handled by the D3DScreenUpdateManager.
 *
 * Note that the native surface is not initialized when the surface is
 * created (to avoid using excessive resources); it is placed into the
 * lost state and restored prior to any rendering to it.
 *
 * @param peer peer for which the onscreen surface is to be created
 * @return a D3DWindowSurfaceData (flip chain) surface, or null if the
 *         peer's config is not D3D-capable or acceleration is disabled
 */
public static D3DSurfaceData createData(WComponentPeer peer) {
    D3DGraphicsConfig gc = getGC(peer);
    boolean accelerated = (gc != null) && peer.isAccelCapable();
    return accelerated ? new D3DWindowSurfaceData(peer, gc) : null;
}
/**
 * Creates a SurfaceData object representing an off-screen buffer (either
 * a plain surface or Texture).
 *
 * If a render-to-texture surface was requested but the device lacks the
 * required capability, the request is silently downgraded to RT_PLAIN.
 * If creation fails (e.g. out of vram), one retry as RT_PLAIN is
 * attempted, unless the image explicitly forced RT_TEXTURE.
 */
public static D3DSurfaceData createData(D3DGraphicsConfig gc,
                                        int width, int height,
                                        ColorModel cm,
                                        Image image, int type)
{
    if (type == RT_TEXTURE) {
        boolean isOpaque = cm.getTransparency() == Transparency.OPAQUE;
        int cap = isOpaque ? CAPS_RT_TEXTURE_OPAQUE : CAPS_RT_TEXTURE_ALPHA;
        if (!gc.getD3DDevice().isCapPresent(cap)) {
            type = RT_PLAIN;
        }
    }
    D3DSurfaceData ret = null;
    try {
        ret = new D3DSurfaceData(null, gc, width, height,
                                 image, cm, 0, SWAP_DISCARD, VSYNC_DEFAULT,
                                 type);
    } catch (InvalidPipeException ipe) {
        // try again - we might have ran out of vram, and rt textures
        // could take up more than a plain surface, so it might succeed
        if (type == RT_TEXTURE) {
            // If a RT_TEXTURE was requested do not attempt to create a
            // plain surface. (note that RT_TEXTURE can only be requested
            // from a VI so the cast is safe)
            if (((SunVolatileImage)image).getForcedAccelSurfaceType() !=
                RT_TEXTURE)
            {
                type = RT_PLAIN;
                ret = new D3DSurfaceData(null, gc, width, height,
                                         image, cm, 0, SWAP_DISCARD,
                                         VSYNC_DEFAULT, type);
            }
        }
    }
    // may be null if the first attempt failed and no retry was allowed
    return ret;
}
/**
 * Returns the appropriate SurfaceType corresponding to the given D3D
 * surface type constant (e.g. TEXTURE -> D3DTexture). Anything other
 * than TEXTURE/RT_TEXTURE maps to the plain D3DSurface type.
 */
private static SurfaceType getCustomSurfaceType(int d3dType) {
    if (d3dType == TEXTURE) {
        return D3DTexture;
    }
    if (d3dType == RT_TEXTURE) {
        return D3DSurfaceRTT;
    }
    return D3DSurface;
}
// Dispatches to the native initializer matching this surface's type.
// Must run on the render thread (called via flushAndInvokeNow from
// initSurface()). Returns false if the native resource could not be
// created.
private boolean initSurfaceNow() {
    boolean isOpaque = (getTransparency() == Transparency.OPAQUE);
    switch (type) {
        case RT_PLAIN:
            return initRTSurface(getNativeOps(), isOpaque);
        case TEXTURE:
            return initTexture(getNativeOps(), false/*isRTT*/, isOpaque);
        case RT_TEXTURE:
            return initTexture(getNativeOps(), true/*isRTT*/, isOpaque);
        // REMIND: we may want to pass the exact type to the native
        // level here so that we could choose the right presentation
        // interval for the frontbuffer (immediate vs v-synced)
        case WINDOW:
        case FLIP_BACKBUFFER:
            return initFlipBackbuffer(getNativeOps(), peer.getData(),
                                      backBuffersNum, swapEffect,
                                      syncType.id());
        default:
            return false;
    }
}
/**
 * Initializes the appropriate D3D offscreen surface based on the value
 * of the type parameter. The actual work happens on the render thread;
 * if the surface creation fails for any reason an InvalidPipeException
 * is thrown.
 */
protected void initSurface() {
    // any time we create or restore the surface, drop the cached raster
    // so it is recreated against the new native resource
    synchronized (this) {
        wrn = null;
    }
    // single-element array used as a mutable result holder for the
    // Runnable executed on the render thread
    final boolean[] success = { false };
    D3DRenderQueue rq = D3DRenderQueue.getInstance();
    rq.lock();
    try {
        rq.flushAndInvokeNow(new Runnable() {
            public void run() {
                success[0] = initSurfaceNow();
            }
        });
        if (!success[0]) {
            throw new InvalidPipeException("Error creating D3DSurface");
        }
    } finally {
        rq.unlock();
    }
}
/**
 * Returns the D3DContext for the GraphicsConfig associated with this
 * surface.
 */
public final D3DContext getContext() {
    return graphicsDevice.getContext();
}
/**
 * Returns one of the surface type constants defined above.
 */
public final int getType() {
    return type;
}
// Native single-pixel accessors; must be invoked on the render thread.
private static native int dbGetPixelNative(long pData, int x, int y);
private static native void dbSetPixelNative(long pData, int x, int y,
                                            int pixel);
/**
 * DataBuffer implementation that reads and writes individual pixels of
 * the native D3D surface via the render queue. Used to back the raster
 * returned by getRaster().
 */
static class D3DDataBufferNative extends DataBufferNative {
    // transfer slot written by the Runnable on the render thread and
    // read back under the same render-queue lock
    int pixel;
    protected D3DDataBufferNative(SurfaceData sData,
                                  int type, int w, int h)
    {
        super(sData, type, w, h);
    }
    // Returns the pixel at (x, y), or 0 if the surface is lost.
    protected int getElem(final int x, final int y,
                          final SurfaceData sData)
    {
        if (sData.isSurfaceLost()) {
            return 0;
        }
        int retPixel;
        D3DRenderQueue rq = D3DRenderQueue.getInstance();
        rq.lock();
        try {
            rq.flushAndInvokeNow(new Runnable() {
                public void run() {
                    pixel = dbGetPixelNative(sData.getNativeOps(), x, y);
                }
            });
        } finally {
            // snapshot the shared field before releasing the queue lock
            retPixel = pixel;
            rq.unlock();
        }
        return retPixel;
    }
    // Writes the pixel at (x, y); no-op if the surface is lost.
    protected void setElem(final int x, final int y, final int pixel,
                           final SurfaceData sData)
    {
        if (sData.isSurfaceLost()) {
            return;
        }
        D3DRenderQueue rq = D3DRenderQueue.getInstance();
        rq.lock();
        try {
            rq.flushAndInvokeNow(new Runnable() {
                public void run() {
                    dbSetPixelNative(sData.getNativeOps(), x, y, pixel);
                }
            });
            sData.markDirty();
        } finally {
            rq.unlock();
        }
    }
}
/**
 * Lazily creates (and caches) a native-backed raster covering the whole
 * surface. The x/y/w/h arguments are ignored for sizing purposes; the
 * raster always spans the full surface dimensions.
 */
public synchronized Raster getRaster(int x, int y, int w, int h) {
    if (wrn == null) {
        DirectColorModel dcm = (DirectColorModel)getColorModel();
        SampleModel smHw;
        int dataType = 0;
        int scanStride = width;
        // choose the element type based on the color model's depth
        if (dcm.getPixelSize() > 16) {
            dataType = DataBuffer.TYPE_INT;
        } else {
            // 15, 16
            dataType = DataBuffer.TYPE_USHORT;
        }
        // note that we have to use the surface width and height here,
        // not the passed w,h
        smHw = new SinglePixelPackedSampleModel(dataType, width, height,
                                                scanStride, dcm.getMasks());
        DataBuffer dbn = new D3DDataBufferNative(this, dataType,
                                                 width, height);
        wrn = WritableRasterNative.createNativeRaster(smHw, dbn);
    }
    return wrn;
}
/**
 * For now, we can only render LCD text if:
 * - the pixel shaders are available, and
 * - blending is disabled, and
 * - the source color is opaque, and
 * - the destination is opaque
 */
public boolean canRenderLCDText(SunGraphics2D sg2d) {
    if (!graphicsDevice.isCapPresent(CAPS_LCD_SHADER)) {
        return false;
    }
    boolean copyComposite =
        sg2d.compositeState <= SunGraphics2D.COMP_ISCOPY;
    boolean opaquePaint =
        sg2d.paintState <= SunGraphics2D.PAINT_OPAQUECOLOR;
    boolean opaqueDest =
        sg2d.surfaceData.getTransparency() == Transparency.OPAQUE;
    return copyComposite && opaquePaint && opaqueDest;
}
/**
 * Called when acceleration should no longer be used for this surface.
 * This implementation flags to the manager that it should no longer
 * attempt to re-create a D3DSurface.
 */
void disableAccelerationForSurface() {
    if (offscreenImage == null) {
        return;
    }
    SurfaceManager manager = SurfaceManager.getManager(offscreenImage);
    if (manager instanceof D3DVolatileSurfaceManager) {
        setSurfaceLost(true);
        ((D3DVolatileSurfaceManager) manager).setAccelerationEnabled(false);
    }
}
/**
 * Selects the rendering pipes (draw/fill/shape/text/image) for the given
 * graphics state, overriding the defaults chosen by super.validatePipe()
 * with the accelerated D3D pipes where the state permits.
 */
public void validatePipe(SunGraphics2D sg2d) {
    TextPipe textpipe;
    boolean validated = false;
    // REMIND: the D3D pipeline doesn't support XOR!, more
    // fixes will be needed below. For now we disable D3D rendering
    // for the surface which had any XOR rendering done to.
    if (sg2d.compositeState >= sg2d.COMP_XOR) {
        super.validatePipe(sg2d);
        sg2d.imagepipe = d3dImagePipe;
        disableAccelerationForSurface();
        return;
    }
    // D3DTextRenderer handles both AA and non-AA text, but
    // only works with the following modes:
    // (Note: For LCD text we only enter this code path if
    // canRenderLCDText() has already validated that the mode is
    // CompositeType.SrcNoEa (opaque color), which will be subsumed
    // by the CompositeType.SrcNoEa (any color) test below.)
    if (/* CompositeType.SrcNoEa (any color) */
        (sg2d.compositeState <= sg2d.COMP_ISCOPY &&
         sg2d.paintState <= sg2d.PAINT_ALPHACOLOR) ||
        /* CompositeType.SrcOver (any color) */
        (sg2d.compositeState == sg2d.COMP_ALPHA &&
         sg2d.paintState <= sg2d.PAINT_ALPHACOLOR &&
         (((AlphaComposite)sg2d.composite).getRule() ==
          AlphaComposite.SRC_OVER)) ||
        /* CompositeType.Xor (any color) */
        (sg2d.compositeState == sg2d.COMP_XOR &&
         sg2d.paintState <= sg2d.PAINT_ALPHACOLOR))
    {
        textpipe = d3dTextPipe;
    } else {
        // do this to initialize textpipe correctly; we will attempt
        // to override the non-text pipes below
        super.validatePipe(sg2d);
        textpipe = sg2d.textpipe;
        validated = true;
    }
    // txPipe/nonTxPipe stay null if the state cannot be accelerated;
    // in that case the super-selected pipes remain in effect
    PixelToParallelogramConverter txPipe = null;
    D3DRenderer nonTxPipe = null;
    if (sg2d.antialiasHint != SunHints.INTVAL_ANTIALIAS_ON) {
        if (sg2d.paintState <= sg2d.PAINT_ALPHACOLOR) {
            if (sg2d.compositeState <= sg2d.COMP_XOR) {
                txPipe = d3dTxRenderPipe;
                nonTxPipe = d3dRenderPipe;
            }
        } else if (sg2d.compositeState <= sg2d.COMP_ALPHA) {
            if (D3DPaints.isValid(sg2d)) {
                txPipe = d3dTxRenderPipe;
                nonTxPipe = d3dRenderPipe;
            }
            // custom paints handled by super.validatePipe() below
        }
    } else {
        if (sg2d.paintState <= sg2d.PAINT_ALPHACOLOR) {
            if (graphicsDevice.isCapPresent(CAPS_AA_SHADER) &&
                (sg2d.imageComp == CompositeType.SrcOverNoEa ||
                 sg2d.imageComp == CompositeType.SrcOver))
            {
                if (!validated) {
                    super.validatePipe(sg2d);
                    validated = true;
                }
                PixelToParallelogramConverter aaConverter =
                    new PixelToParallelogramConverter(sg2d.shapepipe,
                                                      d3dAAPgramPipe,
                                                      1.0/8.0, 0.499,
                                                      false);
                sg2d.drawpipe = aaConverter;
                sg2d.fillpipe = aaConverter;
                sg2d.shapepipe = aaConverter;
            } else if (sg2d.compositeState == sg2d.COMP_XOR) {
                // install the solid pipes when AA and XOR are both enabled
                txPipe = d3dTxRenderPipe;
                nonTxPipe = d3dRenderPipe;
            }
        }
        // other cases handled by super.validatePipe() below
    }
    if (txPipe != null) {
        if (sg2d.transformState >= sg2d.TRANSFORM_TRANSLATESCALE) {
            sg2d.drawpipe = txPipe;
            sg2d.fillpipe = txPipe;
        } else if (sg2d.strokeState != sg2d.STROKE_THIN) {
            sg2d.drawpipe = txPipe;
            sg2d.fillpipe = nonTxPipe;
        } else {
            sg2d.drawpipe = nonTxPipe;
            sg2d.fillpipe = nonTxPipe;
        }
        // Note that we use the transforming pipe here because it
        // will examine the shape and possibly perform an optimized
        // operation if it can be simplified. The simplifications
        // will be valid for all STROKE and TRANSFORM types.
        sg2d.shapepipe = txPipe;
    } else {
        if (!validated) {
            super.validatePipe(sg2d);
        }
    }
    // install the text pipe based on our earlier decision
    sg2d.textpipe = textpipe;
    // always override the image pipe with the specialized D3D pipe
    sg2d.imagepipe = d3dImagePipe;
}
/**
 * Returns an accelerated MaskFill loop for the current state, or null
 * to make the validation code fall back to a general software loop.
 */
@Override
protected MaskFill getMaskFill(SunGraphics2D sg2d) {
    if (sg2d.paintState > sg2d.PAINT_ALPHACOLOR) {
        /*
         * We can only accelerate non-Color MaskFill operations if
         * all of the following conditions hold true:
         *   - there is an implementation for the given paintState
         *   - the current Paint can be accelerated for this destination
         *   - multitexturing is available (since we need to modulate
         *     the alpha mask texture with the paint texture)
         */
        boolean canAccelerate =
            D3DPaints.isValid(sg2d) &&
            graphicsDevice.isCapPresent(CAPS_MULTITEXTURE);
        if (!canAccelerate) {
            return null;
        }
    }
    return super.getMaskFill(sg2d);
}
/**
 * Performs an accelerated copyArea when the transform and composite are
 * simple enough; returns false to let the caller fall back otherwise.
 */
@Override
public boolean copyArea(SunGraphics2D sg2d,
                        int x, int y, int w, int h, int dx, int dy)
{
    boolean simpleTransform =
        sg2d.transformState < sg2d.TRANSFORM_TRANSLATESCALE;
    boolean simpleComposite = sg2d.compositeState < sg2d.COMP_XOR;
    if (!simpleTransform || !simpleComposite) {
        return false;
    }
    // apply the (integer translate only) transform ourselves
    d3dRenderPipe.copyArea(sg2d, x + sg2d.transX, y + sg2d.transY,
                           w, h, dx, dy);
    return true;
}
/**
 * Releases the native resources held by this surface by queueing a
 * FLUSH_SURFACE command and executing it synchronously.
 */
@Override
public void flush() {
    D3DRenderQueue renderQueue = D3DRenderQueue.getInstance();
    renderQueue.lock();
    try {
        RenderBuffer commands = renderQueue.getBuffer();
        renderQueue.ensureCapacityAndAlignment(12, 4);
        commands.putInt(FLUSH_SURFACE);
        commands.putLong(getNativeOps());
        // this call is expected to complete synchronously, so flush now
        renderQueue.flushNow();
    } finally {
        renderQueue.unlock();
    }
}
/**
 * Disposes the native resources associated with the given D3DSurfaceData
 * (referenced by the pData parameter). This method is invoked from
 * the native Dispose() method from the Disposer thread when the
 * Java-level D3DSurfaceData object is about to go away.
 */
static void dispose(long pData) {
    D3DRenderQueue renderQueue = D3DRenderQueue.getInstance();
    renderQueue.lock();
    try {
        RenderBuffer commands = renderQueue.getBuffer();
        renderQueue.ensureCapacityAndAlignment(12, 4);
        commands.putInt(DISPOSE_SURFACE);
        commands.putLong(pData);
        // disposal must complete synchronously, so flush right away
        renderQueue.flushNow();
    } finally {
        renderQueue.unlock();
    }
}
/**
 * Presents the given region of the surface's swap chain on screen by
 * queueing a SWAP_BUFFERS command. Safe to call from the toolkit thread:
 * if the render-queue lock cannot be taken there, the swap is skipped
 * and a repaint of the region is scheduled instead (deadlock avoidance).
 */
static void swapBuffers(D3DSurfaceData sd,
                        final int x1, final int y1,
                        final int x2, final int y2)
{
    long pData = sd.getNativeOps();
    D3DRenderQueue rq = D3DRenderQueue.getInstance();
    // swapBuffers can be called from the toolkit thread by swing, we
    // should detect this and prevent the deadlocks
    if (rq.isRenderQueueThread()) {
        if (!rq.tryLock()) {
            // if we could not obtain the lock, repaint the area
            // that was supposed to be swapped, and no-op this swap
            final Component target = (Component)sd.getPeer().getTarget();
            SunToolkit.executeOnEventHandlerThread(target, new Runnable() {
                public void run() {
                    // NOTE(review): repaint takes (x, y, width, height)
                    // but x2/y2 here are coordinates; this over-requests
                    // the repaint region (harmless, but looks like it
                    // should be x2-x1, y2-y1) -- confirm intent
                    target.repaint(x1, y1, x2, y2);
                }
            });
            return;
        }
    } else {
        rq.lock();
    }
    try {
        RenderBuffer buf = rq.getBuffer();
        rq.ensureCapacityAndAlignment(28, 4);
        buf.putInt(SWAP_BUFFERS);
        buf.putLong(pData);
        buf.putInt(x1);
        buf.putInt(y1);
        buf.putInt(x2);
        buf.putInt(y2);
        // presentation must happen before we return, so flush now
        rq.flushNow();
    } finally {
        rq.unlock();
    }
}
/**
 * Returns destination Image associated with this SurfaceData.
 */
public Object getDestination() {
    return offscreenImage;
}
/**
 * Returns the bounds of this surface. For on-screen/backbuffer surfaces
 * the peer's current bounds (zero-origin) are used; for offscreen
 * surfaces the dimensions fixed at creation time.
 */
public Rectangle getBounds() {
    if (type == FLIP_BACKBUFFER || type == WINDOW) {
        Rectangle r = peer.getBounds();
        r.x = r.y = 0;
        return r;
    } else {
        return new Rectangle(width, height);
    }
}
/**
 * Returns the dimensions of the underlying native surface, as reported
 * by the native code.
 */
public Rectangle getNativeBounds() {
    D3DRenderQueue rq = D3DRenderQueue.getInstance();
    // need to lock to make sure nativeWidth and Height are consistent
    // since they are set from the render thread from the native
    // level
    rq.lock();
    try {
        // REMIND: use xyoffsets?
        return new Rectangle(nativeWidth, nativeHeight);
    } finally {
        rq.unlock();
    }
}
public GraphicsConfiguration getDeviceConfiguration() {
    return graphicsDevice.getDefaultConfiguration();
}
// Restores the surface contents into a replacement SurfaceData when
// this one is invalidated.
public SurfaceData getReplacement() {
    return restoreContents(offscreenImage);
}
/**
 * Returns the D3D graphics config for the given peer (or for the default
 * screen device when peer is null), or null if the config in effect is
 * not a D3DGraphicsConfig.
 */
private static D3DGraphicsConfig getGC(WComponentPeer peer) {
    GraphicsConfiguration config;
    if (peer == null) {
        GraphicsEnvironment env =
            GraphicsEnvironment.getLocalGraphicsEnvironment();
        config = env.getDefaultScreenDevice().getDefaultConfiguration();
    } else {
        config = peer.getGraphicsConfiguration();
    }
    if (config instanceof D3DGraphicsConfig) {
        return (D3DGraphicsConfig) config;
    }
    return null;
}
/**
 * Attempts to restore the surface by initializing the native data
 * (throws InvalidPipeException via initSurface() on failure).
 */
void restoreSurface() {
    initSurface();
}
// Returns the component peer backing this surface (null for offscreen).
WComponentPeer getPeer() {
    return peer;
}
/**
 * We need to let the surface manager know that the surface is lost so
 * that for example BufferStrategy.contentsLost() returns correct result.
 * Normally the status of contentsLost is set in validate(), but in some
 * cases (like Swing's buffer per window) we intentionally don't call
 * validate from the toolkit thread but only check for the BS status.
 */
@Override
public void setSurfaceLost(boolean lost) {
    super.setSurfaceLost(lost);
    if (!lost || offscreenImage == null) {
        return;
    }
    SurfaceManager.getManager(offscreenImage).acceleratedSurfaceLost();
}
private static native long getNativeResourceNative(long sdops, int resType);
/**
 * Returns a pointer to the native resource of specified {@code resType}
 * associated with this surface.
 *
 * Specifically, for {@code D3DSurfaceData} this method returns pointers of
 * the following:
 * <pre>
 * TEXTURE              - (IDirect3DTexture9*)
 * RT_TEXTURE, RT_PLAIN - (IDirect3DSurface9*)
 * FLIP_BACKBUFFER      - (IDirect3DSwapChain9*)
 * D3D_DEVICE_RESOURCE  - (IDirect3DDevice9*)
 * </pre>
 *
 * Multiple resources may be available for some types (i.e. for render to
 * texture one could retrieve both a destination surface by specifying
 * RT_TEXTURE, and a texture by using TEXTURE).
 *
 * Note: the pointer returned by this method is only valid on the rendering
 * thread.
 *
 * @return pointer to the native resource of specified type or 0L if
 * such resource doesn't exist or can not be retrieved.
 * @see sun.java2d.pipe.hw.AccelSurface#getNativeResource
 */
public long getNativeResource(int resType) {
    return getNativeResourceNative(getNativeOps(), resType);
}
/**
 * Class representing an on-screen d3d surface. Since d3d can't
 * render to the screen directly, it is implemented as a swap chain,
 * controlled by D3DScreenUpdateManager.
 *
 * @see D3DScreenUpdateManager
 */
public static class D3DWindowSurfaceData extends D3DSurfaceData {
    // snapshot of the surface's state tracker; used to detect whether
    // the surface has been rendered to since the last markClean()
    StateTracker dirtyTracker;
    public D3DWindowSurfaceData(WComponentPeer peer,
                                D3DGraphicsConfig gc)
    {
        super(peer, gc,
              peer.getBounds().width, peer.getBounds().height,
              null, peer.getColorModel(), 1, SWAP_COPY, VSYNC_DEFAULT,
              WINDOW);
        dirtyTracker = getStateTracker();
    }
    /**
     * {@inheritDoc}
     *
     * Overridden to use ScreenUpdateManager to obtain the replacement
     * surface.
     *
     * @see sun.java2d.ScreenUpdateManager#getReplacementScreenSurface
     */
    @Override
    public SurfaceData getReplacement() {
        ScreenUpdateManager mgr = ScreenUpdateManager.getInstance();
        return mgr.getReplacementScreenSurface(peer, this);
    }
    /**
     * Returns destination Component associated with this SurfaceData.
     */
    @Override
    public Object getDestination() {
        return peer.getTarget();
    }
    @Override
    void disableAccelerationForSurface() {
        // for on-screen surfaces we need to make sure a backup GDI surface
        // is used until a new one is set (which may happen during a resize).
        // We don't want the screen update manager to replace the surface
        // right away because it causes repainting issues in Swing, so we
        // invalidate it; this will prevent SUM from issuing a
        // replaceSurfaceData call.
        setSurfaceLost(true);
        invalidate();
        flush();
        peer.disableAcceleration();
        ScreenUpdateManager.getInstance().dropScreenSurface(this);
    }
    @Override
    void restoreSurface() {
        if (!peer.isAccelCapable()) {
            throw new InvalidPipeException("Onscreen acceleration " +
                                           "disabled for this surface");
        }
        Window fsw = graphicsDevice.getFullScreenWindow();
        if (fsw != null && fsw != peer.getTarget()) {
            throw new InvalidPipeException("Can't restore onscreen surface"+
                                           " when in full-screen mode");
        }
        super.restoreSurface();
        // if initialization was unsuccessful, an IPE will be thrown
        // and the surface will remain lost
        setSurfaceLost(false);
        // This is to make sure the render target is reset after this
        // surface is restored. The reason for this is that sometimes this
        // surface can be restored from multiple threads (the screen update
        // manager's thread and app's rendering thread) at the same time,
        // and when that happens the second restoration will create the
        // native resource which will not be set as render target because
        // the BufferedContext's validate method will think that since the
        // surface data object didn't change then the current render target
        // is correct and no rendering will appear on the screen.
        D3DRenderQueue rq = D3DRenderQueue.getInstance();
        rq.lock();
        try {
            getContext().invalidateContext();
        } finally {
            rq.unlock();
        }
    }
    // True if the surface has been rendered to since the last markClean().
    public boolean isDirty() {
        return !dirtyTracker.isCurrent();
    }
    // Resets the dirty tracking baseline to the current state.
    public void markClean() {
        dirtyTracker = getStateTracker();
    }
}
/**
 * Updates the layered window with the contents of the surface.
 *
 * @param pd3dsd pointer to the D3DSDOps structure
 * @param pData pointer to the AwtWindow peer data
 * @param w width of the window
 * @param h height of the window
 * @return true if the native update succeeded
 * @see sun.awt.windows.TranslucentWindowPainter
 */
public static native boolean updateWindowAccelImpl(long pd3dsd, long pData,
                                                   int w, int h);
}
|
apache/parquet-java | 37,755 | parquet-variant/src/main/java/org/apache/parquet/variant/VariantUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.parquet.variant;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.HashMap;
/**
* This class defines constants related to the Variant format and provides functions for
* manipulating Variant binaries.
*
* A Variant is made up of 2 binaries: value and metadata. A Variant value consists of a one-byte
* header and a number of content bytes (can be zero). The header byte is divided into upper 6 bits
* (called "type info") and lower 2 bits (called "basic type"). The content format is explained in
* the below constants for all possible basic type and type info values.
*
* The Variant metadata includes a version id and a dictionary of distinct strings (case-sensitive).
* Its binary format is:
* - Version: 1-byte unsigned integer. The only acceptable value is 1 currently.
* - Dictionary size: 4-byte little-endian unsigned integer. The number of keys in the
* dictionary.
* - Offsets: (size + 1) * 4-byte little-endian unsigned integers. `offsets[i]` represents the
* starting position of string i, counting starting from the address of `offsets[0]`. Strings
* must be stored contiguously, so we don’t need to store the string size, instead, we compute it
* with `offset[i + 1] - offset[i]`.
* - UTF-8 string data.
*/
class VariantUtil {
static final int BASIC_TYPE_BITS = 2;
static final int BASIC_TYPE_MASK = 0b00000011;
static final int PRIMITIVE_TYPE_MASK = 0b00111111;
/** The inclusive maximum value of the type info value. It is the size limit of `SHORT_STR`. */
static final int MAX_SHORT_STR_SIZE = 0b00111111;
// The basic types
/**
 * Primitive value.
 * The type info value must be one of the values in the "Primitive" section below.
 */
static final int PRIMITIVE = 0;
/**
 * Short string value.
 * The type info value is the string size, which must be in `[0, MAX_SHORT_STR_SIZE]`.
 * The string content bytes directly follow the header byte.
 */
static final int SHORT_STR = 1;
/**
 * Object value.
 * The content contains a size, a list of field ids, a list of field offsets, and
 * the actual field values. The list of field ids has `size` ids, while the list of field offsets
 * has `size + 1` offsets, where the last offset represents the total size of the field values
 * data. The list of fields ids must be sorted by the field name in alphabetical order.
 * Duplicate field names within one object are not allowed.
 * 5 bits in the type info are used to specify the integer type of the object header. It is
 * 0_b4_b3b2_b1b0 (most significant bit is 0), where:
 * - b4: the integer type of size. When it is 0/1, `size` is a little-endian 1/4-byte
 * unsigned integer.
 * - b3b2: the integer type of ids. When the 2 bits are 0/1/2, the id list contains
 * 1/2/3-byte little-endian unsigned integers.
 * - b1b0: the integer type of offset. When the 2 bits are 0/1/2, the offset list contains
 * 1/2/3-byte little-endian unsigned integers.
 */
static final int OBJECT = 2;
/**
 * Array value.
 * The content contains a size, a list of field offsets, and the actual element values.
 * It is similar to an object without the id list. The length of the offset list
 * is `size + 1`, where the last offset represent the total size of the element data.
 * Its type info is: 000_b2_b1b0:
 * - b2: the type of size.
 * - b1b0: the integer type of offset.
 */
static final int ARRAY = 3;
// The primitive types
/** JSON Null value. Empty content. */
static final int NULL = 0;
/** True value. Empty content. */
static final int TRUE = 1;
/** False value. Empty content. */
static final int FALSE = 2;
/** 1-byte little-endian signed integer. */
static final int INT8 = 3;
/** 2-byte little-endian signed integer. */
static final int INT16 = 4;
/** 4-byte little-endian signed integer. */
static final int INT32 = 5;
/** 8-byte little-endian signed integer. */
static final int INT64 = 6;
/** 8-byte IEEE double. */
static final int DOUBLE = 7;
/** 4-byte decimal. Content is 1-byte scale + 4-byte little-endian signed integer. */
static final int DECIMAL4 = 8;
/** 8-byte decimal. Content is 1-byte scale + 8-byte little-endian signed integer. */
static final int DECIMAL8 = 9;
/** 16-byte decimal. Content is 1-byte scale + 16-byte little-endian signed integer. */
static final int DECIMAL16 = 10;
/**
 * Date value. Content is 4-byte little-endian signed integer that represents the
 * number of days from the Unix epoch.
 */
static final int DATE = 11;
/**
 * Timestamp value. Content is 8-byte little-endian signed integer that represents the number of
 * microseconds elapsed since the Unix epoch, 1970-01-01 00:00:00 UTC. It is displayed to users in
 * their local time zones and may be displayed differently depending on the execution environment.
 */
static final int TIMESTAMP_TZ = 12;
/**
 * Timestamp_ntz value. It has the same content as `TIMESTAMP_TZ` but should always be interpreted
 * as if the local time zone is UTC.
 */
static final int TIMESTAMP_NTZ = 13;
/** 4-byte IEEE float. */
static final int FLOAT = 14;
/**
 * Binary value. The content is (4-byte little-endian unsigned integer representing the binary
 * size) + (size bytes of binary content).
 */
static final int BINARY = 15;
/**
 * Long string value. The content is (4-byte little-endian unsigned integer representing the
 * string size) + (size bytes of string content).
 */
static final int LONG_STR = 16;
/**
 * Time value. Values can be from 00:00:00 to 23:59:59.999999.
 * Content is 8-byte little-endian unsigned integer that represents the number of microseconds
 * since midnight.
 */
static final int TIME = 17;
/**
 * Timestamp nanos value. Similar to `TIMESTAMP_TZ`, but represents the number of nanoseconds
 * elapsed since the Unix epoch, 1970-01-01 00:00:00 UTC.
 */
static final int TIMESTAMP_NANOS_TZ = 18;
/**
 * Timestamp nanos (without timezone) value. It has the same content as `TIMESTAMP_NANOS_TZ` but
 * should always be interpreted as if the local time zone is UTC.
 */
static final int TIMESTAMP_NANOS_NTZ = 19;
/**
 * UUID value. The content is a 16-byte binary, encoded using big-endian.
 * For example, UUID 00112233-4455-6677-8899-aabbccddeeff is encoded as the bytes
 * 00 11 22 33 44 55 66 77 88 99 aa bb cc dd ee ff.
 */
static final int UUID = 20;
// The metadata version.
static final byte VERSION = 1;
// The lower 4 bits of the first metadata byte contain the version.
static final byte VERSION_MASK = 0x0F;
// Constants for various unsigned integer sizes.
static final int U8_MAX = 0xFF;
static final int U16_MAX = 0xFFFF;
static final int U24_MAX = 0xFFFFFF;
static final int U8_SIZE = 1;
static final int U16_SIZE = 2;
static final int U24_SIZE = 3;
static final int U32_SIZE = 4;
// Max decimal precision for each decimal type.
static final int MAX_DECIMAL4_PRECISION = 9;
static final int MAX_DECIMAL8_PRECISION = 18;
static final int MAX_DECIMAL16_PRECISION = 38;
// The size (in bytes) of a UUID.
static final int UUID_SIZE = 16;
// Precomputed header bytes for each primitive type.
static final byte HEADER_NULL = primitiveHeader(NULL);
static final byte HEADER_LONG_STRING = primitiveHeader(LONG_STR);
static final byte HEADER_TRUE = primitiveHeader(TRUE);
static final byte HEADER_FALSE = primitiveHeader(FALSE);
static final byte HEADER_INT8 = primitiveHeader(INT8);
static final byte HEADER_INT16 = primitiveHeader(INT16);
static final byte HEADER_INT32 = primitiveHeader(INT32);
static final byte HEADER_INT64 = primitiveHeader(INT64);
static final byte HEADER_DOUBLE = primitiveHeader(DOUBLE);
static final byte HEADER_DECIMAL4 = primitiveHeader(DECIMAL4);
static final byte HEADER_DECIMAL8 = primitiveHeader(DECIMAL8);
static final byte HEADER_DECIMAL16 = primitiveHeader(DECIMAL16);
static final byte HEADER_DATE = primitiveHeader(DATE);
static final byte HEADER_TIMESTAMP_TZ = primitiveHeader(TIMESTAMP_TZ);
static final byte HEADER_TIMESTAMP_NTZ = primitiveHeader(TIMESTAMP_NTZ);
static final byte HEADER_TIME = primitiveHeader(TIME);
static final byte HEADER_TIMESTAMP_NANOS_TZ = primitiveHeader(TIMESTAMP_NANOS_TZ);
static final byte HEADER_TIMESTAMP_NANOS_NTZ = primitiveHeader(TIMESTAMP_NANOS_NTZ);
static final byte HEADER_FLOAT = primitiveHeader(FLOAT);
static final byte HEADER_BINARY = primitiveHeader(BINARY);
static final byte HEADER_UUID = primitiveHeader(UUID);
/**
 * Builds the header byte for a primitive value: the primitive type id in the upper bits,
 * the {@code PRIMITIVE} basic type in the low bits.
 * @param type The primitive type id (e.g. {@code INT8}, {@code DOUBLE})
 * @return The encoded header byte
 */
static byte primitiveHeader(int type) {
  // Use BASIC_TYPE_BITS rather than a literal 2 for consistency with objectHeader/arrayHeader;
  // decoders recover this field via `(header >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK`.
  return (byte) (type << BASIC_TYPE_BITS | PRIMITIVE);
}
/**
 * Builds the header byte for a short string: the string length in the upper bits,
 * the {@code SHORT_STR} basic type in the low bits.
 * @param size The string length in bytes
 * @return The encoded header byte
 */
static byte shortStrHeader(int size) {
  // BASIC_TYPE_BITS instead of a literal 2, matching the other header builders and decoders.
  return (byte) (size << BASIC_TYPE_BITS | SHORT_STR);
}
/**
 * Builds the header byte for an object value.
 * @param largeSize Whether the element count is stored in 4 bytes (true) or 1 byte (false)
 * @param idSize The byte width (1-4) of each field-id entry; stored as `idSize - 1` in 2 bits
 * @param offsetSize The byte width (1-4) of each offset entry; stored as `offsetSize - 1`
 * @return The encoded header byte
 */
static byte objectHeader(boolean largeSize, int idSize, int offsetSize) {
  return (byte) (((largeSize ? 1 : 0) << (BASIC_TYPE_BITS + 4))
      | ((idSize - 1) << (BASIC_TYPE_BITS + 2))
      | ((offsetSize - 1) << BASIC_TYPE_BITS)
      | OBJECT);
}
/**
 * Builds the header byte for an array value.
 * @param largeSize Whether the element count is stored in 4 bytes (true) or 1 byte (false)
 * @param offsetSize The byte width (1-4) of each offset entry; stored as `offsetSize - 1`
 * @return The encoded header byte
 */
static byte arrayHeader(boolean largeSize, int offsetSize) {
  return (byte) (((largeSize ? 1 : 0) << (BASIC_TYPE_BITS + 2)) | ((offsetSize - 1) << BASIC_TYPE_BITS) | ARRAY);
}
/**
 * Validates that `pos` is a legal index into an array/buffer of the given length.
 * @param pos The index to check
 * @param length The length of the array
 * @throws IllegalArgumentException if the index is out of bound
 */
static void checkIndex(int pos, int length) {
  // The valid range is [0, length); return early on success, throw otherwise.
  if (pos >= 0 && pos < length) {
    return;
  }
  throw new IllegalArgumentException(
      String.format("Invalid byte-array offset (%d). length: %d", pos, length));
}
/**
 * Write the least significant `numBytes` bytes in `value` into `bytes[pos, pos + numBytes)` in
 * little endian.
 * @param bytes The byte array to write into
 * @param pos The starting index of the byte array to write into
 * @param value The value to write
 * @param numBytes The number of bytes to write
 */
static void writeLong(byte[] bytes, int pos, long value, int numBytes) {
  long remaining = value;
  for (int i = 0; i < numBytes; ++i) {
    // Emit the current lowest byte, then shift the next byte into place.
    bytes[pos + i] = (byte) (remaining & 0xFF);
    remaining >>>= 8;
  }
}
/**
 * Reads a little-endian signed long value from `buffer[pos, pos + numBytes)`.
 * The most significant of the `numBytes` bytes carries the sign, so e.g. reading 2 bytes of
 * 0xFF 0xFF yields -1, not 65535.
 * @param buffer The ByteBuffer to read from
 * @param pos The starting index of the buffer to read from
 * @param numBytes The number of bytes to read
 * @return The long value
 */
static long readLong(ByteBuffer buffer, int pos, int numBytes) {
  // Bounds-check both ends of the range up front.
  checkIndex(pos, buffer.limit());
  checkIndex(pos + numBytes - 1, buffer.limit());
  long result = 0;
  // All bytes except the most significant byte should be unsigned-extended and shifted
  // (so we need & 0xFF`). The most significant byte should be sign-extended and is handled
  // after the loop.
  for (int i = 0; i < numBytes - 1; ++i) {
    long unsignedByteValue = buffer.get(pos + i) & 0xFF;
    result |= unsignedByteValue << (8 * i);
  }
  // `buffer.get` returns a signed byte; the implicit widening here performs the sign extension.
  long signedByteValue = buffer.get(pos + numBytes - 1);
  result |= signedByteValue << (8 * (numBytes - 1));
  return result;
}
/**
 * Read a little-endian unsigned int value from `bytes[pos, pos + numBytes)`. The value must fit
 * into a non-negative int (`[0, Integer.MAX_VALUE]`).
 * @param bytes The ByteBuffer to read from
 * @param pos The starting index of the buffer to read from
 * @param numBytes The number of bytes to read
 * @return The non-negative int value
 * @throws IllegalArgumentException if the range is out of bounds or the value overflows an int
 */
static int readUnsigned(ByteBuffer bytes, int pos, int numBytes) {
  checkIndex(pos, bytes.limit());
  checkIndex(pos + numBytes - 1, bytes.limit());
  int result = 0;
  // Similar to the `readLong` loop, but all bytes should be unsigned-extended.
  for (int i = 0; i < numBytes; ++i) {
    int unsignedByteValue = bytes.get(pos + i) & 0xFF;
    result |= unsignedByteValue << (8 * i);
  }
  // A negative accumulator means the encoded value exceeded Integer.MAX_VALUE.
  if (result < 0) {
    throw new IllegalArgumentException(String.format("Failed to read unsigned int. numBytes: %d", numBytes));
  }
  return result;
}
/**
 * Returns the value type of Variant value `value[pos...]`. It is only legal to call `get*` if
 * `getType` returns the corresponding type. For example, it is only legal to call
 * `getLong` if this method returns `Type.LONG`.
 * @param value The Variant value to get the type from
 * @return The type of the Variant value
 * @throws UnsupportedOperationException if the primitive type id is unknown
 */
static Variant.Type getType(ByteBuffer value) {
  checkIndex(value.position(), value.limit());
  // Decode the header byte: low bits select the basic type, the remaining bits the
  // primitive type id (or the short-string length, which is ignored here).
  int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
  int typeInfo = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  switch (basicType) {
    case SHORT_STR:
      return Variant.Type.STRING;
    case OBJECT:
      return Variant.Type.OBJECT;
    case ARRAY:
      return Variant.Type.ARRAY;
    default:
      // PRIMITIVE basic type: dispatch on the primitive type id.
      switch (typeInfo) {
        case NULL:
          return Variant.Type.NULL;
        case TRUE:
        case FALSE:
          return Variant.Type.BOOLEAN;
        case INT8:
          return Variant.Type.BYTE;
        case INT16:
          return Variant.Type.SHORT;
        case INT32:
          return Variant.Type.INT;
        case INT64:
          return Variant.Type.LONG;
        case DOUBLE:
          return Variant.Type.DOUBLE;
        case DECIMAL4:
          return Variant.Type.DECIMAL4;
        case DECIMAL8:
          return Variant.Type.DECIMAL8;
        case DECIMAL16:
          return Variant.Type.DECIMAL16;
        case DATE:
          return Variant.Type.DATE;
        case TIMESTAMP_TZ:
          return Variant.Type.TIMESTAMP_TZ;
        case TIMESTAMP_NTZ:
          return Variant.Type.TIMESTAMP_NTZ;
        case FLOAT:
          return Variant.Type.FLOAT;
        case BINARY:
          return Variant.Type.BINARY;
        case LONG_STR:
          return Variant.Type.STRING;
        case TIME:
          return Variant.Type.TIME;
        case TIMESTAMP_NANOS_TZ:
          return Variant.Type.TIMESTAMP_NANOS_TZ;
        case TIMESTAMP_NANOS_NTZ:
          return Variant.Type.TIMESTAMP_NANOS_NTZ;
        case UUID:
          return Variant.Type.UUID;
        default:
          throw new UnsupportedOperationException(
              String.format("Unknown type in Variant. primitive type: %d", typeInfo));
      }
  }
}
/**
 * Returns the debug string representation of the type of the Variant value `value[pos...]`.
 * Unlike {@link #getType}, this never throws: an undecodable header is reported as a raw
 * basic-type/type-info dump, since the result is only used to build error messages.
 * @param value The Variant value to get the type from
 * @return The String representation of the type of the Variant value
 */
private static String getTypeDebugString(ByteBuffer value) {
  try {
    return getType(value).toString();
  } catch (Exception e) {
    // Deliberately swallow and fall back to the raw header bits; failing while formatting
    // an error message would mask the original problem.
    int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
    int valueHeader = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
    return String.format("unknownType(basicType: %d, valueHeader: %d)", basicType, valueHeader);
  }
}
/** Builds (but does not throw) the error for reading a value as a single wrong type. */
private static IllegalArgumentException unexpectedType(Variant.Type type, ByteBuffer actualValue) {
  String actualType = getTypeDebugString(actualValue);
  return new IllegalArgumentException(String.format("Cannot read %s value as %s", actualType, type));
}
/** Builds (but does not throw) the error for reading a value as one of several wrong types. */
private static IllegalArgumentException unexpectedType(Variant.Type[] types, ByteBuffer actualValue) {
  String actualType = getTypeDebugString(actualValue);
  return new IllegalArgumentException(
      String.format("Cannot read %s value as one of %s", actualType, Arrays.toString(types)));
}
/** Reads a boolean Variant value; throws if the header is not a primitive TRUE/FALSE. */
static boolean getBoolean(ByteBuffer value) {
  int pos = value.position();
  checkIndex(pos, value.limit());
  byte header = value.get(pos);
  int basicType = header & BASIC_TYPE_MASK;
  int typeInfo = (header >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  boolean isBoolean = basicType == PRIMITIVE && (typeInfo == TRUE || typeInfo == FALSE);
  if (!isBoolean) {
    throw unexpectedType(Variant.Type.BOOLEAN, value);
  }
  return typeInfo == TRUE;
}
/**
 * Returns a long value from Variant value `value[pos...]`.
 * It is only legal to call it if `getType` returns one of Type.BYTE, SHORT, INT, LONG,
 * DATE, TIMESTAMP_TZ, TIMESTAMP_NTZ, TIME, TIMESTAMP_NANOS_TZ, TIMESTAMP_NANOS_NTZ.
 * If the type is `DATE`, the return value is guaranteed to fit into an int and
 * represents the number of days from the Unix epoch.
 * If the type is `TIMESTAMP_TZ/TIMESTAMP_NTZ`, the return value represents the number of
 * microseconds from the Unix epoch.
 * If the type is `TIME`, the return value represents the number of microseconds since midnight.
 * If the type is `TIMESTAMP_NANOS_TZ/TIMESTAMP_NANOS_NTZ`, the return value represents the number
 * of nanoseconds from the Unix epoch.
 * @param value The Variant value
 * @return The long value
 * @throws IllegalArgumentException if the value is not one of the supported types
 */
static long getLong(ByteBuffer value) {
  checkIndex(value.position(), value.limit());
  int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
  int typeInfo = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  if (basicType == PRIMITIVE) {
    switch (typeInfo) {
      case INT8:
        return readLong(value, value.position() + 1, 1);
      case INT16:
        return readLong(value, value.position() + 1, 2);
      case INT32:
      case DATE:
        return readLong(value, value.position() + 1, 4);
      case INT64:
      case TIMESTAMP_TZ:
      case TIMESTAMP_NTZ:
      case TIME:
      case TIMESTAMP_NANOS_TZ:
      case TIMESTAMP_NANOS_NTZ:
        return readLong(value, value.position() + 1, 8);
      default:
        break; // fall through to the shared error path
    }
  }
  // Single error path for both the non-primitive and unknown-typeInfo cases; previously the
  // expected-type array literal was duplicated in two separate throw sites.
  throw unexpectedType(
      new Variant.Type[] {
        Variant.Type.BYTE,
        Variant.Type.SHORT,
        Variant.Type.INT,
        Variant.Type.DATE,
        Variant.Type.LONG,
        Variant.Type.TIMESTAMP_TZ,
        Variant.Type.TIMESTAMP_NTZ,
        Variant.Type.TIME,
        Variant.Type.TIMESTAMP_NANOS_TZ,
        Variant.Type.TIMESTAMP_NANOS_NTZ
      },
      value);
}
/**
 * Similar to getLong(), but for the types: Type.BYTE, SHORT, INT, DATE.
 * @param value The Variant value
 * @return The int value
 * @throws IllegalArgumentException if the value is not one of the supported types
 */
static int getInt(ByteBuffer value) {
  checkIndex(value.position(), value.limit());
  int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
  int typeInfo = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  if (basicType == PRIMITIVE) {
    switch (typeInfo) {
      case INT8:
        return (int) readLong(value, value.position() + 1, 1);
      case INT16:
        return (int) readLong(value, value.position() + 1, 2);
      case INT32:
      case DATE:
        return (int) readLong(value, value.position() + 1, 4);
      default:
        break; // fall through to the shared error path
    }
  }
  // Single error path; previously the expected-type array was duplicated in two throw sites.
  throw unexpectedType(
      new Variant.Type[] {Variant.Type.BYTE, Variant.Type.SHORT, Variant.Type.INT, Variant.Type.DATE},
      value);
}
/**
 * Similar to getLong(), but for the types: Type.BYTE, SHORT.
 * @param value The Variant value
 * @return The short value
 * @throws IllegalArgumentException if the value is not one of the supported types
 */
static short getShort(ByteBuffer value) {
  checkIndex(value.position(), value.limit());
  int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
  int typeInfo = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  if (basicType == PRIMITIVE) {
    switch (typeInfo) {
      case INT8:
        return (short) readLong(value, value.position() + 1, 1);
      case INT16:
        return (short) readLong(value, value.position() + 1, 2);
      default:
        break; // fall through to the shared error path
    }
  }
  // Single error path; previously the expected-type array was duplicated in two throw sites.
  throw unexpectedType(new Variant.Type[] {Variant.Type.BYTE, Variant.Type.SHORT}, value);
}
/**
 * Similar to getLong(), but for the type: Type.BYTE.
 * (The previous javadoc was copy-pasted from getShort and wrongly mentioned SHORT and a
 * "short value".)
 * @param value The Variant value
 * @return The byte value
 * @throws IllegalArgumentException if the value is not a BYTE
 */
static byte getByte(ByteBuffer value) {
  checkIndex(value.position(), value.limit());
  int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
  int typeInfo = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  // Only one legal type id, so a plain guard reads better than a one-case switch and matches
  // the style of getDouble/getFloat.
  if (basicType != PRIMITIVE || typeInfo != INT8) {
    throw unexpectedType(Variant.Type.BYTE, value);
  }
  return (byte) readLong(value, value.position() + 1, 1);
}
/** Reads a double Variant value (IEEE-754 bits stored as an 8-byte little-endian long). */
static double getDouble(ByteBuffer value) {
  int pos = value.position();
  checkIndex(pos, value.limit());
  byte header = value.get(pos);
  boolean isDouble = (header & BASIC_TYPE_MASK) == PRIMITIVE
      && ((header >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK) == DOUBLE;
  if (!isDouble) {
    throw unexpectedType(Variant.Type.DOUBLE, value);
  }
  return Double.longBitsToDouble(readLong(value, pos + 1, 8));
}
/**
 * Reads a decimal Variant value, preserving the encoded scale.
 * Layout after the header byte: one scale byte, then a 4/8/16-byte little-endian signed
 * unscaled value depending on DECIMAL4/DECIMAL8/DECIMAL16.
 * @param value The Variant value
 * @return The decimal with its original (unadjusted) scale
 */
static BigDecimal getDecimalWithOriginalScale(ByteBuffer value) {
  checkIndex(value.position(), value.limit());
  int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
  int typeInfo = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  if (basicType != PRIMITIVE) {
    throw unexpectedType(
        new Variant.Type[] {Variant.Type.DECIMAL4, Variant.Type.DECIMAL8, Variant.Type.DECIMAL16}, value);
  }
  // Interpret the scale byte as unsigned. If it is a negative byte, the unsigned value must be
  // greater than `MAX_DECIMAL16_PRECISION` and will trigger an error in `checkDecimal`.
  int scale = value.get(value.position() + 1) & 0xFF;
  BigDecimal result;
  switch (typeInfo) {
    case DECIMAL4:
      result = BigDecimal.valueOf(readLong(value, value.position() + 2, 4), scale);
      break;
    case DECIMAL8:
      result = BigDecimal.valueOf(readLong(value, value.position() + 2, 8), scale);
      break;
    case DECIMAL16:
      checkIndex(value.position() + 17, value.limit());
      byte[] bytes = new byte[16];
      // Copy the bytes reversely because the `BigInteger` constructor expects a big-endian
      // representation.
      for (int i = 0; i < 16; ++i) {
        bytes[i] = value.get(value.position() + 17 - i);
      }
      result = new BigDecimal(new BigInteger(bytes), scale);
      break;
    default:
      throw unexpectedType(
          new Variant.Type[] {Variant.Type.DECIMAL4, Variant.Type.DECIMAL8, Variant.Type.DECIMAL16},
          value);
  }
  return result;
}
/** Reads a decimal Variant value. Currently identical to {@link #getDecimalWithOriginalScale}. */
static BigDecimal getDecimal(ByteBuffer value) {
  return getDecimalWithOriginalScale(value);
}
/** Reads a float Variant value (IEEE-754 bits stored as a 4-byte little-endian int). */
static float getFloat(ByteBuffer value) {
  int pos = value.position();
  checkIndex(pos, value.limit());
  byte header = value.get(pos);
  boolean isFloat = (header & BASIC_TYPE_MASK) == PRIMITIVE
      && ((header >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK) == FLOAT;
  if (!isFloat) {
    throw unexpectedType(Variant.Type.FLOAT, value);
  }
  return Float.intBitsToFloat((int) readLong(value, pos + 1, 4));
}
/**
 * Reads a binary Variant value. Layout: header byte, 4-byte little-endian length, then the data.
 * Returns a buffer view positioned at the start of the data.
 * NOTE(review): the returned view's limit is `value.limit()`, not `start + length` — callers
 * presumably bound their reads by the length prefix; confirm before relying on `remaining()`.
 */
static ByteBuffer getBinary(ByteBuffer value) {
  checkIndex(value.position(), value.limit());
  int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
  int typeInfo = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  if (basicType != PRIMITIVE || typeInfo != BINARY) {
    throw unexpectedType(Variant.Type.BINARY, value);
  }
  int start = value.position() + 1 + U32_SIZE;
  int length = readUnsigned(value, value.position() + 1, U32_SIZE);
  // Validate that the full payload is inside the buffer before handing out the view.
  checkIndex(start + length - 1, value.limit());
  return slice(value, start);
}
/**
 * Reads a string Variant value (either a SHORT_STR with the length packed in the header, or a
 * LONG_STR with a 4-byte length prefix). The bytes are decoded as UTF-8.
 * @param value The Variant value
 * @return The decoded string
 * @throws IllegalArgumentException if the value is not a string
 */
static String getString(ByteBuffer value) {
  checkIndex(value.position(), value.limit());
  int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
  int typeInfo = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  if (basicType == SHORT_STR || (basicType == PRIMITIVE && typeInfo == LONG_STR)) {
    int start;
    int length;
    if (basicType == SHORT_STR) {
      // Short string: the length is stored directly in the header's type-info bits.
      start = value.position() + 1;
      length = typeInfo;
    } else {
      // Long string: a 4-byte little-endian length prefix follows the header byte.
      start = value.position() + 1 + U32_SIZE;
      length = readUnsigned(value, value.position() + 1, U32_SIZE);
    }
    checkIndex(start + length - 1, value.limit());
    // The read-only check matches getMetadataKey: ByteBuffer.array() throws
    // ReadOnlyBufferException on read-only array-backed buffers, which the previous
    // `hasArray()`-only check did not guard against. Decoding is done explicitly as UTF-8
    // instead of the platform default charset.
    if (value.hasArray() && !value.isReadOnly()) {
      // If the buffer is backed by an accessible array, we can use the array directly.
      return new String(value.array(), value.arrayOffset() + start, length,
          java.nio.charset.StandardCharsets.UTF_8);
    } else {
      // Otherwise, copy the bytes into a new array first.
      byte[] valueArray = new byte[length];
      slice(value, start).get(valueArray);
      return new String(valueArray, java.nio.charset.StandardCharsets.UTF_8);
    }
  }
  throw unexpectedType(Variant.Type.STRING, value);
}
/**
 * Reads a UUID Variant value: 16 bytes after the header, encoded big-endian
 * (most-significant long first).
 */
static java.util.UUID getUUID(ByteBuffer value) {
  checkIndex(value.position(), value.limit());
  int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
  int typeInfo = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  if (basicType != PRIMITIVE || typeInfo != UUID) {
    throw unexpectedType(Variant.Type.UUID, value);
  }
  int start = value.position() + 1;
  checkIndex(start + UUID_SIZE - 1, value.limit());
  // The two getLong calls read the high and low 8 bytes in big-endian order.
  ByteBuffer bb = slice(value, start).order(ByteOrder.BIG_ENDIAN);
  return new java.util.UUID(bb.getLong(), bb.getLong());
}
/**
 * Returns a view of `value` whose position is moved to `start`. The underlying content is
 * shared, not copied, and the original buffer's position is left untouched.
 * @param value The ByteBuffer to slice
 * @param start The starting index of the slice
 * @return The sliced ByteBuffer
 */
static ByteBuffer slice(ByteBuffer value, int start) {
  final ByteBuffer view = value.duplicate();
  view.position(start);
  return view;
}
/**
 * A helper class representing the details of a Variant object, used for `ObjectHandler`.
 * All offsets are relative to the first byte (the header byte) of the Variant object.
 */
static class ObjectInfo {
  /** Number of object fields. */
  public final int numElements;
  /** The integer size of the field id list. */
  public final int idSize;
  /** The integer size of the offset list. */
  public final int offsetSize;
  /** The byte offset (from the beginning of the Variant object) of the field id list. */
  public final int idStartOffset;
  /** The byte offset (from the beginning of the Variant object) of the offset list. */
  public final int offsetStartOffset;
  /** The byte offset (from the beginning of the Variant object) of the field data. */
  public final int dataStartOffset;
  public ObjectInfo(
      int numElements,
      int idSize,
      int offsetSize,
      int idStartOffset,
      int offsetStartOffset,
      int dataStartOffset) {
    this.numElements = numElements;
    this.idSize = idSize;
    this.offsetSize = offsetSize;
    this.idStartOffset = idStartOffset;
    this.offsetStartOffset = offsetStartOffset;
    this.dataStartOffset = dataStartOffset;
  }
}
/**
 * Parses the object at `value[pos...]`, and returns the object details.
 * @throws IllegalArgumentException if the value is not an object
 */
static ObjectInfo getObjectInfo(ByteBuffer value) {
  checkIndex(value.position(), value.limit());
  int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
  int typeInfo = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  if (basicType != OBJECT) {
    throw unexpectedType(Variant.Type.OBJECT, value);
  }
  // Refer to the comment of the `OBJECT` constant for the details of the object header encoding.
  // Suppose `typeInfo` has a bit representation of 0_b4_b3b2_b1b0, the following line extracts
  // b4 to determine whether the object uses a 1/4-byte size.
  boolean largeSize = ((typeInfo >> 4) & 0x1) != 0;
  int sizeBytes = (largeSize ? U32_SIZE : 1);
  int numElements = readUnsigned(value, value.position() + 1, sizeBytes);
  // Extracts b3b2 to determine the integer size of the field id list.
  int idSize = ((typeInfo >> 2) & 0x3) + 1;
  // Extracts b1b0 to determine the integer size of the offset list.
  int offsetSize = (typeInfo & 0x3) + 1;
  // Layout: header | size | field ids | (numElements + 1) offsets | field data.
  int idStartOffset = 1 + sizeBytes;
  int offsetStartOffset = idStartOffset + numElements * idSize;
  int dataStartOffset = offsetStartOffset + (numElements + 1) * offsetSize;
  return new ObjectInfo(numElements, idSize, offsetSize, idStartOffset, offsetStartOffset, dataStartOffset);
}
/**
 * A helper class representing the details of a Variant array, used for `ArrayHandler`.
 * All offsets are relative to the first byte (the header byte) of the Variant array.
 */
static class ArrayInfo {
  /** Number of array elements. */
  public final int numElements;
  /** The integer size of the offset list. */
  public final int offsetSize;
  /** The byte offset (from the beginning of the Variant array) of the offset list. */
  public final int offsetStartOffset;
  /** The byte offset (from the beginning of the Variant array) of the field data. */
  public final int dataStartOffset;
  public ArrayInfo(int numElements, int offsetSize, int offsetStartOffset, int dataStartOffset) {
    this.numElements = numElements;
    this.offsetSize = offsetSize;
    this.offsetStartOffset = offsetStartOffset;
    this.dataStartOffset = dataStartOffset;
  }
}
/**
 * Parses the array at `value[pos...]`, and returns the array details.
 * @throws IllegalArgumentException if the value is not an array
 */
static ArrayInfo getArrayInfo(ByteBuffer value) {
  checkIndex(value.position(), value.limit());
  int basicType = value.get(value.position()) & BASIC_TYPE_MASK;
  int typeInfo = (value.get(value.position()) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
  if (basicType != ARRAY) {
    throw unexpectedType(Variant.Type.ARRAY, value);
  }
  // Refer to the comment of the `ARRAY` constant for the details of the object header encoding.
  // Suppose `typeInfo` has a bit representation of 000_b2_b1b0, the following line extracts
  // b2 to determine whether the object uses a 1/4-byte size.
  boolean largeSize = ((typeInfo >> 2) & 0x1) != 0;
  int sizeBytes = (largeSize ? U32_SIZE : 1);
  int numElements = readUnsigned(value, value.position() + 1, sizeBytes);
  // Extracts b1b0 to determine the integer size of the offset list.
  int offsetSize = (typeInfo & 0x3) + 1;
  // Layout: header | size | (numElements + 1) offsets | element data.
  int offsetStartOffset = 1 + sizeBytes;
  int dataStartOffset = offsetStartOffset + (numElements + 1) * offsetSize;
  return new ArrayInfo(numElements, offsetSize, offsetStartOffset, dataStartOffset);
}
/**
 * Returns a key at `id` in the Variant metadata.
 * The key bytes are decoded as UTF-8.
 *
 * @param metadata The Variant metadata
 * @param id The key id
 * @return The key
 * @throws IllegalArgumentException if the id is out of bound
 * @throws IllegalStateException if the encoded metadata is malformed
 */
static String getMetadataKey(ByteBuffer metadata, int id) {
  // Extracts the highest 2 bits in the metadata header to determine the integer size of the
  // offset list.
  int offsetSize = ((metadata.get(metadata.position()) >> 6) & 0x3) + 1;
  int dictSize = readUnsigned(metadata, metadata.position() + 1, offsetSize);
  if (id >= dictSize) {
    throw new IllegalArgumentException(
        String.format("Invalid dictionary id: %d. dictionary size: %d", id, dictSize));
  }
  // The offset list after the header byte, and a `dictSize` with `offsetSize` bytes.
  int offsetListPos = metadata.position() + 1 + offsetSize;
  // The data starts after the offset list, and `(dictSize + 1)` offset values.
  int dataPos = offsetListPos + (dictSize + 1) * offsetSize;
  int offset = readUnsigned(metadata, offsetListPos + (id) * offsetSize, offsetSize);
  int nextOffset = readUnsigned(metadata, offsetListPos + (id + 1) * offsetSize, offsetSize);
  if (offset > nextOffset) {
    throw new IllegalStateException(String.format("Invalid offset: %d. next offset: %d", offset, nextOffset));
  }
  checkIndex(dataPos + nextOffset - 1, metadata.limit());
  // Decode explicitly as UTF-8 instead of the platform default charset, so results are
  // consistent across JVMs (pre-JDK 18 the no-charset String constructor is platform-dependent).
  if (metadata.hasArray() && !metadata.isReadOnly()) {
    return new String(metadata.array(), metadata.arrayOffset() + dataPos + offset,
        nextOffset - offset, java.nio.charset.StandardCharsets.UTF_8);
  } else {
    // ByteBuffer does not have an accessible array, so copy the bytes out first.
    byte[] metadataArray = new byte[nextOffset - offset];
    slice(metadata, dataPos + offset).get(metadataArray);
    return new String(metadataArray, java.nio.charset.StandardCharsets.UTF_8);
  }
}
/**
 * Returns a map from each string to its ID in the Variant metadata.
 * The key bytes are decoded as UTF-8.
 * @param metadata The Variant metadata
 * @return A map from metadata key to its position.
 */
static HashMap<String, Integer> getMetadataMap(ByteBuffer metadata) {
  int pos = metadata.position();
  checkIndex(pos, metadata.limit());
  // Extracts the highest 2 bits in the metadata header to determine the integer size of the
  // offset list.
  int offsetSize = ((metadata.get(pos) >> 6) & 0x3) + 1;
  int dictSize = readUnsigned(metadata, pos + 1, offsetSize);
  HashMap<String, Integer> result = new HashMap<>();
  // String data begins after the header byte, the dictionary size, and (dictSize + 1) offsets.
  // This is loop-invariant, so it is computed once (previously recomputed every iteration).
  int stringStart = 1 + (dictSize + 2) * offsetSize;
  int offset = readUnsigned(metadata, pos + 1 + offsetSize, offsetSize);
  for (int id = 0; id < dictSize; id++) {
    int nextOffset = readUnsigned(metadata, pos + 1 + (id + 2) * offsetSize, offsetSize);
    if (offset > nextOffset) {
      throw new UnsupportedOperationException(
          String.format("Invalid offset: %d. next offset: %d", offset, nextOffset));
    }
    checkIndex(pos + stringStart + nextOffset - 1, metadata.limit());
    if (metadata.hasArray() && !metadata.isReadOnly()) {
      result.put(
          new String(
              metadata.array(),
              metadata.arrayOffset() + pos + stringStart + offset,
              nextOffset - offset,
              java.nio.charset.StandardCharsets.UTF_8),
          id);
    } else {
      // ByteBuffer does not have an accessible array, so copy the bytes out first.
      byte[] metadataArray = new byte[nextOffset - offset];
      // Bug fix: the slice start must include the buffer position (`pos +`), matching the
      // array-backed branch above; previously a non-zero starting position was ignored here,
      // reading the wrong bytes from non-array-backed buffers.
      slice(metadata, pos + stringStart + offset).get(metadataArray);
      result.put(new String(metadataArray, java.nio.charset.StandardCharsets.UTF_8), id);
    }
    offset = nextOffset;
  }
  return result;
}
/**
 * Computes the actual size (in bytes) of the Variant value.
 * @param value The Variant value binary
 * @return The size (in bytes) of the Variant value, including the header byte
 * @throws UnsupportedOperationException if the primitive type id is unknown
 */
public static int valueSize(ByteBuffer value) {
  int pos = value.position();
  int basicType = value.get(pos) & BASIC_TYPE_MASK;
  switch (basicType) {
    case SHORT_STR:
      // Short string: length is packed into the header's type-info bits.
      int stringSize = (value.get(pos) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
      return 1 + stringSize;
    case OBJECT: {
      // The object's total size is the data start plus the last (end) offset value.
      VariantUtil.ObjectInfo info = VariantUtil.getObjectInfo(slice(value, pos));
      return info.dataStartOffset
          + readUnsigned(
              value,
              pos + info.offsetStartOffset + info.numElements * info.offsetSize,
              info.offsetSize);
    }
    case ARRAY: {
      // Same as OBJECT: data start plus the last (end) offset value.
      VariantUtil.ArrayInfo info = VariantUtil.getArrayInfo(slice(value, pos));
      return info.dataStartOffset
          + readUnsigned(
              value,
              pos + info.offsetStartOffset + info.numElements * info.offsetSize,
              info.offsetSize);
    }
    default: {
      // PRIMITIVE basic type: the size is fixed per type id, except BINARY/LONG_STR which
      // carry a 4-byte length prefix.
      int typeInfo = (value.get(pos) >> BASIC_TYPE_BITS) & PRIMITIVE_TYPE_MASK;
      switch (typeInfo) {
        case NULL:
        case TRUE:
        case FALSE:
          return 1;
        case INT8:
          return 2;
        case INT16:
          return 3;
        case INT32:
        case DATE:
        case FLOAT:
          return 5;
        case INT64:
        case DOUBLE:
        case TIMESTAMP_TZ:
        case TIMESTAMP_NTZ:
        case TIME:
        case TIMESTAMP_NANOS_TZ:
        case TIMESTAMP_NANOS_NTZ:
          return 9;
        case DECIMAL4:
          return 6;  // header + scale byte + 4-byte unscaled value
        case DECIMAL8:
          return 10; // header + scale byte + 8-byte unscaled value
        case DECIMAL16:
          return 18; // header + scale byte + 16-byte unscaled value
        case BINARY:
        case LONG_STR:
          return 1 + U32_SIZE + readUnsigned(value, pos + 1, U32_SIZE);
        case UUID:
          return 1 + UUID_SIZE;
        default:
          throw new UnsupportedOperationException(
              String.format("Unknown type in Variant. primitive type: %d", typeInfo));
      }
    }
  }
}
}
|
apache/commons-numbers | 37,798 | commons-numbers-fraction/src/test/java/org/apache/commons/numbers/fraction/BigFractionTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.numbers.fraction;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.util.Arrays;
import org.apache.commons.numbers.core.TestUtils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/**
* Tests for {@link BigFraction}.
*/
class BigFractionTest {
/** The zero representation with positive denominator. */
private static final BigFraction ZERO_P = BigFraction.of(0, 1);
/** The zero representation with negative denominator (normalization is under test). */
private static final BigFraction ZERO_N = BigFraction.of(0, -1);
/** Asserts the numerator, denominator and signum of {@code actual} using the int accessors. */
private static void assertFraction(int expectedNumerator, int expectedDenominator, BigFraction actual) {
    // The expected signum is the product of the signs of the two expected components.
    final int expectedSignum = Integer.signum(expectedNumerator) * Integer.signum(expectedDenominator);
    Assertions.assertEquals(expectedNumerator, actual.getNumeratorAsInt());
    Assertions.assertEquals(expectedDenominator, actual.getDenominatorAsInt());
    Assertions.assertEquals(expectedSignum, actual.signum());
}
/** Asserts the numerator, denominator and signum of {@code actual} using the long accessors. */
private static void assertFraction(long expectedNumerator, long expectedDenominator, BigFraction actual) {
    Assertions.assertEquals(expectedNumerator, actual.getNumeratorAsLong());
    Assertions.assertEquals(expectedDenominator, actual.getDenominatorAsLong());
    Assertions.assertEquals(
        Long.signum(expectedNumerator) * Long.signum(expectedDenominator),
        actual.signum());
}
/** Asserts the numerator, denominator and signum of {@code actual} using the BigInteger accessors. */
private static void assertFraction(BigInteger expectedNumerator, BigInteger expectedDenominator, BigFraction actual) {
    Assertions.assertEquals(expectedNumerator, actual.getNumerator());
    Assertions.assertEquals(expectedDenominator, actual.getDenominator());
    Assertions.assertEquals(
        expectedNumerator.signum() * expectedDenominator.signum(),
        actual.signum());
}
/** Asserts that {@code numerator / denominator} converts to the expected double exactly. */
private static void assertDoubleValue(double expected, BigInteger numerator, BigInteger denominator) {
    final BigFraction f = BigFraction.of(numerator, denominator);
    Assertions.assertEquals(expected, f.doubleValue());
}
/** Convenience overload of {@link #assertDoubleValue(double, BigInteger, BigInteger)} for longs. */
private static void assertDoubleValue(double expected, long numerator, long denominator) {
    assertDoubleValue(expected, BigInteger.valueOf(numerator), BigInteger.valueOf(denominator));
}
@Test
void testConstructor() {
    // Shared numerator/denominator cases used by both Fraction and BigFraction tests.
    for (final CommonTestCases.UnaryOperatorTestCase testCase : CommonTestCases.numDenConstructorTestCases()) {
        assertFraction(
            testCase.expectedNumerator,
            testCase.expectedDenominator,
            BigFraction.of(testCase.operandNumerator, testCase.operandDenominator)
        );
    }
    // Long/BigInteger arguments
    assertFraction(0, 1, BigFraction.of(0L, 2L));
    assertFraction(1L, 1, BigFraction.of(1L));
    assertFraction(11, 1, BigFraction.of(11L));
    assertFraction(11, 1, BigFraction.of(new BigInteger("11")));
    // Divide by zero
    Assertions.assertThrows(ArithmeticException.class, () -> BigFraction.of(BigInteger.ONE, BigInteger.ZERO));
    // Null pointers
    Assertions.assertThrows(NullPointerException.class, () -> BigFraction.of(null, BigInteger.ONE));
    Assertions.assertThrows(NullPointerException.class, () -> BigFraction.of(BigInteger.ONE, null));
    Assertions.assertThrows(NullPointerException.class, () -> BigFraction.of(null));
    // Double-to-fraction conversion that cannot converge within the iteration budget.
    Assertions.assertThrows(ArithmeticException.class,
        () -> BigFraction.from(2.0 * Integer.MAX_VALUE, 1.0e-5, 100000));
}
@Test
void testConstructorZero() {
    // Every zero-valued construction must return the canonical BigFraction.ZERO singleton.
    Assertions.assertSame(BigFraction.ZERO, BigFraction.from(0.0));
    Assertions.assertSame(BigFraction.ZERO, BigFraction.from(0.0, 1e-10, 100));
    Assertions.assertSame(BigFraction.ZERO, BigFraction.from(0.0, 100));
    Assertions.assertSame(BigFraction.ZERO, BigFraction.of(0));
    Assertions.assertSame(BigFraction.ZERO, BigFraction.of(0L));
    Assertions.assertSame(BigFraction.ZERO, BigFraction.of(BigInteger.ZERO));
    Assertions.assertSame(BigFraction.ZERO, BigFraction.of(0, 1));
    Assertions.assertSame(BigFraction.ZERO, BigFraction.of(0, -1));
    Assertions.assertSame(BigFraction.ZERO, BigFraction.of(0L, 1L));
    Assertions.assertSame(BigFraction.ZERO, BigFraction.of(0L, -1L));
    Assertions.assertSame(BigFraction.ZERO, BigFraction.of(BigInteger.ZERO, BigInteger.ONE));
    Assertions.assertSame(BigFraction.ZERO, BigFraction.of(BigInteger.ZERO, BigInteger.ONE.negate()));
}
// MATH-179
@Test
void testDoubleConstructor() {
    for (final CommonTestCases.DoubleToFractionTestCase testCase : CommonTestCases.doubleConstructorTestCases()) {
        assertFraction(
            testCase.expectedNumerator,
            testCase.expectedDenominator,
            BigFraction.from(testCase.operand, 1.0e-5, 100)
        );
    }
    // Cases with different exact results from Fraction: BigFraction.from(double) is exact,
    // so these are the binary (power-of-two denominator) representations of each double.
    assertFraction(6004799503160661L, 18014398509481984L, BigFraction.from(1.0 / 3.0));
    assertFraction(6124895493223875L, 36028797018963968L, BigFraction.from(17.0 / 100.0));
    assertFraction(1784551352345559L, 562949953421312L, BigFraction.from(317.0 / 100.0));
    assertFraction(-6004799503160661L, 18014398509481984L, BigFraction.from(-1.0 / 3.0));
    assertFraction(-6124895493223875L, 36028797018963968L, BigFraction.from(17.0 / -100.0));
    assertFraction(-1784551352345559L, 562949953421312L, BigFraction.from(-317.0 / 100.0));
    // Extreme double values
    Assertions.assertEquals(1L, BigFraction.from(Double.MAX_VALUE).getDenominatorAsLong());
    Assertions.assertEquals(1L, BigFraction.from(Double.longBitsToDouble(0x0010000000000000L)).getNumeratorAsLong());
    // Double.MIN_VALUE is 2^-1074, the smallest subnormal.
    assertFraction(BigInteger.ONE, BigInteger.ONE.shiftLeft(1074), BigFraction.from(Double.MIN_VALUE));
    // Check exact round-trip of double
    Assertions.assertEquals(0.00000000000001, BigFraction.from(0.00000000000001).doubleValue());
    Assertions.assertEquals(0.40000000000001, BigFraction.from(0.40000000000001).doubleValue());
    Assertions.assertEquals(15.0000000000001, BigFraction.from(15.0000000000001).doubleValue());
    // Check the representation
    assertFraction(3602879701896487L, 9007199254740992L, BigFraction.from(0.40000000000001));
    assertFraction(1055531162664967L, 70368744177664L, BigFraction.from(15.0000000000001));
}
// MATH-181
// NUMBERS-147
@Test
void testDoubleConstructorWithMaxDenominator() {
    for (final CommonTestCases.DoubleToFractionTestCase testCase : CommonTestCases.doubleMaxDenomConstructorTestCases()) {
        assertFraction(
            testCase.expectedNumerator,
            testCase.expectedDenominator,
            BigFraction.from(testCase.operand, testCase.maxDenominator)
        );
    }
    // Cases with different exact results from Fraction: 2^31 overflows int but not BigFraction.
    final long pow31 = 1L << 31;
    assertFraction(pow31, 1, BigFraction.from(Integer.MIN_VALUE * -1.0, 2));
    assertFraction(pow31, 3, BigFraction.from(Integer.MIN_VALUE / -3.0, 10));
    assertFraction(-1, pow31, BigFraction.from(1.0 / Integer.MIN_VALUE, Integer.MIN_VALUE));
    assertFraction(1, pow31, BigFraction.from(-1.0 / Integer.MIN_VALUE, Integer.MIN_VALUE));
    // A zero max denominator is invalid.
    Assertions.assertThrows(IllegalArgumentException.class, () -> BigFraction.from(1.0, 0));
}
@Test
void testDoubleConstructorThrows() {
    final double epsilon = 1e-5;
    final int iterations = Integer.MAX_VALUE;
    final int denomLimit = Integer.MAX_VALUE;
    // Non-finite values are rejected by every factory overload.
    final double[] nonFinite = {Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY};
    for (final double value : nonFinite) {
        Assertions.assertThrows(IllegalArgumentException.class, () -> BigFraction.from(value));
        Assertions.assertThrows(IllegalArgumentException.class, () -> BigFraction.from(value, epsilon, iterations));
        Assertions.assertThrows(IllegalArgumentException.class, () -> BigFraction.from(value, denomLimit));
    }
    // Invalid epsilon or iteration count.
    Assertions.assertThrows(IllegalArgumentException.class, () -> BigFraction.from(1.0, Double.NaN, iterations));
    Assertions.assertThrows(IllegalArgumentException.class, () -> BigFraction.from(1.0, -1.0, iterations));
    Assertions.assertThrows(IllegalArgumentException.class, () -> BigFraction.from(1.0, epsilon, 0));
    // Test a zero epsilon is allowed
    assertFraction(1, 1, BigFraction.from(1.0, 0, iterations));
}
@Test
void testDoubleConstructorGoldenRatioThrows() {
    // The golden ratio is notoriously difficult for the continued-fraction
    // algorithm: 25 iterations cannot reach a 1e-12 tolerance.
    final double goldenRatio = (1 + Math.sqrt(5)) / 2;
    Assertions.assertThrows(FractionException.class, () -> BigFraction.from(goldenRatio, 1.0e-12, 25));
}
// MATH-1029
@Test
void testDoubleConstructorWithMaxDenominatorOverFlow() {
    // Too-large magnitudes overflow the convergent for either sign.
    Assertions.assertThrows(ArithmeticException.class, () -> BigFraction.from(1e10, 1000));
    Assertions.assertThrows(ArithmeticException.class, () -> BigFraction.from(-1e10, 1000));
}
@Test
void testDoubleConstructorOverflow() {
    // Each value drives the continued-fraction expansion past the overflow limit.
    final double[] values = {0.75000000001455192, 1.0e10, -1.0e10, -43979.60679604749};
    for (final double value : values) {
        assertDoubleConstructorOverflow(value);
    }
}
/**
 * Assert that converting {@code a} with a tight epsilon (1e-12) and a bounded
 * iteration count throws an {@link ArithmeticException}.
 *
 * @param a Value to convert.
 */
private void assertDoubleConstructorOverflow(final double a) {
    Assertions.assertThrows(ArithmeticException.class, () -> BigFraction.from(a, 1.0e-12, 1000));
}
// Verify that tightening the epsilon tolerance makes the continued-fraction
// conversion proceed to later (more accurate) convergents of the same value.
@Test
void testDoubleConstructorWithEpsilonLimit() {
assertFraction(2, 5, BigFraction.from(0.4, 1.0e-5, 100));
assertFraction(3, 5, BigFraction.from(0.6152, 0.02, 100));
assertFraction(8, 13, BigFraction.from(0.6152, 1.0e-3, 100));
assertFraction(251, 408, BigFraction.from(0.6152, 1.0e-4, 100));
// 1e-5 is already satisfied by the previous convergent: result unchanged.
assertFraction(251, 408, BigFraction.from(0.6152, 1.0e-5, 100));
assertFraction(510, 829, BigFraction.from(0.6152, 1.0e-6, 100));
assertFraction(769, 1250, BigFraction.from(0.6152, 1.0e-7, 100));
}
@Test
void testIsOne() {
    Assertions.assertTrue(BigFraction.of(1).isOne());
    // Products that reduce to one must also report isOne().
    Assertions.assertTrue(BigFraction.of(1, 2).multiply(BigFraction.of(2)).isOne());
    final BigFraction f = BigFraction.of(17, 33);
    Assertions.assertTrue(f.multiply(f.reciprocal()).isOne());
}
@Test
void testIsZero() {
    Assertions.assertTrue(BigFraction.of(0, 4712).isZero());
    // Differences and products that cancel must report isZero().
    Assertions.assertTrue(BigFraction.of(3).subtract(BigFraction.of(3)).isZero());
    final BigFraction f = BigFraction.of(11, 1111111111);
    Assertions.assertTrue(f.multiply(f.zero()).isZero());
}
// Verify compareTo orders by numeric value, treating the different sign
// representations of the same value (e.g. -1/2 and 1/-2) as equal.
@Test
void testCompareTo() {
final BigFraction a = BigFraction.of(1, 2);
final BigFraction b = BigFraction.of(1, 3);
final BigFraction c = BigFraction.of(1, 2);
final BigFraction d = BigFraction.of(-1, 2);
final BigFraction e = BigFraction.of(1, -2);
final BigFraction f = BigFraction.of(-1, -2);
// Denominator is Integer.MIN_VALUE, whose magnitude cannot be negated as int.
final BigFraction g = BigFraction.of(-1, Integer.MIN_VALUE);
final BigFraction h = BigFraction.of(-2, 1);
final BigFraction i = BigFraction.of(-1, 1);
// Reflexive and equal-value comparisons.
Assertions.assertEquals(0, a.compareTo(a));
Assertions.assertEquals(0, a.compareTo(c));
Assertions.assertEquals(1, a.compareTo(b));
Assertions.assertEquals(-1, b.compareTo(a));
Assertions.assertEquals(-1, d.compareTo(a));
Assertions.assertEquals(1, a.compareTo(d));
Assertions.assertEquals(-1, e.compareTo(a));
Assertions.assertEquals(1, a.compareTo(e));
// -1/2 and 1/-2 are the same value; likewise 1/2 and -1/-2.
Assertions.assertEquals(0, d.compareTo(e));
Assertions.assertEquals(0, a.compareTo(f));
Assertions.assertEquals(0, f.compareTo(a));
Assertions.assertEquals(1, f.compareTo(e));
Assertions.assertEquals(-1, e.compareTo(f));
Assertions.assertEquals(-1, g.compareTo(a));
Assertions.assertEquals(-1, g.compareTo(f));
Assertions.assertEquals(1, a.compareTo(g));
Assertions.assertEquals(-1, d.compareTo(g));
Assertions.assertEquals(-1, h.compareTo(i)); // JIRA:NUMBERS-207
// All zero representations compare equal.
Assertions.assertEquals(0, BigFraction.of(0, 3).compareTo(BigFraction.of(0, -2)));
// these two values are different approximations of PI
// the first one is approximately PI - 3.07e-18
// the second one is approximately PI + 1.936e-17
final BigFraction pi1 = BigFraction.of(1068966896, 340262731);
final BigFraction pi2 = BigFraction.of(411557987, 131002976);
// Exact comparison distinguishes values whose double approximations coincide.
Assertions.assertEquals(-1, pi1.compareTo(pi2));
Assertions.assertEquals(1, pi2.compareTo(pi1));
Assertions.assertEquals(0.0, pi1.doubleValue() - pi2.doubleValue(), 1.0e-20);
Assertions.assertEquals(0, ZERO_P.compareTo(ZERO_N));
}
// Verify doubleValue() for simple fractions, signed zeros and values that
// exercise rounding at the 53-bit significand boundary (NUMBERS-120).
@Test
void testDoubleValue() {
assertDoubleValue(0.5, 1, 2);
assertDoubleValue(-0.5, -1, 2);
assertDoubleValue(-0.5, 1, -2);
assertDoubleValue(0.5, -1, -2);
assertDoubleValue(1.0 / 3.0, 1, 3);
Assertions.assertEquals(0.0, BigFraction.ZERO.doubleValue());
Assertions.assertEquals(0.0, ZERO_P.doubleValue());
Assertions.assertEquals(0.0, ZERO_N.doubleValue());
// NUMBERS-120
assertDoubleValue(
2d - 0x1P-52,
1L << 54,
(1L << 53) + 1L
);
assertDoubleValue(
2d,
(1L << 54) - 1L,
1L << 53
);
assertDoubleValue(
1d,
(1L << 53) + 1L,
1L << 53
);
}
// Verify rounding into the subnormal range (denominators around 2^1074, the
// scale of Double.MIN_VALUE) including round-up to Double.MIN_NORMAL.
@Test
void testDoubleValueForSubnormalNumbers() {
assertDoubleValue(
//Double.MIN_VALUE * 2/3
Double.MIN_VALUE,
BigInteger.ONE,
BigInteger.ONE.shiftLeft(1073).multiply(BigInteger.valueOf(3L))
);
assertDoubleValue(
Double.MIN_VALUE,
BigInteger.ONE,
BigInteger.ONE.shiftLeft(1074)
);
assertDoubleValue(
Double.MIN_VALUE * 2,
BigInteger.valueOf(2),
BigInteger.ONE.shiftLeft(1074)
);
assertDoubleValue(
Double.MIN_VALUE * 3,
BigInteger.valueOf(3),
BigInteger.ONE.shiftLeft(1074)
);
assertDoubleValue(
Double.MIN_NORMAL - Double.MIN_VALUE,
BigInteger.ONE.shiftLeft(52).subtract(BigInteger.ONE),
BigInteger.ONE.shiftLeft(1074)
);
assertDoubleValue(
Double.MIN_NORMAL - 2 * Double.MIN_VALUE,
BigInteger.ONE.shiftLeft(52).subtract(BigInteger.valueOf(2)),
BigInteger.ONE.shiftLeft(1074)
);
//this number is smaller than Double.MIN_NORMAL, but should round up to it
assertDoubleValue(
Double.MIN_NORMAL,
BigInteger.ONE.shiftLeft(53).subtract(BigInteger.ONE),
BigInteger.ONE.shiftLeft(1075)
);
}
@Test
void testDoubleValueForInfinities() {
    // The smallest integer that rounds up to Double.POSITIVE_INFINITY:
    // 2^1024 - 2^970.
    final BigInteger minInf = BigInteger.ONE.shiftLeft(1024).subtract(BigInteger.ONE.shiftLeft(970));
    assertDoubleValue(Double.NEGATIVE_INFINITY, minInf.negate(), BigInteger.ONE);
    assertDoubleValue(Double.POSITIVE_INFINITY, minInf, BigInteger.ONE);
}
// MATH-744
@Test
void testDoubleValueForLargeNumeratorAndDenominator() {
    // (10^401 + 1) / (2 * 10^400) is approximately 5.
    final BigInteger numerator = BigInteger.TEN.pow(401).add(BigInteger.ONE);
    final BigInteger denominator = BigInteger.TEN.pow(400).multiply(BigInteger.valueOf(2));
    Assertions.assertEquals(5, BigFraction.of(numerator, denominator).doubleValue(), 1e-15);
}
// MATH-744
@Test
void testFloatValueForLargeNumeratorAndDenominator() {
    // Same value as above, checked through the float conversion.
    final BigInteger numerator = BigInteger.TEN.pow(401).add(BigInteger.ONE);
    final BigInteger denominator = BigInteger.TEN.pow(400).multiply(BigInteger.valueOf(2));
    Assertions.assertEquals(5, BigFraction.of(numerator, denominator).floatValue(), 1e-15);
}
// Verify conversion when the quotient is large but still representable: the
// result must not overflow to infinity (NUMBERS-15) and must round correctly
// near the top of the double range (NUMBERS-120).
@Test
void testDoubleValueForLargeNumeratorAndSmallDenominator() {
// NUMBERS-15
final BigInteger pow300 = BigInteger.TEN.pow(300);
final BigInteger pow330 = BigInteger.TEN.pow(330);
final BigFraction large = BigFraction.of(pow330.add(BigInteger.ONE),
pow300);
Assertions.assertEquals(1e30, large.doubleValue(), 1e-15);
// NUMBERS-120
assertDoubleValue(
5.992310449541053E307,
BigInteger.ONE
.shiftLeft(1024)
.subtract(BigInteger.ONE.shiftLeft(970))
.add(BigInteger.ONE),
BigInteger.valueOf(3)
);
assertDoubleValue(
Double.MAX_VALUE,
BigInteger.ONE
.shiftLeft(1025)
.subtract(BigInteger.ONE.shiftLeft(972))
.subtract(BigInteger.ONE),
BigInteger.valueOf(2)
);
}
// NUMBERS-15
// Same overflow-avoidance check for the float conversion.
@Test
void testFloatValueForLargeNumeratorAndSmallDenominator() {
final BigInteger pow30 = BigInteger.TEN.pow(30);
final BigInteger pow40 = BigInteger.TEN.pow(40);
final BigFraction large = BigFraction.of(pow40.add(BigInteger.ONE),
pow30);
Assertions.assertEquals(1e10f, large.floatValue(), 1e-15);
}
@Test
void testFloatValue() {
    // A half is exact in binary; the sign follows the operand signs.
    final float half = 0.5f;
    Assertions.assertEquals(half, BigFraction.of(1, 2).floatValue());
    Assertions.assertEquals(half, BigFraction.of(-1, -2).floatValue());
    Assertions.assertEquals(-half, BigFraction.of(-1, 2).floatValue());
    Assertions.assertEquals(-half, BigFraction.of(1, -2).floatValue());
    // 1/3 is inexact: expect the nearest float.
    final float e = 1f / 3f;
    Assertions.assertEquals(e, BigFraction.of(1, 3).floatValue());
    Assertions.assertEquals(e, BigFraction.of(-1, -3).floatValue());
    Assertions.assertEquals(-e, BigFraction.of(-1, 3).floatValue());
    Assertions.assertEquals(-e, BigFraction.of(1, -3).floatValue());
    // Both zero representations convert to 0.0f.
    Assertions.assertEquals(0.0f, ZERO_P.floatValue());
    Assertions.assertEquals(0.0f, ZERO_N.floatValue());
}
@Test
void testIntValue() {
    // Conversion truncates toward zero; cover all four sign combinations.
    for (final int sign : new int[] {1, -1}) {
        Assertions.assertEquals(0, BigFraction.of(sign, sign * 2).intValue());
        Assertions.assertEquals(0, BigFraction.of(-sign, sign * 2).intValue());
        Assertions.assertEquals(1, BigFraction.of(3 * sign, 2 * sign).intValue());
        Assertions.assertEquals(-1, BigFraction.of(-3 * sign, 2 * sign).intValue());
    }
    Assertions.assertEquals(0, ZERO_P.intValue());
    Assertions.assertEquals(0, ZERO_N.intValue());
}
/**
 * Verify longValue() truncates toward zero for every sign combination and
 * returns zero for the signed-zero constants.
 */
@Test
void testLongValue() {
    Assertions.assertEquals(0L, BigFraction.of(1, 2).longValue());
    Assertions.assertEquals(0L, BigFraction.of(-1, -2).longValue());
    Assertions.assertEquals(0L, BigFraction.of(-1, 2).longValue());
    Assertions.assertEquals(0L, BigFraction.of(1, -2).longValue());
    Assertions.assertEquals(1L, BigFraction.of(3, 2).longValue());
    Assertions.assertEquals(1L, BigFraction.of(-3, -2).longValue());
    Assertions.assertEquals(-1L, BigFraction.of(-3, 2).longValue());
    Assertions.assertEquals(-1L, BigFraction.of(3, -2).longValue());
    // Use long literals consistently (the original mixed int 0 here, relying
    // on implicit widening to select the (long, long) assertEquals overload).
    Assertions.assertEquals(0L, ZERO_P.longValue());
    Assertions.assertEquals(0L, ZERO_N.longValue());
}
// Verify decimal conversion. Note new BigDecimal(0.5) is safe here because
// 0.5 is exactly representable in binary.
@Test
void testBigDecimalValue() {
Assertions.assertEquals(new BigDecimal(0.5), BigFraction.of(1, 2).bigDecimalValue());
Assertions.assertEquals(new BigDecimal("0.0003"), BigFraction.of(3, 10000).bigDecimalValue());
// Non-terminating expansions require an explicit rounding mode (and scale).
Assertions.assertEquals(new BigDecimal("0"), BigFraction.of(1, 3).bigDecimalValue(RoundingMode.DOWN));
Assertions.assertEquals(new BigDecimal("0.333"), BigFraction.of(1, 3).bigDecimalValue(3, RoundingMode.DOWN));
}
@Test
void testAbs() {
    // Absolute value against the shared expected results.
    for (final CommonTestCases.UnaryOperatorTestCase tc : CommonTestCases.absTestCases()) {
        assertFraction(tc.expectedNumerator, tc.expectedDenominator,
            BigFraction.of(tc.operandNumerator, tc.operandDenominator).abs());
    }
}
@Test
void testReciprocal() {
    // Reciprocal against the shared expected results.
    for (final CommonTestCases.UnaryOperatorTestCase tc : CommonTestCases.reciprocalTestCases()) {
        assertFraction(tc.expectedNumerator, tc.expectedDenominator,
            BigFraction.of(tc.operandNumerator, tc.operandDenominator).reciprocal());
    }
    // Zero has no reciprocal.
    final BigFraction zero = BigFraction.of(0, 3);
    Assertions.assertThrows(ArithmeticException.class, zero::reciprocal);
}
@Test
void testNegate() {
    // Negation against the shared expected results.
    for (final CommonTestCases.UnaryOperatorTestCase tc : CommonTestCases.negateTestCases()) {
        assertFraction(tc.expectedNumerator, tc.expectedDenominator,
            BigFraction.of(tc.operandNumerator, tc.operandDenominator).negate());
    }
}
// Verify add() for fraction and integer operands; the int, long and
// BigInteger overloads must all agree, and null arguments are rejected.
@Test
void testAdd() {
for (final CommonTestCases.BinaryOperatorTestCase testCase : CommonTestCases.addFractionTestCases()) {
final BigFraction f1 = BigFraction.of(testCase.firstOperandNumerator, testCase.firstOperandDenominator);
final BigFraction f2 = BigFraction.of(testCase.secondOperandNumerator, testCase.secondOperandDenominator);
assertFraction(testCase.expectedNumerator, testCase.expectedDenominator, f1.add(f2));
}
for (final CommonTestCases.BinaryIntOperatorTestCase testCase : CommonTestCases.addIntTestCases()) {
final BigFraction f1 = BigFraction.of(testCase.firstOperandNumerator, testCase.firstOperandDenominator);
final int i2 = testCase.secondOperand;
assertFraction(testCase.expectedNumerator, testCase.expectedDenominator, f1.add(i2));
assertFraction(testCase.expectedNumerator, testCase.expectedDenominator, f1.add((long) i2));
assertFraction(testCase.expectedNumerator, testCase.expectedDenominator, f1.add(BigInteger.valueOf(i2)));
}
Assertions.assertThrows(NullPointerException.class, () -> BigFraction.ONE.add((BigFraction) null));
Assertions.assertThrows(NullPointerException.class, () -> BigFraction.ONE.add((BigInteger) null));
// Special cases
// Adding zero and adding to zero must preserve the other operand exactly.
final BigFraction f2 = BigFraction.of(1, 2);
assertFraction(1, 2, f2.add(BigInteger.ZERO));
assertFraction(12, 1, BigFraction.ZERO.add(BigInteger.valueOf(12)));
}
// Verify divide() for fraction and integer operands; dividing by any form of
// zero throws, and null arguments are rejected.
@Test
void testDivide() {
for (final CommonTestCases.BinaryOperatorTestCase testCase : CommonTestCases.divideByFractionTestCases()) {
final BigFraction f1 = BigFraction.of(testCase.firstOperandNumerator, testCase.firstOperandDenominator);
final BigFraction f2 = BigFraction.of(testCase.secondOperandNumerator, testCase.secondOperandDenominator);
assertFraction(testCase.expectedNumerator, testCase.expectedDenominator, f1.divide(f2));
}
for (final CommonTestCases.BinaryIntOperatorTestCase testCase : CommonTestCases.divideByIntTestCases()) {
final BigFraction f1 = BigFraction.of(testCase.firstOperandNumerator, testCase.firstOperandDenominator);
final int i2 = testCase.secondOperand;
assertFraction(testCase.expectedNumerator, testCase.expectedDenominator, f1.divide(i2));
assertFraction(testCase.expectedNumerator, testCase.expectedDenominator, f1.divide((long) i2));
assertFraction(testCase.expectedNumerator, testCase.expectedDenominator, f1.divide(BigInteger.valueOf(i2)));
}
Assertions.assertThrows(NullPointerException.class, () -> BigFraction.ONE.divide((BigFraction) null));
Assertions.assertThrows(NullPointerException.class, () -> BigFraction.ONE.divide((BigInteger) null));
Assertions.assertThrows(FractionException.class, () -> BigFraction.of(1, 2).divide(BigFraction.ZERO));
Assertions.assertThrows(FractionException.class, () -> BigFraction.of(1, 2).divide(0));
Assertions.assertThrows(FractionException.class, () -> BigFraction.of(1, 2).divide(0L));
Assertions.assertThrows(FractionException.class, () -> BigFraction.of(1, 2).divide(BigInteger.ZERO));
// Special cases
// Dividing by Integer.MIN_VALUE must not overflow (its magnitude exceeds
// Integer.MAX_VALUE when negated as an int).
final BigFraction f1 = BigFraction.of(Integer.MIN_VALUE, Integer.MAX_VALUE);
assertFraction(-1, -Integer.MAX_VALUE, f1.divide(Integer.MIN_VALUE));
assertFraction(-1, -Integer.MAX_VALUE, f1.divide((long) Integer.MIN_VALUE));
assertFraction(-1, -Integer.MAX_VALUE, f1.divide(BigInteger.valueOf(Integer.MIN_VALUE)));
}
@Test
void testMultiply() {
    // Fraction * fraction cases from the shared test data.
    for (final CommonTestCases.BinaryOperatorTestCase tc : CommonTestCases.multiplyByFractionTestCases()) {
        final BigFraction lhs = BigFraction.of(tc.firstOperandNumerator, tc.firstOperandDenominator);
        final BigFraction rhs = BigFraction.of(tc.secondOperandNumerator, tc.secondOperandDenominator);
        assertFraction(tc.expectedNumerator, tc.expectedDenominator, lhs.multiply(rhs));
    }
    // Fraction * integer cases; the int, long and BigInteger overloads must agree.
    for (final CommonTestCases.BinaryIntOperatorTestCase tc : CommonTestCases.multiplyByIntTestCases()) {
        final BigFraction lhs = BigFraction.of(tc.firstOperandNumerator, tc.firstOperandDenominator);
        final int value = tc.secondOperand;
        assertFraction(tc.expectedNumerator, tc.expectedDenominator, lhs.multiply(value));
        assertFraction(tc.expectedNumerator, tc.expectedDenominator, lhs.multiply((long) value));
        assertFraction(tc.expectedNumerator, tc.expectedDenominator, lhs.multiply(BigInteger.valueOf(value)));
    }
    // Null arguments are rejected.
    Assertions.assertThrows(NullPointerException.class, () -> BigFraction.ONE.multiply((BigFraction) null));
    Assertions.assertThrows(NullPointerException.class, () -> BigFraction.ONE.multiply((BigInteger) null));
}
// Verify pow() against the shared test data, and that exponents at the int
// extremes overflow the BigInteger representation of the result.
@Test
void testPow() {
for (final CommonTestCases.BinaryIntOperatorTestCase testCase : CommonTestCases.powTestCases()) {
final BigFraction f1 = BigFraction.of(testCase.firstOperandNumerator, testCase.firstOperandDenominator);
final int exponent = testCase.secondOperand;
assertFraction(testCase.expectedNumerator, testCase.expectedDenominator, f1.pow(exponent));
}
// Note: BigInteger magnitude is limited to 2^Integer.MAX_VALUE exclusive
// in the reference implementation (up to at least JDK 14).
Assertions.assertThrows(ArithmeticException.class, () -> BigFraction.of(2).pow(Integer.MAX_VALUE));
Assertions.assertThrows(ArithmeticException.class, () -> BigFraction.of(1, 2).pow(Integer.MAX_VALUE));
Assertions.assertThrows(ArithmeticException.class, () -> BigFraction.of(2).pow(-Integer.MAX_VALUE));
Assertions.assertThrows(ArithmeticException.class, () -> BigFraction.of(1, 2).pow(-Integer.MAX_VALUE));
}
@Test
void testSubtract() {
    // Fraction - fraction cases from the shared test data.
    for (final CommonTestCases.BinaryOperatorTestCase tc : CommonTestCases.subtractFractionTestCases()) {
        final BigFraction lhs = BigFraction.of(tc.firstOperandNumerator, tc.firstOperandDenominator);
        final BigFraction rhs = BigFraction.of(tc.secondOperandNumerator, tc.secondOperandDenominator);
        assertFraction(tc.expectedNumerator, tc.expectedDenominator, lhs.subtract(rhs));
    }
    // Fraction - integer cases; the int, long and BigInteger overloads must agree.
    for (final CommonTestCases.BinaryIntOperatorTestCase tc : CommonTestCases.subtractIntTestCases()) {
        final BigFraction lhs = BigFraction.of(tc.firstOperandNumerator, tc.firstOperandDenominator);
        final int value = tc.secondOperand;
        assertFraction(tc.expectedNumerator, tc.expectedDenominator, lhs.subtract(value));
        assertFraction(tc.expectedNumerator, tc.expectedDenominator, lhs.subtract((long) value));
        assertFraction(tc.expectedNumerator, tc.expectedDenominator, lhs.subtract(BigInteger.valueOf(value)));
    }
    // Null arguments are rejected.
    Assertions.assertThrows(NullPointerException.class, () -> BigFraction.ONE.subtract((BigFraction) null));
    Assertions.assertThrows(NullPointerException.class, () -> BigFraction.ONE.subtract((BigInteger) null));
}
// Verify the equals/hashCode contract: equality with itself, inequality with
// null/foreign types, value equality across different sign representations,
// and inequality for fractions sharing only a numerator or denominator.
@Test
void testEqualsAndHashCode() {
final BigFraction zero = BigFraction.of(0, 1);
Assertions.assertEquals(zero, zero);
Assertions.assertNotEquals(zero, null);
Assertions.assertNotEquals(zero, new Object());
Assertions.assertNotEquals(zero, Double.valueOf(0));
// Equal to same rational number
final BigFraction zero2 = BigFraction.of(0, 2);
assertEqualAndHashCodeEqual(zero, zero2);
// Not equal to different rational number
final BigFraction one = BigFraction.of(1, 1);
Assertions.assertNotEquals(zero, one);
Assertions.assertNotEquals(one, zero);
// Test using different representations of the same fraction
// (Denominators are primes)
for (final int[] f : new int[][] {{1, 1}, {2, 3}, {6826, 15373}, {1373, 103813}, {0, 3}}) {
final int num = f[0];
final int den = f[1];
// Sign on the numerator vs. sign on the denominator is the same value.
BigFraction f1 = BigFraction.of(-num, den);
BigFraction f2 = BigFraction.of(num, -den);
assertEqualAndHashCodeEqual(f1, f2);
assertEqualAndHashCodeEqual(f2, f1);
// Double negation is the same value as no negation.
f1 = BigFraction.of(num, den);
f2 = BigFraction.of(-num, -den);
assertEqualAndHashCodeEqual(f1, f2);
assertEqualAndHashCodeEqual(f2, f1);
}
// Same numerator or denominator as 1/1
final BigFraction half = BigFraction.of(1, 2);
final BigFraction two = BigFraction.of(2, 1);
Assertions.assertNotEquals(one, half);
Assertions.assertNotEquals(one, two);
}
/**
 * Assert that two fractions are equal and, as the {@link Object#hashCode()}
 * contract requires, that their hash codes match.
 *
 * <p>Intended for distinct instances representing the same value. Note that
 * factory methods may return a cached instance for special values such as
 * zero, so the two arguments can occasionally be the same object.
 *
 * @param f1 Fraction 1.
 * @param f2 Fraction 2.
 */
private static void assertEqualAndHashCodeEqual(BigFraction f1, BigFraction f2) {
    Assertions.assertEquals(f1, f2);
    Assertions.assertEquals(f1.hashCode(), f2.hashCode(), "Equal fractions have different hashCode");
    // Recommended (not mandated) computation: the signum multiplied by
    // Arrays.hashCode over the absolute numerator and denominator.
    final int expected = f1.signum()
        * Arrays.hashCode(new Object[] {f1.getNumerator().abs(), f1.getDenominator().abs()});
    Assertions.assertEquals(expected, f1.hashCode(), "Hashcode not equal to using Arrays.hashCode");
}
// zero() must return the additive identity element.
@Test
void testAdditiveNeutral() {
Assertions.assertEquals(BigFraction.ZERO, BigFraction.ONE.zero());
}
// one() must return the multiplicative identity element.
@Test
void testMultiplicativeNeutral() {
Assertions.assertEquals(BigFraction.ONE, BigFraction.ZERO.one());
}
// Round-trip a sample of values through Java serialization; the recovered
// instance must compare equal to the original.
@Test
void testSerial() {
final BigFraction[] fractions = {
BigFraction.of(3, 4), BigFraction.ONE, BigFraction.ZERO,
BigFraction.of(17), BigFraction.from(Math.PI, 1000),
BigFraction.of(-5, 2)
};
for (final BigFraction fraction : fractions) {
Assertions.assertEquals(fraction,
TestUtils.serializeAndRecover(fraction));
}
}
// Whole values (including zero) render without a denominator; other values
// render as "numerator / denominator" after reduction, preserving the sign
// placement used at construction.
@Test
void testToString() {
Assertions.assertEquals("0", BigFraction.of(0, 3).toString());
Assertions.assertEquals("0", BigFraction.of(0, -3).toString());
Assertions.assertEquals("3", BigFraction.of(6, 2).toString());
Assertions.assertEquals("2 / 3", BigFraction.of(18, 27).toString());
Assertions.assertEquals("-10 / 11", BigFraction.of(-10, 11).toString());
Assertions.assertEquals("10 / -11", BigFraction.of(10, -11).toString());
}
@Test
void testParse() {
    // Expressions and their expected values, index-aligned.
    final String[] validExpressions = new String[] {
        "1 / 2",
        "-1 / 2",
        "1 / -2",
        "-1 / -2",
        "01 / 2",
        "01 / 02",
        "-01 / 02",
        "01 / -02",
        "15 / 16",
        "-2 / 3",
        "8 / 7",
        "5",
        "-3",
        "-3",
        "2147,483,647 / 2,147,483,648", //over largest int value
        "9,223,372,036,854,775,807 / 9,223,372,036,854,775,808" //over largest long value
    };
    final BigFraction[] fractions = {
        BigFraction.of(1, 2),
        BigFraction.of(-1, 2),
        BigFraction.of(1, -2),
        BigFraction.of(-1, -2),
        BigFraction.of(1, 2),
        BigFraction.of(1, 2),
        BigFraction.of(-1, 2),
        BigFraction.of(1, -2),
        BigFraction.of(15, 16),
        BigFraction.of(-2, 3),
        BigFraction.of(8, 7),
        BigFraction.of(5, 1),
        BigFraction.of(-3, 1),
        BigFraction.of(3, -1),
        BigFraction.of(2147483647, 2147483648L),
        BigFraction.of(new BigInteger("9223372036854775807"),
                       new BigInteger("9223372036854775808"))
    };
    for (int i = 0; i < fractions.length; i++) {
        Assertions.assertEquals(fractions[i], BigFraction.parse(validExpressions[i]));
    }
    // Malformed input is rejected.
    Assertions.assertThrows(NumberFormatException.class, () -> BigFraction.parse("1 // 2"));
    Assertions.assertThrows(NumberFormatException.class, () -> BigFraction.parse("1 / z"));
    Assertions.assertThrows(NumberFormatException.class, () -> BigFraction.parse("1 / --2"));
    Assertions.assertThrows(NumberFormatException.class, () -> BigFraction.parse("x"));
}
// MATH-340: multiply() must produce the same exact result as constructing a
// fraction directly from the cross-multiplied numerators and denominators.
@Test
void testMath340() {
final BigFraction fractionA = BigFraction.from(0.00131);
final BigFraction fractionB = BigFraction.from(.37).reciprocal();
final BigFraction errorResult = fractionA.multiply(fractionB);
final BigFraction correctResult = BigFraction.of(fractionA.getNumerator().multiply(fractionB.getNumerator()),
fractionA.getDenominator().multiply(fractionB.getDenominator()));
Assertions.assertEquals(correctResult, errorResult);
}
@Test
void testNumbers150() {
    // Zero raised to a negative power has no defined value.
    Assertions.assertThrows(ArithmeticException.class, () -> BigFraction.ZERO.pow(-1));
    Assertions.assertThrows(ArithmeticException.class, () -> BigFraction.ZERO.pow(Integer.MIN_VALUE));
    // Exponents of this magnitude overflow the BigInteger representation.
    final BigFraction two = BigFraction.of(2);
    Assertions.assertThrows(ArithmeticException.class, () -> two.pow(Integer.MIN_VALUE));
    final BigFraction half = BigFraction.of(1, 2);
    Assertions.assertThrows(ArithmeticException.class, () -> half.pow(Integer.MIN_VALUE));
}
}
|
googleapis/google-cloud-java | 37,462 | java-datacatalog/proto-google-cloud-datacatalog-v1beta1/src/main/java/com/google/cloud/datacatalog/v1beta1/ListPolicyTagsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1beta1/policytagmanager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.v1beta1;
/**
*
*
* <pre>
* Response message for
* [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse}
*/
public final class ListPolicyTagsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse)
ListPolicyTagsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListPolicyTagsResponse.newBuilder() to construct.
private ListPolicyTagsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: no policy tags and an empty page token.
private ListPolicyTagsResponse() {
policyTags_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
// Called reflectively by the protobuf runtime to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListPolicyTagsResponse();
}
// Protobuf descriptor for this message type, from the generated file proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1beta1_ListPolicyTagsResponse_descriptor;
}
// Maps proto field names to the reflective accessors of this class/Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1beta1_ListPolicyTagsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse.class,
com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse.Builder.class);
}
public static final int POLICY_TAGS_FIELD_NUMBER = 1;
// NOTE(review): populated by the generated Builder; all accessors below
// return this list directly without copying, so it must not be mutated.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.datacatalog.v1beta1.PolicyTag> policyTags_;
/**
 *
 *
 * <pre>
 * The policy tags that are in the requested taxonomy.
 * </pre>
 *
 * <code>repeated .google.cloud.datacatalog.v1beta1.PolicyTag policy_tags = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.datacatalog.v1beta1.PolicyTag> getPolicyTagsList() {
return policyTags_;
}
/**
 *
 *
 * <pre>
 * The policy tags that are in the requested taxonomy.
 * </pre>
 *
 * <code>repeated .google.cloud.datacatalog.v1beta1.PolicyTag policy_tags = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.datacatalog.v1beta1.PolicyTagOrBuilder>
getPolicyTagsOrBuilderList() {
return policyTags_;
}
/**
 *
 *
 * <pre>
 * The policy tags that are in the requested taxonomy.
 * </pre>
 *
 * <code>repeated .google.cloud.datacatalog.v1beta1.PolicyTag policy_tags = 1;</code>
 */
@java.lang.Override
public int getPolicyTagsCount() {
return policyTags_.size();
}
/**
 *
 *
 * <pre>
 * The policy tags that are in the requested taxonomy.
 * </pre>
 *
 * <code>repeated .google.cloud.datacatalog.v1beta1.PolicyTag policy_tags = 1;</code>
 */
@java.lang.Override
public com.google.cloud.datacatalog.v1beta1.PolicyTag getPolicyTags(int index) {
return policyTags_.get(index);
}
/**
 *
 *
 * <pre>
 * The policy tags that are in the requested taxonomy.
 * </pre>
 *
 * <code>repeated .google.cloud.datacatalog.v1beta1.PolicyTag policy_tags = 1;</code>
 */
@java.lang.Override
public com.google.cloud.datacatalog.v1beta1.PolicyTagOrBuilder getPolicyTagsOrBuilder(int index) {
return policyTags_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
// Holds either a ByteString (as parsed from the wire) or a String; the
// getters below lazily convert and cache the other representation.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * Token used to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access: decode the UTF-8 bytes once and cache the result.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * Token used to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
// First bytes access: encode the String once and cache the result.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization state: -1 = not yet computed, 1 = initialized,
// 0 = not initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// This message has no required fields, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes populated fields in field-number order; an empty page token is
// omitted from the wire format.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < policyTags_.size(); i++) {
output.writeMessage(1, policyTags_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
// Computes the serialized byte size, caching the result in memoizedSize
// (-1 means "not yet computed").
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < policyTags_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, policyTags_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over both declared fields and the unknown field set.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse)) {
return super.equals(obj);
}
com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse other =
(com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse) obj;
if (!getPolicyTagsList().equals(other.getPolicyTagsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash over the descriptor and populated fields; cached in memoizedHashCode
// after the first computation (0 means "not yet computed").
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// The repeated field only contributes when non-empty, matching equals().
if (getPolicyTagsCount() > 0) {
hash = (37 * hash) + POLICY_TAGS_FIELD_NUMBER;
hash = (53 * hash) + getPolicyTagsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // protoc-generated static parse entry points. All overloads delegate to the
  // shared PARSER singleton; stream variants go through GeneratedMessageV3
  // helpers that translate IOExceptions consistently.
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // protoc-generated builder factories. toBuilder() avoids mergeFrom for the
  // default instance as a fast path.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for
   * [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags].
   * </pre>
   *
   * Protobuf type {@code google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse)
      com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponseOrBuilder {
    // protoc-generated descriptor plumbing shared with the message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerProto
          .internal_static_google_cloud_datacatalog_v1beta1_ListPolicyTagsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerProto
          .internal_static_google_cloud_datacatalog_v1beta1_ListPolicyTagsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse.class,
              com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse.Builder.class);
    }
    // Construct using com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default; bit 0x00000001 tracks policy_tags
    // list ownership, bit 0x00000002 tracks next_page_token presence.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (policyTagsBuilder_ == null) {
        policyTags_ = java.util.Collections.emptyList();
      } else {
        policyTags_ = null;
        policyTagsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerProto
          .internal_static_google_cloud_datacatalog_v1beta1_ListPolicyTagsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse getDefaultInstanceForType() {
      return com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse build() {
      com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // protoc-generated build: repeated fields are frozen/transferred first,
    // then scalar fields are copied for each set presence bit.
    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse buildPartial() {
      com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse result =
          new com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse result) {
      if (policyTagsBuilder_ == null) {
        // Builder owns the list (bit set): make it unmodifiable and hand it over.
        if (((bitField0_ & 0x00000001) != 0)) {
          policyTags_ = java.util.Collections.unmodifiableList(policyTags_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.policyTags_ = policyTags_;
      } else {
        result.policyTags_ = policyTagsBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // protoc-generated pass-through overrides kept for binary compatibility;
    // all simply delegate to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse) {
        return mergeFrom((com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // protoc-generated typed merge. Repeated policy_tags are appended; when the
    // builder's list is still empty it aliases the other message's (immutable)
    // list instead of copying. next_page_token is overwritten only when set on
    // the source.
    public Builder mergeFrom(com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse other) {
      if (other == com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse.getDefaultInstance())
        return this;
      if (policyTagsBuilder_ == null) {
        if (!other.policyTags_.isEmpty()) {
          if (policyTags_.isEmpty()) {
            policyTags_ = other.policyTags_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensurePolicyTagsIsMutable();
            policyTags_.addAll(other.policyTags_);
          }
          onChanged();
        }
      } else {
        if (!other.policyTags_.isEmpty()) {
          if (policyTagsBuilder_.isEmpty()) {
            policyTagsBuilder_.dispose();
            policyTagsBuilder_ = null;
            policyTags_ = other.policyTags_;
            bitField0_ = (bitField0_ & ~0x00000001);
            policyTagsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getPolicyTagsFieldBuilder()
                    : null;
          } else {
            policyTagsBuilder_.addAllMessages(other.policyTags_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields in this message, so a builder is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // protoc-generated wire-format parse loop. Tag 10 = policy_tags (field 1,
    // length-delimited message), tag 18 = next_page_token (field 2, string);
    // anything else is preserved as an unknown field. onChanged() fires in
    // finally so listeners see partial progress even on parse failure.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.datacatalog.v1beta1.PolicyTag m =
                    input.readMessage(
                        com.google.cloud.datacatalog.v1beta1.PolicyTag.parser(), extensionRegistry);
                if (policyTagsBuilder_ == null) {
                  ensurePolicyTagsIsMutable();
                  policyTags_.add(m);
                } else {
                  policyTagsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence/ownership bits: 0x1 = policyTags_ list is builder-owned
    // (mutable), 0x2 = nextPageToken_ was explicitly set.
    private int bitField0_;
    // Backing list used until getPolicyTagsFieldBuilder() is first called;
    // afterwards policyTagsBuilder_ owns the elements and this is null.
    private java.util.List<com.google.cloud.datacatalog.v1beta1.PolicyTag> policyTags_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replaces a shared/immutable list with a private ArrayList.
    private void ensurePolicyTagsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        policyTags_ =
            new java.util.ArrayList<com.google.cloud.datacatalog.v1beta1.PolicyTag>(policyTags_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.datacatalog.v1beta1.PolicyTag,
            com.google.cloud.datacatalog.v1beta1.PolicyTag.Builder,
            com.google.cloud.datacatalog.v1beta1.PolicyTagOrBuilder>
        policyTagsBuilder_;
    // ------------------------------------------------------------------
    // protoc-generated accessors for the repeated field
    // `repeated .google.cloud.datacatalog.v1beta1.PolicyTag policy_tags = 1;`
    // ("The policy tags that are in the requested taxonomy.")
    // Every mutator routes through either the plain backing list (copy-on-write
    // via ensurePolicyTagsIsMutable) or the RepeatedFieldBuilderV3, whichever
    // is active.
    // ------------------------------------------------------------------

    /** Returns an unmodifiable view of the policy tags. */
    public java.util.List<com.google.cloud.datacatalog.v1beta1.PolicyTag> getPolicyTagsList() {
      if (policyTagsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(policyTags_);
      } else {
        return policyTagsBuilder_.getMessageList();
      }
    }
    /** Returns the number of policy tags. */
    public int getPolicyTagsCount() {
      if (policyTagsBuilder_ == null) {
        return policyTags_.size();
      } else {
        return policyTagsBuilder_.getCount();
      }
    }
    /** Returns the policy tag at {@code index}. */
    public com.google.cloud.datacatalog.v1beta1.PolicyTag getPolicyTags(int index) {
      if (policyTagsBuilder_ == null) {
        return policyTags_.get(index);
      } else {
        return policyTagsBuilder_.getMessage(index);
      }
    }
    /** Replaces the policy tag at {@code index}; {@code value} must be non-null. */
    public Builder setPolicyTags(int index, com.google.cloud.datacatalog.v1beta1.PolicyTag value) {
      if (policyTagsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensurePolicyTagsIsMutable();
        policyTags_.set(index, value);
        onChanged();
      } else {
        policyTagsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /** Replaces the policy tag at {@code index} with the built value. */
    public Builder setPolicyTags(
        int index, com.google.cloud.datacatalog.v1beta1.PolicyTag.Builder builderForValue) {
      if (policyTagsBuilder_ == null) {
        ensurePolicyTagsIsMutable();
        policyTags_.set(index, builderForValue.build());
        onChanged();
      } else {
        policyTagsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /** Appends a policy tag; {@code value} must be non-null. */
    public Builder addPolicyTags(com.google.cloud.datacatalog.v1beta1.PolicyTag value) {
      if (policyTagsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensurePolicyTagsIsMutable();
        policyTags_.add(value);
        onChanged();
      } else {
        policyTagsBuilder_.addMessage(value);
      }
      return this;
    }
    /** Inserts a policy tag at {@code index}; {@code value} must be non-null. */
    public Builder addPolicyTags(int index, com.google.cloud.datacatalog.v1beta1.PolicyTag value) {
      if (policyTagsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensurePolicyTagsIsMutable();
        policyTags_.add(index, value);
        onChanged();
      } else {
        policyTagsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /** Appends the built policy tag. */
    public Builder addPolicyTags(
        com.google.cloud.datacatalog.v1beta1.PolicyTag.Builder builderForValue) {
      if (policyTagsBuilder_ == null) {
        ensurePolicyTagsIsMutable();
        policyTags_.add(builderForValue.build());
        onChanged();
      } else {
        policyTagsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /** Inserts the built policy tag at {@code index}. */
    public Builder addPolicyTags(
        int index, com.google.cloud.datacatalog.v1beta1.PolicyTag.Builder builderForValue) {
      if (policyTagsBuilder_ == null) {
        ensurePolicyTagsIsMutable();
        policyTags_.add(index, builderForValue.build());
        onChanged();
      } else {
        policyTagsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /** Appends every element of {@code values}. */
    public Builder addAllPolicyTags(
        java.lang.Iterable<? extends com.google.cloud.datacatalog.v1beta1.PolicyTag> values) {
      if (policyTagsBuilder_ == null) {
        ensurePolicyTagsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, policyTags_);
        onChanged();
      } else {
        policyTagsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /** Removes all policy tags. */
    public Builder clearPolicyTags() {
      if (policyTagsBuilder_ == null) {
        policyTags_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        policyTagsBuilder_.clear();
      }
      return this;
    }
    /** Removes the policy tag at {@code index}. */
    public Builder removePolicyTags(int index) {
      if (policyTagsBuilder_ == null) {
        ensurePolicyTagsIsMutable();
        policyTags_.remove(index);
        onChanged();
      } else {
        policyTagsBuilder_.remove(index);
      }
      return this;
    }
    /** Returns a mutable sub-builder for the element at {@code index}. */
    public com.google.cloud.datacatalog.v1beta1.PolicyTag.Builder getPolicyTagsBuilder(int index) {
      return getPolicyTagsFieldBuilder().getBuilder(index);
    }
    /** Returns a read view (message or live builder) for the element at {@code index}. */
    public com.google.cloud.datacatalog.v1beta1.PolicyTagOrBuilder getPolicyTagsOrBuilder(
        int index) {
      if (policyTagsBuilder_ == null) {
        return policyTags_.get(index);
      } else {
        return policyTagsBuilder_.getMessageOrBuilder(index);
      }
    }
    /** Returns read views for all elements. */
    public java.util.List<? extends com.google.cloud.datacatalog.v1beta1.PolicyTagOrBuilder>
        getPolicyTagsOrBuilderList() {
      if (policyTagsBuilder_ != null) {
        return policyTagsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(policyTags_);
      }
    }
    /** Appends a default-valued element and returns its builder. */
    public com.google.cloud.datacatalog.v1beta1.PolicyTag.Builder addPolicyTagsBuilder() {
      return getPolicyTagsFieldBuilder()
          .addBuilder(com.google.cloud.datacatalog.v1beta1.PolicyTag.getDefaultInstance());
    }
    /** Inserts a default-valued element at {@code index} and returns its builder. */
    public com.google.cloud.datacatalog.v1beta1.PolicyTag.Builder addPolicyTagsBuilder(int index) {
      return getPolicyTagsFieldBuilder()
          .addBuilder(index, com.google.cloud.datacatalog.v1beta1.PolicyTag.getDefaultInstance());
    }
    /** Returns live builders for all elements. */
    public java.util.List<com.google.cloud.datacatalog.v1beta1.PolicyTag.Builder>
        getPolicyTagsBuilderList() {
      return getPolicyTagsFieldBuilder().getBuilderList();
    }
    // Lazily switches from the plain backing list to a RepeatedFieldBuilderV3;
    // after this, policyTags_ is null and the field builder is authoritative.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.datacatalog.v1beta1.PolicyTag,
            com.google.cloud.datacatalog.v1beta1.PolicyTag.Builder,
            com.google.cloud.datacatalog.v1beta1.PolicyTagOrBuilder>
        getPolicyTagsFieldBuilder() {
      if (policyTagsBuilder_ == null) {
        policyTagsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.datacatalog.v1beta1.PolicyTag,
                com.google.cloud.datacatalog.v1beta1.PolicyTag.Builder,
                com.google.cloud.datacatalog.v1beta1.PolicyTagOrBuilder>(
                policyTags_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        policyTags_ = null;
      }
      return policyTagsBuilder_;
    }
    // ------------------------------------------------------------------
    // protoc-generated accessors for `string next_page_token = 2;`
    // ("Token used to retrieve the next page of results, or empty if there are
    // no more results in the list.")
    // The field is stored as Object so it can hold either a String or a
    // ByteString; getters lazily convert and cache in the requested form.
    // ------------------------------------------------------------------
    private java.lang.Object nextPageToken_ = "";
    /** @return The nextPageToken (decoding and caching a ByteString if needed). */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /** @return The UTF-8 bytes for nextPageToken (encoding and caching a String if needed). */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * Sets the nextPageToken; {@code value} must be non-null.
     *
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Clears nextPageToken back to the message default ("").
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * Sets nextPageToken from raw bytes after validating they are UTF-8.
     *
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // protoc-generated pass-throughs for unknown-field handling.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse)
  // protoc-generated singletons: the immutable default instance and the shared
  // PARSER used by all parseFrom overloads.
  private static final com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse();
  }
  public static com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<ListPolicyTagsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListPolicyTagsResponse>() {
        @java.lang.Override
        public ListPolicyTagsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially-built message so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListPolicyTagsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListPolicyTagsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.datacatalog.v1beta1.ListPolicyTagsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/rocketmq | 37,917 | broker/src/main/java/org/apache/rocketmq/broker/processor/PopReviveService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.broker.processor;
import com.alibaba.fastjson.JSON;
import io.opentelemetry.api.common.Attributes;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.concurrent.CompletableFuture;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple;
import org.apache.rocketmq.broker.BrokerController;
import org.apache.rocketmq.client.consumer.PullResult;
import org.apache.rocketmq.client.consumer.PullStatus;
import org.apache.rocketmq.common.KeyBuilder;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.UtilAll;
import org.apache.rocketmq.common.Pair;
import org.apache.rocketmq.common.PopAckConstants;
import org.apache.rocketmq.common.ServiceThread;
import org.apache.rocketmq.common.TopicConfig;
import org.apache.rocketmq.common.TopicFilterType;
import org.apache.rocketmq.common.constant.LoggerName;
import org.apache.rocketmq.common.message.MessageAccessor;
import org.apache.rocketmq.common.message.MessageConst;
import org.apache.rocketmq.common.message.MessageDecoder;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.common.message.MessageExtBrokerInner;
import org.apache.rocketmq.common.topic.TopicValidator;
import org.apache.rocketmq.common.utils.DataConverter;
import org.apache.rocketmq.logging.org.slf4j.Logger;
import org.apache.rocketmq.logging.org.slf4j.LoggerFactory;
import org.apache.rocketmq.store.AppendMessageStatus;
import org.apache.rocketmq.store.GetMessageResult;
import org.apache.rocketmq.store.PutMessageResult;
import org.apache.rocketmq.store.exception.ConsumeQueueException;
import org.apache.rocketmq.store.pop.AckMsg;
import org.apache.rocketmq.store.pop.BatchAckMsg;
import org.apache.rocketmq.store.pop.PopCheckPoint;
import static org.apache.rocketmq.broker.metrics.BrokerMetricsConstant.LABEL_CONSUMER_GROUP;
import static org.apache.rocketmq.broker.metrics.BrokerMetricsConstant.LABEL_IS_SYSTEM;
import static org.apache.rocketmq.broker.metrics.BrokerMetricsConstant.LABEL_TOPIC;
public class PopReviveService extends ServiceThread {
    private static final Logger POP_LOGGER = LoggerFactory.getLogger(LoggerName.ROCKETMQ_POP_LOGGER_NAME);
    // Backoff schedule (seconds) used when rewriting a checkpoint for a later retry.
    private final int[] ckRewriteIntervalsInSeconds = new int[] { 10, 20, 30, 60, 120, 180, 240, 300, 360, 420, 480, 540, 600, 1200, 1800, 3600, 7200 };
    // Revive-topic queue this service instance is responsible for draining.
    private int queueId;
    private BrokerController brokerController;
    // Internal topic holding pop checkpoints/acks to be revived.
    private String reviveTopic;
    // Store timestamp of the revive message currently being processed; -1 = none yet.
    private long currentReviveMessageTimestamp = -1;
    // Master/slave switch: only the broker that owns the queue should revive.
    private volatile boolean shouldRunPopRevive = false;
    // Old checkpoints with in-flight revive requests, ordered, mapped to
    // (enqueue timestamp, completion result); synchronized for cross-thread access.
    private final NavigableMap<PopCheckPoint/* oldCK */, Pair<Long/* timestamp */, Boolean/* result */>> inflightReviveRequestMap = Collections.synchronizedNavigableMap(new TreeMap<>());
    // Last committed consume offset of the revive queue at startup.
    private long reviveOffset;
    /**
     * Creates a revive service bound to one queue of the given revive topic and
     * seeds {@link #reviveOffset} from the persisted consume offset of the
     * internal revive consumer group.
     */
    public PopReviveService(BrokerController brokerController, String reviveTopic, int queueId) {
        this.queueId = queueId;
        this.brokerController = brokerController;
        this.reviveTopic = reviveTopic;
        this.reviveOffset = brokerController.getConsumerOffsetManager().queryOffset(PopAckConstants.REVIVE_GROUP, reviveTopic, queueId);
    }
@Override
public String getServiceName() {
if (brokerController != null && brokerController.getBrokerConfig().isInBrokerContainer()) {
return brokerController.getBrokerIdentity().getIdentifier() + "PopReviveService_" + this.queueId;
}
return "PopReviveService_" + this.queueId;
}
public int getQueueId() {
return queueId;
}
public void setShouldRunPopRevive(final boolean shouldRunPopRevive) {
this.shouldRunPopRevive = shouldRunPopRevive;
}
public boolean isShouldRunPopRevive() {
return shouldRunPopRevive;
}
    /**
     * Re-delivers one popped-but-unacked message by writing a copy into the
     * consumer group's pop-retry topic (creating the topic on first use).
     * Returns true when the store append succeeded; on success also updates
     * in-flight counters and broker/topic put statistics.
     */
    private boolean reviveRetry(PopCheckPoint popCheckPoint, MessageExt messageExt) {
        MessageExtBrokerInner msgInner = new MessageExtBrokerInner();
        if (!popCheckPoint.getTopic().startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)) {
            // Normal topic: redirect into the per-group pop retry topic.
            msgInner.setTopic(KeyBuilder.buildPopRetryTopic(popCheckPoint.getTopic(), popCheckPoint.getCId(), brokerController.getBrokerConfig().isEnableRetryTopicV2()));
        } else {
            // Already a retry topic: keep writing to the same topic.
            msgInner.setTopic(popCheckPoint.getTopic());
        }
        msgInner.setBody(messageExt.getBody());
        // Retry topics use a single write queue (see addRetryTopicIfNotExist).
        msgInner.setQueueId(0);
        if (messageExt.getTags() != null) {
            msgInner.setTags(messageExt.getTags());
        } else {
            // No tags: still install an (empty) property map on the inner message.
            MessageAccessor.setProperties(msgInner, new HashMap<>());
        }
        msgInner.setBornTimestamp(messageExt.getBornTimestamp());
        msgInner.setFlag(messageExt.getFlag());
        msgInner.setSysFlag(messageExt.getSysFlag());
        msgInner.setBornHost(brokerController.getStoreHost());
        msgInner.setStoreHost(brokerController.getStoreHost());
        msgInner.setReconsumeTimes(messageExt.getReconsumeTimes() + 1);
        msgInner.getProperties().putAll(messageExt.getProperties());
        // Record when the message was first popped, only on the first retry
        // (or when the property is missing from older messages).
        if (messageExt.getReconsumeTimes() == 0 || msgInner.getProperties().get(MessageConst.PROPERTY_FIRST_POP_TIME) == null) {
            msgInner.getProperties().put(MessageConst.PROPERTY_FIRST_POP_TIME, String.valueOf(popCheckPoint.getPopTime()));
        }
        msgInner.setPropertiesString(MessageDecoder.messageProperties2String(msgInner.getProperties()));
        addRetryTopicIfNotExist(msgInner.getTopic(), popCheckPoint.getCId());
        PutMessageResult putMessageResult = brokerController.getEscapeBridge().putMessageToSpecificQueue(msgInner);
        brokerController.getBrokerMetricsManager().getPopMetricsManager().incPopReviveRetryMessageCount(popCheckPoint, putMessageResult.getPutMessageStatus());
        if (brokerController.getBrokerConfig().isEnablePopLog()) {
            POP_LOGGER.info("reviveQueueId={},retry msg, ck={}, msg queueId {}, offset {}, reviveDelay={}, result is {} ",
                queueId, popCheckPoint, messageExt.getQueueId(), messageExt.getQueueOffset(),
                (System.currentTimeMillis() - popCheckPoint.getReviveTime()) / 1000, putMessageResult);
        }
        if (putMessageResult.getAppendMessageResult() == null ||
            putMessageResult.getAppendMessageResult().getStatus() != AppendMessageStatus.PUT_OK) {
            POP_LOGGER.error("reviveQueueId={}, revive error, msg is: {}", queueId, msgInner);
            return false;
        }
        // Successful re-delivery: the message is no longer in flight for pop.
        this.brokerController.getPopInflightMessageCounter().decrementInFlightMessageNum(popCheckPoint);
        this.brokerController.getBrokerStatsManager().incBrokerPutNums(popCheckPoint.getTopic(), 1);
        this.brokerController.getBrokerStatsManager().incTopicPutNums(msgInner.getTopic());
        this.brokerController.getBrokerStatsManager().incTopicPutSize(msgInner.getTopic(), putMessageResult.getAppendMessageResult().getWroteBytes());
        return true;
    }
private void initPopRetryOffset(String topic, String consumerGroup) {
long offset = this.brokerController.getConsumerOffsetManager().queryOffset(consumerGroup, topic, 0);
if (offset < 0) {
this.brokerController.getConsumerOffsetManager().commitOffset("initPopRetryOffset", consumerGroup, topic,
0, 0);
}
}
public void addRetryTopicIfNotExist(String topic, String consumerGroup) {
if (brokerController != null) {
TopicConfig topicConfig = brokerController.getTopicConfigManager().selectTopicConfig(topic);
if (topicConfig != null) {
return;
}
topicConfig = new TopicConfig(topic);
topicConfig.setReadQueueNums(PopAckConstants.retryQueueNum);
topicConfig.setWriteQueueNums(PopAckConstants.retryQueueNum);
topicConfig.setTopicFilterType(TopicFilterType.SINGLE_TAG);
topicConfig.setPerm(6);
topicConfig.setTopicSysFlag(0);
brokerController.getTopicConfigManager().updateTopicConfig(topicConfig);
initPopRetryOffset(topic, consumerGroup);
}
}
    /**
     * Pulls a batch (up to 32 messages) from the revive topic at the given
     * queue and offset.
     *
     * <p>Returns {@code null} when the store returned nothing or when an
     * illegal offset is detected on a node that should not run revive;
     * otherwise returns the found list (possibly {@code null} per
     * {@code PullResult}). On OFFSET_ILLEGAL / NO_MATCHED_MSG the consume
     * offset is corrected to {@code nextBeginOffset - 1} so the next scan
     * starts from a valid position.
     */
    protected List<MessageExt> getReviveMessage(long offset, int queueId) {
        PullResult pullResult = getMessage(PopAckConstants.REVIVE_GROUP, reviveTopic, queueId, offset, 32, true);
        if (pullResult == null) {
            return null;
        }
        if (reachTail(pullResult, offset)) {
            if (this.brokerController.getBrokerConfig().isEnablePopLog()) {
                POP_LOGGER.info("reviveQueueId={}, reach tail,offset {}", queueId, offset);
            }
        } else if (pullResult.getPullStatus() == PullStatus.OFFSET_ILLEGAL || pullResult.getPullStatus() == PullStatus.NO_MATCHED_MSG) {
            POP_LOGGER.error("reviveQueueId={}, OFFSET_ILLEGAL {}, result is {}", queueId, offset, pullResult);
            if (!shouldRunPopRevive) {
                POP_LOGGER.info("slave skip offset correct topic={}, reviveQueueId={}", reviveTopic, queueId);
                return null;
            }
            // Jump over the illegal range: commit nextBeginOffset - 1 because the
            // caller always pulls from "committed offset + 1".
            this.brokerController.getConsumerOffsetManager().commitOffset(PopAckConstants.LOCAL_HOST, PopAckConstants.REVIVE_GROUP, reviveTopic, queueId, pullResult.getNextBeginOffset() - 1);
        }
        return pullResult.getMsgFoundList();
    }
private boolean reachTail(PullResult pullResult, long offset) {
return pullResult.getPullStatus() == PullStatus.NO_NEW_MSG
|| pullResult.getPullStatus() == PullStatus.OFFSET_ILLEGAL && offset == pullResult.getMaxOffset();
}
// Triple<MessageExt, info, needRetry>
public CompletableFuture<Triple<MessageExt, String, Boolean>> getBizMessage(PopCheckPoint popCheckPoint, long offset) {
return this.brokerController.getEscapeBridge().getMessageAsync(popCheckPoint.getTopic(), offset, popCheckPoint.getQueueId(), popCheckPoint.getBrokerName(), false);
}
    /**
     * Reads up to {@code nums} messages from the local store and adapts the
     * store-level {@code GetMessageResult} into a pull-level {@code PullResult}.
     *
     * <p>On FOUND the messages are decoded (optionally decompressed) and
     * consumer/broker statistics plus out-metrics are recorded. Store statuses
     * that indicate a bad offset are mapped to {@code PullStatus.OFFSET_ILLEGAL}.
     * Returns {@code null} when the store itself returned {@code null}.
     */
    public PullResult getMessage(String group, String topic, int queueId, long offset, int nums,
        boolean deCompressBody) {
        GetMessageResult getMessageResult = this.brokerController.getMessageStore().getMessage(group, topic, queueId, offset, nums, null);
        if (getMessageResult != null) {
            PullStatus pullStatus = PullStatus.NO_NEW_MSG;
            List<MessageExt> foundList = null;
            switch (getMessageResult.getStatus()) {
                case FOUND:
                    pullStatus = PullStatus.FOUND;
                    foundList = decodeMsgList(getMessageResult, deCompressBody);
                    // Account pull statistics before building the metric attributes.
                    brokerController.getBrokerStatsManager().incGroupGetNums(group, topic, getMessageResult.getMessageCount());
                    brokerController.getBrokerStatsManager().incGroupGetSize(group, topic, getMessageResult.getBufferTotalSize());
                    brokerController.getBrokerStatsManager().incBrokerGetNums(topic, getMessageResult.getMessageCount());
                    // Fall-behind time is measured against the last decoded message.
                    brokerController.getBrokerStatsManager().recordDiskFallBehindTime(group, topic, queueId,
                        brokerController.getMessageStore().now() - foundList.get(foundList.size() - 1).getStoreTimestamp());
                    Attributes attributes = this.brokerController.getBrokerMetricsManager().newAttributesBuilder()
                        .put(LABEL_TOPIC, topic)
                        .put(LABEL_CONSUMER_GROUP, group)
                        .put(LABEL_IS_SYSTEM, TopicValidator.isSystemTopic(topic) || MixAll.isSysConsumerGroup(group))
                        .build();
                    this.brokerController.getBrokerMetricsManager().getMessagesOutTotal().add(getMessageResult.getMessageCount(), attributes);
                    this.brokerController.getBrokerMetricsManager().getThroughputOutTotal().add(getMessageResult.getBufferTotalSize(), attributes);
                    break;
                case NO_MATCHED_MESSAGE:
                    pullStatus = PullStatus.NO_MATCHED_MSG;
                    POP_LOGGER.debug("no matched message. GetMessageStatus={}, topic={}, groupId={}, requestOffset={}",
                        getMessageResult.getStatus(), topic, group, offset);
                    break;
                case NO_MESSAGE_IN_QUEUE:
                    // Keeps the default NO_NEW_MSG status.
                    POP_LOGGER.debug("no new message. GetMessageStatus={}, topic={}, groupId={}, requestOffset={}",
                        getMessageResult.getStatus(), topic, group, offset);
                    break;
                case MESSAGE_WAS_REMOVING:
                case NO_MATCHED_LOGIC_QUEUE:
                case OFFSET_FOUND_NULL:
                case OFFSET_OVERFLOW_BADLY:
                case OFFSET_TOO_SMALL:
                    pullStatus = PullStatus.OFFSET_ILLEGAL;
                    POP_LOGGER.warn("offset illegal. GetMessageStatus={}, topic={}, groupId={}, requestOffset={}",
                        getMessageResult.getStatus(), topic, group, offset);
                    break;
                case OFFSET_OVERFLOW_ONE:
                    // no need to print WARN, because we use "offset + 1" to get the next message
                    pullStatus = PullStatus.OFFSET_ILLEGAL;
                    break;
                default:
                    assert false;
                    break;
            }
            return new PullResult(pullStatus, getMessageResult.getNextBeginOffset(), getMessageResult.getMinOffset(),
                getMessageResult.getMaxOffset(), foundList);
        } else {
            try {
                // A null store result with messages still in the queue is abnormal;
                // log it for diagnosis.
                long maxQueueOffset = brokerController.getMessageStore().getMaxOffsetInQueue(topic, queueId);
                if (maxQueueOffset > offset) {
                    POP_LOGGER.error("get message from store return null. topic={}, groupId={}, requestOffset={}, maxQueueOffset={}",
                        topic, group, offset, maxQueueOffset);
                }
            } catch (ConsumeQueueException e) {
                POP_LOGGER.error("Failed to get max offset in queue", e);
            }
            return null;
        }
    }
private List<MessageExt> decodeMsgList(GetMessageResult getMessageResult, boolean deCompressBody) {
List<MessageExt> foundList = new ArrayList<>();
try {
List<ByteBuffer> messageBufferList = getMessageResult.getMessageBufferList();
if (messageBufferList != null) {
for (int i = 0; i < messageBufferList.size(); i++) {
ByteBuffer bb = messageBufferList.get(i);
if (bb == null) {
POP_LOGGER.error("bb is null {}", getMessageResult);
continue;
}
MessageExt msgExt = MessageDecoder.decode(bb, true, deCompressBody);
if (msgExt == null) {
POP_LOGGER.error("decode msgExt is null {}", getMessageResult);
continue;
}
// use CQ offset, not offset in Message
msgExt.setQueueOffset(getMessageResult.getMessageQueueOffset().get(i));
foundList.add(msgExt);
}
}
} finally {
getMessageResult.release();
}
return foundList;
}
    /**
     * Scans the revive queue starting after the last committed offset and fills
     * {@code consumeReviveObj.map} with checkpoints (CK-tagged messages),
     * flipping bitmap bits for every ack / batch-ack message encountered.
     *
     * <p>Acks that arrive without a matching checkpoint may get a mock
     * checkpoint (see {@link #mockCkForAck}) when skipping long-awaiting acks
     * is enabled; those are merged into the map at the end. The scan stops when
     * the configured scan time is exceeded, when no messages appear for a quiet
     * period past the ack window, or when this node should not run revive.
     */
    protected void consumeReviveMessage(ConsumeReviveObj consumeReviveObj) {
        HashMap<String, PopCheckPoint> map = consumeReviveObj.map;
        // Mock checkpoints created for acks whose real CK never showed up.
        HashMap<String, PopCheckPoint> mockPointMap = new HashMap<>();
        long startScanTime = System.currentTimeMillis();
        // Deliver time of the newest revive message seen so far.
        long endTime = 0;
        long consumeOffset = this.brokerController.getConsumerOffsetManager().queryOffset(PopAckConstants.REVIVE_GROUP, reviveTopic, queueId);
        long oldOffset = Math.max(reviveOffset, consumeOffset);
        consumeReviveObj.oldOffset = oldOffset;
        POP_LOGGER.info("reviveQueueId={}, old offset is {} ", queueId, oldOffset);
        long offset = oldOffset + 1;
        int noMsgCount = 0;
        // Revive time of the first checkpoint seen this round.
        long firstRt = 0;
        // offset self amend
        while (true) {
            if (!shouldRunPopRevive) {
                POP_LOGGER.info("slave skip scan, revive topic={}, reviveQueueId={}", reviveTopic, queueId);
                break;
            }
            List<MessageExt> messageExts = getReviveMessage(offset, queueId);
            if (messageExts == null || messageExts.isEmpty()) {
                long old = endTime;
                long timerDelay = brokerController.getMessageStore().getTimerMessageStore().getDequeueBehind();
                long commitLogDelay = brokerController.getMessageStore().getTimerMessageStore().getEnqueueBehind();
                // move endTime
                if (endTime != 0 && System.currentTimeMillis() - endTime > 3 * PopAckConstants.SECOND && timerDelay <= 0 && commitLogDelay <= 0) {
                    endTime = System.currentTimeMillis();
                }
                POP_LOGGER.debug("reviveQueueId={}, offset is {}, can not get new msg, old endTime {}, new endTime {}, timerDelay={}, commitLogDelay={} ",
                    queueId, offset, old, endTime, timerDelay, commitLogDelay);
                // Quiet for longer than the ack window past the first checkpoint:
                // this round is complete.
                if (endTime - firstRt > PopAckConstants.ackTimeInterval + PopAckConstants.SECOND) {
                    break;
                }
                noMsgCount++;
                // Fixme: why sleep is useful here?
                try {
                    Thread.sleep(100);
                } catch (Throwable ignore) {
                }
                // Give up after roughly 4 seconds without any message.
                if (noMsgCount * 100L > 4 * PopAckConstants.SECOND) {
                    break;
                } else {
                    continue;
                }
            } else {
                noMsgCount = 0;
            }
            if (System.currentTimeMillis() - startScanTime > brokerController.getBrokerConfig().getReviveScanTime()) {
                POP_LOGGER.info("reviveQueueId={}, scan timeout ", queueId);
                break;
            }
            for (MessageExt messageExt : messageExts) {
                if (PopAckConstants.CK_TAG.equals(messageExt.getTags())) {
                    // Checkpoint message: register it under its merge key.
                    String raw = new String(messageExt.getBody(), DataConverter.CHARSET_UTF8);
                    if (brokerController.getBrokerConfig().isEnablePopLog()) {
                        POP_LOGGER.info("reviveQueueId={},find ck, offset:{}, raw : {}", messageExt.getQueueId(), messageExt.getQueueOffset(), raw);
                    }
                    PopCheckPoint point = JSON.parseObject(raw, PopCheckPoint.class);
                    if (point.getTopic() == null || point.getCId() == null) {
                        continue;
                    }
                    map.put(point.getTopic() + point.getCId() + point.getQueueId() + point.getStartOffset() + point.getPopTime() + point.getBrokerName(), point);
                    brokerController.getBrokerMetricsManager().getPopMetricsManager().incPopReviveCkGetCount(point, queueId);
                    point.setReviveOffset(messageExt.getQueueOffset());
                    if (firstRt == 0) {
                        firstRt = point.getReviveTime();
                    }
                } else if (PopAckConstants.ACK_TAG.equals(messageExt.getTags())) {
                    // Single ack: set the matching bit on its checkpoint's bitmap.
                    String raw = new String(messageExt.getBody(), StandardCharsets.UTF_8);
                    if (brokerController.getBrokerConfig().isEnablePopLog()) {
                        POP_LOGGER.info("reviveQueueId={}, find ack, offset:{}, raw : {}", messageExt.getQueueId(), messageExt.getQueueOffset(), raw);
                    }
                    AckMsg ackMsg = JSON.parseObject(raw, AckMsg.class);
                    brokerController.getBrokerMetricsManager().getPopMetricsManager().incPopReviveAckGetCount(ackMsg, queueId);
                    String brokerName = StringUtils.isNotBlank(ackMsg.getBrokerName()) ?
                        ackMsg.getBrokerName() : brokerController.getBrokerConfig().getBrokerName();
                    String mergeKey = ackMsg.getTopic() + ackMsg.getConsumerGroup() + ackMsg.getQueueId() + ackMsg.getStartOffset() + ackMsg.getPopTime() + brokerName;
                    PopCheckPoint point = map.get(mergeKey);
                    if (point == null) {
                        if (!brokerController.getBrokerConfig().isEnableSkipLongAwaitingAck()) {
                            continue;
                        }
                        if (mockCkForAck(messageExt, ackMsg, mergeKey, mockPointMap) && firstRt == 0) {
                            firstRt = mockPointMap.get(mergeKey).getReviveTime();
                        }
                    } else {
                        int indexOfAck = point.indexOfAck(ackMsg.getAckOffset());
                        if (indexOfAck > -1) {
                            point.setBitMap(DataConverter.setBit(point.getBitMap(), indexOfAck, true));
                        } else {
                            POP_LOGGER.error("invalid ack index, {}, {}", ackMsg, point);
                        }
                    }
                } else if (PopAckConstants.BATCH_ACK_TAG.equals(messageExt.getTags())) {
                    // Batch ack: set one bit per acked offset.
                    String raw = new String(messageExt.getBody(), StandardCharsets.UTF_8);
                    if (brokerController.getBrokerConfig().isEnablePopLog()) {
                        POP_LOGGER.info("reviveQueueId={}, find batch ack, offset:{}, raw : {}", messageExt.getQueueId(), messageExt.getQueueOffset(), raw);
                    }
                    BatchAckMsg bAckMsg = JSON.parseObject(raw, BatchAckMsg.class);
                    brokerController.getBrokerMetricsManager().getPopMetricsManager().incPopReviveAckGetCount(bAckMsg, queueId);
                    String brokerName = StringUtils.isNotBlank(bAckMsg.getBrokerName()) ?
                        bAckMsg.getBrokerName() : brokerController.getBrokerConfig().getBrokerName();
                    String mergeKey = bAckMsg.getTopic() + bAckMsg.getConsumerGroup() + bAckMsg.getQueueId() + bAckMsg.getStartOffset() + bAckMsg.getPopTime() + brokerName;
                    PopCheckPoint point = map.get(mergeKey);
                    if (point == null) {
                        if (!brokerController.getBrokerConfig().isEnableSkipLongAwaitingAck()) {
                            continue;
                        }
                        if (mockCkForAck(messageExt, bAckMsg, mergeKey, mockPointMap) && firstRt == 0) {
                            firstRt = mockPointMap.get(mergeKey).getReviveTime();
                        }
                    } else {
                        List<Long> ackOffsetList = bAckMsg.getAckOffsetList();
                        for (Long ackOffset : ackOffsetList) {
                            int indexOfAck = point.indexOfAck(ackOffset);
                            if (indexOfAck > -1) {
                                point.setBitMap(DataConverter.setBit(point.getBitMap(), indexOfAck, true));
                            } else {
                                POP_LOGGER.error("invalid batch ack index, {}, {}", bAckMsg, point);
                            }
                        }
                    }
                }
                long deliverTime = messageExt.getDeliverTimeMs();
                if (deliverTime > endTime) {
                    endTime = deliverTime;
                }
            }
            offset = offset + messageExts.size();
        }
        consumeReviveObj.map.putAll(mockPointMap);
        consumeReviveObj.endTime = endTime;
    }
private boolean mockCkForAck(MessageExt messageExt, AckMsg ackMsg, String mergeKey, HashMap<String, PopCheckPoint> mockPointMap) {
long ackWaitTime = System.currentTimeMillis() - messageExt.getDeliverTimeMs();
long reviveAckWaitMs = brokerController.getBrokerConfig().getReviveAckWaitMs();
if (ackWaitTime > reviveAckWaitMs) {
// will use the reviveOffset of popCheckPoint to commit offset in mergeAndRevive
PopCheckPoint mockPoint = createMockCkForAck(ackMsg, messageExt.getQueueOffset());
POP_LOGGER.warn(
"ack wait for {}ms cannot find ck, skip this ack. mergeKey:{}, ack:{}, mockCk:{}",
reviveAckWaitMs, mergeKey, ackMsg, mockPoint);
mockPointMap.put(mergeKey, mockPoint);
return true;
}
return false;
}
private PopCheckPoint createMockCkForAck(AckMsg ackMsg, long reviveOffset) {
PopCheckPoint point = new PopCheckPoint();
point.setStartOffset(ackMsg.getStartOffset());
point.setPopTime(ackMsg.getPopTime());
point.setQueueId(ackMsg.getQueueId());
point.setCId(ackMsg.getConsumerGroup());
point.setTopic(ackMsg.getTopic());
point.setNum((byte) 0);
point.setBitMap(0);
point.setReviveOffset(reviveOffset);
point.setBrokerName(ackMsg.getBrokerName());
return point;
}
    /**
     * Processes the checkpoints collected by {@link #consumeReviveMessage}:
     * sorts them by revive offset, skips any whose normal topic or subscription
     * group no longer exists, revives the un-acked messages of the rest, and
     * finally commits the advanced revive offset.
     *
     * <p>Stops early when a checkpoint is still inside the ack grace window
     * (its revive time is too close to {@code endTime}) or when this node
     * should not run revive. Throttles when more than 3 revive requests are
     * in flight, re-putting checkpoints that have been stuck for over 30s.
     */
    protected void mergeAndRevive(ConsumeReviveObj consumeReviveObj) throws Throwable {
        ArrayList<PopCheckPoint> sortList = consumeReviveObj.genSortList();
        POP_LOGGER.info("reviveQueueId={}, ck listSize={}", queueId, sortList.size());
        if (sortList.size() != 0) {
            POP_LOGGER.info("reviveQueueId={}, 1st ck, startOffset={}, reviveOffset={}; last ck, startOffset={}, reviveOffset={}", queueId, sortList.get(0).getStartOffset(),
                sortList.get(0).getReviveOffset(), sortList.get(sortList.size() - 1).getStartOffset(), sortList.get(sortList.size() - 1).getReviveOffset());
        }
        long newOffset = consumeReviveObj.oldOffset;
        for (PopCheckPoint popCheckPoint : sortList) {
            if (!shouldRunPopRevive) {
                POP_LOGGER.info("slave skip ck process, revive topic={}, reviveQueueId={}", reviveTopic, queueId);
                break;
            }
            // Checkpoints newer than the ack window may still receive acks; stop
            // here and retry them next round (the list is sorted, so all later
            // entries are at least as new).
            if (consumeReviveObj.endTime - popCheckPoint.getReviveTime() <= (PopAckConstants.ackTimeInterval + PopAckConstants.SECOND)) {
                break;
            }
            // check normal topic, skip ck , if normal topic is not exist
            String normalTopic = KeyBuilder.parseNormalTopic(popCheckPoint.getTopic(), popCheckPoint.getCId());
            if (brokerController.getTopicConfigManager().selectTopicConfig(normalTopic) == null) {
                POP_LOGGER.warn("reviveQueueId={}, can not get normal topic {}, then continue", queueId, popCheckPoint.getTopic());
                newOffset = popCheckPoint.getReviveOffset();
                continue;
            }
            if (null == brokerController.getSubscriptionGroupManager().findSubscriptionGroupConfig(popCheckPoint.getCId())) {
                POP_LOGGER.warn("reviveQueueId={}, can not get cid {}, then continue", queueId, popCheckPoint.getCId());
                newOffset = popCheckPoint.getReviveOffset();
                continue;
            }
            // Backpressure: at most 3 concurrent revive requests; evict entries
            // stuck (not done) for more than 30 seconds by re-putting their CK.
            while (inflightReviveRequestMap.size() > 3) {
                waitForRunning(100);
                Pair<Long, Boolean> pair = inflightReviveRequestMap.firstEntry().getValue();
                if (!pair.getObject2() && System.currentTimeMillis() - pair.getObject1() > 1000 * 30) {
                    PopCheckPoint oldCK = inflightReviveRequestMap.firstKey();
                    rePutCK(oldCK, pair);
                    inflightReviveRequestMap.remove(oldCK);
                    POP_LOGGER.warn("stay too long, remove from reviveRequestMap, {}, {}, {}, {}", popCheckPoint.getTopic(),
                        popCheckPoint.getBrokerName(), popCheckPoint.getQueueId(), popCheckPoint.getStartOffset());
                }
            }
            reviveMsgFromCk(popCheckPoint);
            newOffset = popCheckPoint.getReviveOffset();
        }
        if (newOffset > consumeReviveObj.oldOffset) {
            if (!shouldRunPopRevive) {
                POP_LOGGER.info("slave skip commit, revive topic={}, reviveQueueId={}", reviveTopic, queueId);
                return;
            }
            this.brokerController.getConsumerOffsetManager().commitOffset(PopAckConstants.LOCAL_HOST, PopAckConstants.REVIVE_GROUP, reviveTopic, queueId, newOffset);
        }
        reviveOffset = newOffset;
        consumeReviveObj.newOffset = newOffset;
    }
    /**
     * Asynchronously re-delivers every message of the checkpoint whose ack bit
     * is not set. Messages that cannot be fetched or re-put get the checkpoint
     * re-written via {@link #rePutCK}. When all futures complete, the
     * checkpoint is marked done in {@code inflightReviveRequestMap}, and the
     * revive offsets of finished entries at the head of that map are committed
     * in order (stopping at the first unfinished entry).
     */
    private void reviveMsgFromCk(PopCheckPoint popCheckPoint) {
        if (!shouldRunPopRevive) {
            POP_LOGGER.info("slave skip retry, revive topic={}, reviveQueueId={}", reviveTopic, queueId);
            return;
        }
        // Pair = (enqueue timestamp, done flag).
        inflightReviveRequestMap.put(popCheckPoint, new Pair<>(System.currentTimeMillis(), false));
        List<CompletableFuture<Pair<Long, Boolean>>> futureList = new ArrayList<>(popCheckPoint.getNum());
        for (int j = 0; j < popCheckPoint.getNum(); j++) {
            // Bit set means this message was already acked; skip it.
            if (DataConverter.getBit(popCheckPoint.getBitMap(), j)) {
                continue;
            }
            // retry msg
            long msgOffset = popCheckPoint.ackOffsetByIndex((byte) j);
            CompletableFuture<Pair<Long, Boolean>> future = getBizMessage(popCheckPoint, msgOffset)
                .thenApply(rst -> {
                    MessageExt message = rst.getLeft();
                    if (message == null) {
                        POP_LOGGER.info("reviveQueueId={}, can not get biz msg, topic:{}, qid:{}, offset:{}, brokerName:{}, info:{}, retry:{}, then continue",
                            queueId, popCheckPoint.getTopic(), popCheckPoint.getQueueId(), msgOffset, popCheckPoint.getBrokerName(), UtilAll.frontStringAtLeast(rst.getMiddle(), 60), rst.getRight());
                        return new Pair<>(msgOffset, !rst.getRight()); // Pair.object2 means OK or not, Triple.right value means needRetry
                    }
                    boolean result = reviveRetry(popCheckPoint, message);
                    return new Pair<>(msgOffset, result);
                });
            futureList.add(future);
        }
        CompletableFuture.allOf(futureList.toArray(new CompletableFuture[0]))
            .whenComplete((v, e) -> {
                // Re-put the CK for every message that failed to revive.
                for (CompletableFuture<Pair<Long, Boolean>> future : futureList) {
                    Pair<Long, Boolean> pair = future.getNow(new Pair<>(0L, false));
                    if (!pair.getObject2()) {
                        rePutCK(popCheckPoint, pair);
                    }
                }
                if (inflightReviveRequestMap.containsKey(popCheckPoint)) {
                    inflightReviveRequestMap.get(popCheckPoint).setObject2(true);
                }
                // Commit offsets for the finished prefix of the in-flight map,
                // preserving order: stop at the first unfinished checkpoint.
                for (Map.Entry<PopCheckPoint, Pair<Long, Boolean>> entry : inflightReviveRequestMap.entrySet()) {
                    PopCheckPoint oldCK = entry.getKey();
                    Pair<Long, Boolean> pair = entry.getValue();
                    if (pair.getObject2()) {
                        brokerController.getConsumerOffsetManager().commitOffset(PopAckConstants.LOCAL_HOST, PopAckConstants.REVIVE_GROUP, reviveTopic, queueId, oldCK.getReviveOffset());
                        inflightReviveRequestMap.remove(oldCK);
                    } else {
                        break;
                    }
                }
            });
    }
    /**
     * Re-writes a single-message checkpoint back into the revive queue so the
     * message is retried later. The re-put counter is incremented each time;
     * when it exceeds the rewrite-interval table and the broker is configured
     * to skip, the checkpoint is dropped with a warning instead.
     */
    private void rePutCK(PopCheckPoint oldCK, Pair<Long, Boolean> pair) {
        int rePutTimes = oldCK.parseRePutTimes();
        if (rePutTimes >= ckRewriteIntervalsInSeconds.length && brokerController.getBrokerConfig().isSkipWhenCKRePutReachMaxTimes()) {
            POP_LOGGER.warn("rePut CK reach max times, drop it. {}, {}, {}, {}-{}, {}, {}, {}", oldCK.getTopic(), oldCK.getCId(),
                oldCK.getBrokerName(), oldCK.getQueueId(), pair.getObject1(), oldCK.getPopTime(), oldCK.getInvisibleTime(), rePutTimes);
            return;
        }
        // New CK covers exactly one message: the failed offset from `pair`.
        PopCheckPoint newCk = new PopCheckPoint();
        newCk.setBitMap(0);
        newCk.setNum((byte) 1);
        newCk.setPopTime(oldCK.getPopTime());
        newCk.setInvisibleTime(oldCK.getInvisibleTime());
        newCk.setStartOffset(pair.getObject1());
        newCk.setCId(oldCK.getCId());
        newCk.setTopic(oldCK.getTopic());
        newCk.setQueueId(oldCK.getQueueId());
        newCk.setBrokerName(oldCK.getBrokerName());
        newCk.addDiff(0);
        newCk.setRePutTimes(String.valueOf(rePutTimes + 1)); // always increment even if removed from reviveRequestMap
        if (oldCK.getReviveTime() <= System.currentTimeMillis()) {
            // never expect an ACK matched in the future, we just use it to rewrite CK and try to revive retry message next time
            // NOTE(review): interval is clamped to the last table entry; the
            // `* 1000` is int arithmetic — assumed small interval values, confirm.
            int intervalIndex = rePutTimes >= ckRewriteIntervalsInSeconds.length ? ckRewriteIntervalsInSeconds.length - 1 : rePutTimes;
            newCk.setInvisibleTime(oldCK.getInvisibleTime() + ckRewriteIntervalsInSeconds[intervalIndex] * 1000);
        }
        MessageExtBrokerInner ckMsg = brokerController.getPopMessageProcessor().buildCkMsg(newCk, queueId);
        brokerController.getMessageStore().putMessage(ckMsg);
    }
public long getReviveBehindMillis() throws ConsumeQueueException {
if (currentReviveMessageTimestamp <= 0) {
return 0;
}
long maxOffset = brokerController.getMessageStore().getMaxOffsetInQueue(reviveTopic, queueId);
if (maxOffset - reviveOffset > 1) {
return Math.max(0, System.currentTimeMillis() - currentReviveMessageTimestamp);
}
return 0;
}
public long getReviveBehindMessages() throws ConsumeQueueException {
if (currentReviveMessageTimestamp <= 0) {
return 0;
}
// the next pull offset is reviveOffset + 1
long diff = brokerController.getMessageStore().getMaxOffsetInQueue(reviveTopic, queueId) - reviveOffset - 1;
return Math.max(0, diff);
}
    /**
     * Service loop: waits out the broker warm-up period, then repeatedly scans
     * the revive queue ({@link #consumeReviveMessage}) and processes the
     * collected checkpoints ({@link #mergeAndRevive}). When a round finds
     * nothing, the wait between rounds backs off linearly up to
     * {@code reviveMaxSlow} multiples of the revive interval. Requires the
     * timer wheel to be enabled; skipped entirely on nodes that should not run
     * revive. Any error in a round is logged and the loop continues.
     */
    @Override
    public void run() {
        int slow = 1;
        while (!this.isStopped()) {
            try {
                if (System.currentTimeMillis() < brokerController.getShouldStartTime()) {
                    POP_LOGGER.info("PopReviveService Ready to run after {}", brokerController.getShouldStartTime());
                    this.waitForRunning(1000);
                    continue;
                }
                this.waitForRunning(brokerController.getBrokerConfig().getReviveInterval());
                if (!shouldRunPopRevive) {
                    POP_LOGGER.info("skip start revive topic={}, reviveQueueId={}", reviveTopic, queueId);
                    continue;
                }
                if (!brokerController.getMessageStore().getMessageStoreConfig().isTimerWheelEnable()) {
                    POP_LOGGER.warn("skip revive topic because timerWheelEnable is false");
                    continue;
                }
                POP_LOGGER.info("start revive topic={}, reviveQueueId={}", reviveTopic, queueId);
                ConsumeReviveObj consumeReviveObj = new ConsumeReviveObj();
                consumeReviveMessage(consumeReviveObj);
                if (!shouldRunPopRevive) {
                    POP_LOGGER.info("slave skip scan, revive topic={}, reviveQueueId={}", reviveTopic, queueId);
                    continue;
                }
                mergeAndRevive(consumeReviveObj);
                ArrayList<PopCheckPoint> sortList = consumeReviveObj.sortList;
                long delay = 0;
                if (sortList != null && !sortList.isEmpty()) {
                    // Work was found: record its age and reset the backoff.
                    delay = (System.currentTimeMillis() - sortList.get(0).getReviveTime()) / 1000;
                    currentReviveMessageTimestamp = sortList.get(0).getReviveTime();
                    slow = 1;
                } else {
                    currentReviveMessageTimestamp = System.currentTimeMillis();
                }
                POP_LOGGER.info("reviveQueueId={}, revive finish,old offset is {}, new offset is {}, ckDelay={} ",
                    queueId, consumeReviveObj.oldOffset, consumeReviveObj.newOffset, delay);
                if (sortList == null || sortList.isEmpty()) {
                    // Nothing to do: back off linearly up to reviveMaxSlow.
                    POP_LOGGER.info("reviveQueueId={}, has no new msg, take a rest {}", queueId, slow);
                    this.waitForRunning(slow * brokerController.getBrokerConfig().getReviveInterval());
                    if (slow < brokerController.getBrokerConfig().getReviveMaxSlow()) {
                        slow++;
                    }
                }
            } catch (Throwable e) {
                POP_LOGGER.error("reviveQueueId={}, revive error", queueId, e);
            }
        }
    }
static class ConsumeReviveObj {
HashMap<String, PopCheckPoint> map = new HashMap<>();
ArrayList<PopCheckPoint> sortList;
long oldOffset;
long endTime;
long newOffset;
ArrayList<PopCheckPoint> genSortList() {
if (sortList != null) {
return sortList;
}
sortList = new ArrayList<>(map.values());
sortList.sort((o1, o2) -> (int) (o1.getReviveOffset() - o2.getReviveOffset()));
return sortList;
}
}
}
|
openjdk/jdk8 | 37,250 | jaxp/src/com/sun/org/apache/xerces/internal/impl/Constants.java | /*
* Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
*/
/*
* Copyright 2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.impl;
import com.sun.org.apache.xerces.internal.utils.SecuritySupport;
import java.util.Enumeration;
import java.util.NoSuchElementException;
/**
* Commonly used constants.
*
* @xerces.internal
*
* @author Andy Clark, IBM
*
* @version $Id: Constants.java,v 1.14 2010-11-01 04:39:40 joehw Exp $
*/
public final class Constants {
    //
    // Constants
    //
    // Schema Types:
    /** Namespace URI identifying W3C XML Schema as a schema language. */
    public static final String NS_XMLSCHEMA = "http://www.w3.org/2001/XMLSchema".intern();
    /** Namespace URI identifying DTD as a schema language. */
    public static final String NS_DTD = "http://www.w3.org/TR/REC-xml".intern();
    // Schema features
    /** Prefix for Sun-specific schema feature names. */
    public static final String SUN_SCHEMA_FEATURE_PREFIX = "http://java.sun.com/xml/schema/features/";
    /** Feature name suffix: report ignorable element content whitespace. */
    public static final String SUN_REPORT_IGNORED_ELEMENT_CONTENT_WHITESPACE = "report-ignored-element-content-whitespace";
    //stax properties
    /** Prefix for StAX ("Zephyr") implementation property names. */
    public static final String ZEPHYR_PROPERTY_PREFIX = "http://java.sun.com/xml/stream/properties/" ;
    public static final String STAX_PROPERTIES = "stax-properties" ;
    /** Property name suffix: entity resolver used internally by StAX. */
    public static final String STAX_ENTITY_RESOLVER_PROPERTY = "internal/stax-entity-resolver";
    /** Property name suffix: whether CDATA sections are reported as distinct events. */
    public static final String STAX_REPORT_CDATA_EVENT = "report-cdata-event";
    public static final String READER_IN_DEFINED_STATE = ZEPHYR_PROPERTY_PREFIX + "reader-in-defined-state" ;
    // NOTE(review): the value contains a misspelling ("attrbiute"); changing it
    // would change the property key seen by callers, so it is left as is.
    public static final String ADD_NAMESPACE_DECL_AS_ATTRIBUTE = "add-namespacedecl-as-attrbiute";
    public static final String ESCAPE_CHARACTERS = "escapeCharacters";
    public static final String REUSE_INSTANCE = "reuse-instance" ;
    //DOM properties
    /** Prefix for Sun-specific DOM property names. */
    public static final String SUN_DOM_PROPERTY_PREFIX = "http://java.sun.com/xml/dom/properties/" ;
    // NOTE(review): identifier is misspelled ("CHECCK"); renaming would break
    // compiled callers, so the name is preserved.
    public static final String SUN_DOM_ANCESTOR_CHECCK = "ancestor-check";
    /**
     * If true, ignore DOCTYPE declaration as if it wasn't present at all.
     * Note that this is a violation of the XML recommendation.
     * The full property name is prefixed by {@link #ZEPHYR_PROPERTY_PREFIX}.
     */
    public static final String IGNORE_EXTERNAL_DTD = "ignore-external-dtd";
    // sax features
    /** SAX feature prefix ("http://xml.org/sax/features/"). */
    public static final String SAX_FEATURE_PREFIX = "http://xml.org/sax/features/";
    /** Namespaces feature ("namespaces"). */
    public static final String NAMESPACES_FEATURE = "namespaces";
    /** Namespace prefixes feature ("namespace-prefixes"). */
    public static final String NAMESPACE_PREFIXES_FEATURE = "namespace-prefixes";
    /** String interning feature ("string-interning"). */
    public static final String STRING_INTERNING_FEATURE = "string-interning";
    /** Validation feature ("validation"). */
    public static final String VALIDATION_FEATURE = "validation";
    /** External general entities feature ("external-general-entities "). */
    public static final String EXTERNAL_GENERAL_ENTITIES_FEATURE = "external-general-entities";
    /** External parameter entities feature ("external-parameter-entities "). */
    public static final String EXTERNAL_PARAMETER_ENTITIES_FEATURE = "external-parameter-entities";
    /** Lexical handler parameter entities feature ("lexical-handler/parameter-entities"). */
    public static final String LEXICAL_HANDLER_PARAMETER_ENTITIES_FEATURE = "lexical-handler/parameter-entities";
    /** Is standalone feature ("is-standalone"). */
    public static final String IS_STANDALONE_FEATURE = "is-standalone";
    /** Resolve DTD URIs feature ("resolve-dtd-uris"). */
    public static final String RESOLVE_DTD_URIS_FEATURE = "resolve-dtd-uris";
    /** Use Attributes2 feature ("use-attributes2"). */
    public static final String USE_ATTRIBUTES2_FEATURE = "use-attributes2";
    /** Use Locator2 feature ("use-locator2"). */
    public static final String USE_LOCATOR2_FEATURE = "use-locator2";
    /** Use EntityResolver2 feature ("use-entity-resolver2"). */
    public static final String USE_ENTITY_RESOLVER2_FEATURE = "use-entity-resolver2";
    /** Unicode normalization checking feature ("unicode-normalization-checking"). */
    public static final String UNICODE_NORMALIZATION_CHECKING_FEATURE = "unicode-normalization-checking";
    /** xmlns URIs feature ("xmlns-uris"). */
    public static final String XMLNS_URIS_FEATURE = "xmlns-uris";
    /** XML 1.1 feature ("xml-1.1"). */
    public static final String XML_11_FEATURE = "xml-1.1";
    /** Allow unparsed entity and notation declaration events to be sent after the end DTD event ("allow-dtd-events-after-endDTD") */
    public static final String ALLOW_DTD_EVENTS_AFTER_ENDDTD_FEATURE = "allow-dtd-events-after-endDTD";
    // sax properties
    /** SAX property prefix ("http://xml.org/sax/properties/"). */
    public static final String SAX_PROPERTY_PREFIX = "http://xml.org/sax/properties/";
    /** Declaration handler property ("declaration-handler"). */
    public static final String DECLARATION_HANDLER_PROPERTY = "declaration-handler";
    /** Lexical handler property ("lexical-handler"). */
    public static final String LEXICAL_HANDLER_PROPERTY = "lexical-handler";
    /** DOM node property ("dom-node"). */
    public static final String DOM_NODE_PROPERTY = "dom-node";
    /** XML string property ("xml-string"). */
    public static final String XML_STRING_PROPERTY = "xml-string";
    /** JAXP secure-processing feature URI (see javax.xml.XMLConstants). */
    public static final String FEATURE_SECURE_PROCESSING = "http://javax.xml.XMLConstants/feature/secure-processing";
    // Oracle Feature:
    /**
     * <p>Use Service Mechanism</p>
     *
     * <ul>
     * <li>
     * {@code true} instruct an object to use service mechanism to
     * find a service implementation. This is the default behavior.
     * </li>
     * <li>
     * {@code false} instruct an object to skip service mechanism and
     * use the default implementation for that service.
     * </li>
     * </ul>
     */
    public static final String ORACLE_FEATURE_SERVICE_MECHANISM = "http://www.oracle.com/feature/use-service-mechanism";
    /** Document XML version property ("document-xml-version"). */
    public static final String DOCUMENT_XML_VERSION_PROPERTY = "document-xml-version";
    //
    // JAXP properties
    //
    /** JAXP property prefix ("http://java.sun.com/xml/jaxp/properties/"). */
    public static final String JAXP_PROPERTY_PREFIX =
        "http://java.sun.com/xml/jaxp/properties/";
    /** JAXP schemaSource property: when used internally may include DTD sources (DOM) */
    public static final String SCHEMA_SOURCE = "schemaSource";
    /** JAXP schemaSource language: when used internally may include DTD namespace (DOM) */
    public static final String SCHEMA_LANGUAGE = "schemaLanguage";
    /** JAXP Standard property prefix ("http://javax.xml.XMLConstants/property/"). */
    public static final String JAXPAPI_PROPERTY_PREFIX =
        "http://javax.xml.XMLConstants/property/";
    /** Oracle JAXP property prefix ("http://www.oracle.com/xml/jaxp/properties/"). */
    public static final String ORACLE_JAXP_PROPERTY_PREFIX =
        "http://www.oracle.com/xml/jaxp/properties/";
    /** Full name of the XML security property manager property. */
    public static final String XML_SECURITY_PROPERTY_MANAGER =
        ORACLE_JAXP_PROPERTY_PREFIX + "xmlSecurityPropertyManager";
    //System Properties corresponding to ACCESS_EXTERNAL_* properties
    /** System property controlling external DTD access. */
    public static final String SP_ACCESS_EXTERNAL_DTD = "javax.xml.accessExternalDTD";
    /** System property controlling external schema access. */
    public static final String SP_ACCESS_EXTERNAL_SCHEMA = "javax.xml.accessExternalSchema";
    //all access keyword
    /** Keyword granting access to all external protocols. */
    public static final String ACCESS_EXTERNAL_ALL = "all";
    /**
     * Default value when FEATURE_SECURE_PROCESSING (FSP) is set to true
     */
    public static final String EXTERNAL_ACCESS_DEFAULT_FSP = "";
    /**
     * FEATURE_SECURE_PROCESSING (FSP) is true by default
     */
    public static final String EXTERNAL_ACCESS_DEFAULT = ACCESS_EXTERNAL_ALL;
    /**
     * Check if we're in jdk8 or above
     */
    public static final boolean IS_JDK8_OR_ABOVE = isJavaVersionAtLeast(8);
    //
    // Implementation limits: corresponding System Properties of the above
    // API properties
    //
    /**
     * JDK entity expansion limit; Note that the existing system property
     * "entityExpansionLimit" with no prefix is still observed
     */
    public static final String JDK_ENTITY_EXPANSION_LIMIT =
        ORACLE_JAXP_PROPERTY_PREFIX + "entityExpansionLimit";
    /**
     * JDK element attribute limit; Note that the existing system property
     * "elementAttributeLimit" with no prefix is still observed
     */
    public static final String JDK_ELEMENT_ATTRIBUTE_LIMIT =
        ORACLE_JAXP_PROPERTY_PREFIX + "elementAttributeLimit";
    /**
     * JDK maxOccur limit; Note that the existing system property
     * "maxOccurLimit" with no prefix is still observed
     */
    public static final String JDK_MAX_OCCUR_LIMIT =
        ORACLE_JAXP_PROPERTY_PREFIX + "maxOccurLimit";
    /**
     * JDK total entity size limit
     */
    public static final String JDK_TOTAL_ENTITY_SIZE_LIMIT =
        ORACLE_JAXP_PROPERTY_PREFIX + "totalEntitySizeLimit";
    /**
     * JDK maximum general entity size limit
     */
    public static final String JDK_GENERAL_ENTITY_SIZE_LIMIT =
        ORACLE_JAXP_PROPERTY_PREFIX + "maxGeneralEntitySizeLimit";
    /**
     * JDK maximum parameter entity size limit
     */
    public static final String JDK_PARAMETER_ENTITY_SIZE_LIMIT =
        ORACLE_JAXP_PROPERTY_PREFIX + "maxParameterEntitySizeLimit";
    /**
     * JDK maximum XML name limit
     */
    public static final String JDK_XML_NAME_LIMIT =
        ORACLE_JAXP_PROPERTY_PREFIX + "maxXMLNameLimit";
    /**
     * JDK property to allow printing out information from the limit analyzer
     */
    public static final String JDK_ENTITY_COUNT_INFO =
        ORACLE_JAXP_PROPERTY_PREFIX + "getEntityCountInfo";
    //
    // Implementation limits: API properties
    //
    /**
     * JDK entity expansion limit; Note that the existing system property
     * "entityExpansionLimit" with no prefix is still observed
     */
    public static final String SP_ENTITY_EXPANSION_LIMIT = "jdk.xml.entityExpansionLimit";
    /**
     * JDK element attribute limit; Note that the existing system property
     * "elementAttributeLimit" with no prefix is still observed
     */
    public static final String SP_ELEMENT_ATTRIBUTE_LIMIT = "jdk.xml.elementAttributeLimit";
    /**
     * JDK maxOccur limit; Note that the existing system property
     * "maxOccurLimit" with no prefix is still observed
     */
    public static final String SP_MAX_OCCUR_LIMIT = "jdk.xml.maxOccurLimit";
    /**
     * JDK total entity size limit
     */
    public static final String SP_TOTAL_ENTITY_SIZE_LIMIT = "jdk.xml.totalEntitySizeLimit";
    /**
     * JDK maximum general entity size limit
     */
    public static final String SP_GENERAL_ENTITY_SIZE_LIMIT = "jdk.xml.maxGeneralEntitySizeLimit";
    /**
     * JDK maximum parameter entity size limit
     */
    public static final String SP_PARAMETER_ENTITY_SIZE_LIMIT = "jdk.xml.maxParameterEntitySizeLimit";
    /**
     * JDK maximum XML name limit
     */
    public static final String SP_XML_NAME_LIMIT = "jdk.xml.maxXMLNameLimit";
    //legacy System Properties
    /** Legacy, unprefixed entity expansion limit system property. */
    public final static String ENTITY_EXPANSION_LIMIT = "entityExpansionLimit";
    /** Legacy, unprefixed element attribute limit system property. */
    public static final String ELEMENT_ATTRIBUTE_LIMIT = "elementAttributeLimit" ;
    /** Legacy, unprefixed maxOccur limit system property. */
    public final static String MAX_OCCUR_LIMIT = "maxOccurLimit";
    /**
     * A string "yes" that can be used for properties such as getEntityCountInfo
     */
    public static final String JDK_YES = "yes";
    //
    // DOM features
    //
    /** Comments feature ("include-comments"). */
    public static final String INCLUDE_COMMENTS_FEATURE = "include-comments";
    /** Create cdata nodes feature ("create-cdata-nodes"). */
    public static final String CREATE_CDATA_NODES_FEATURE = "create-cdata-nodes";
    /** Feature id: load as infoset. */
    public static final String LOAD_AS_INFOSET = "load-as-infoset";
//
// Constants: DOM Level 3 feature ids
//
public static final String DOM_CANONICAL_FORM = "canonical-form";
public static final String DOM_CDATA_SECTIONS ="cdata-sections";
public static final String DOM_COMMENTS = "comments";
// REVISIT: this feature seems to have no effect for Xerces
public static final String DOM_CHARSET_OVERRIDES_XML_ENCODING =
"charset-overrides-xml-encoding";
public static final String DOM_DATATYPE_NORMALIZATION = "datatype-normalization";
public static final String DOM_ENTITIES = "entities";
public static final String DOM_INFOSET = "infoset";
public static final String DOM_NAMESPACES = "namespaces";
public static final String DOM_NAMESPACE_DECLARATIONS = "namespace-declarations";
public static final String DOM_SUPPORTED_MEDIATYPES_ONLY =
"supported-media-types-only";
public static final String DOM_VALIDATE_IF_SCHEMA = "validate-if-schema";
public static final String DOM_VALIDATE = "validate";
public static final String DOM_ELEMENT_CONTENT_WHITESPACE =
"element-content-whitespace";
// DOM Level 3 features defined in Core:
public static final String DOM_DISCARD_DEFAULT_CONTENT = "discard-default-content";
public static final String DOM_NORMALIZE_CHARACTERS = "normalize-characters";
public static final String DOM_CHECK_CHAR_NORMALIZATION = "check-character-normalization";
public static final String DOM_WELLFORMED = "well-formed";
public static final String DOM_SPLIT_CDATA = "split-cdata-sections";
// Load and Save
public static final String DOM_FORMAT_PRETTY_PRINT = "format-pretty-print";
public static final String DOM_XMLDECL = "xml-declaration";
public static final String DOM_UNKNOWNCHARS = "unknown-characters";
public static final String DOM_CERTIFIED = "certified";
public static final String DOM_DISALLOW_DOCTYPE = "disallow-doctype";
public static final String DOM_IGNORE_UNKNOWN_CHARACTER_DENORMALIZATIONS = "ignore-unknown-character-denormalizations";
// DOM Properties
public static final String DOM_RESOURCE_RESOLVER = "resource-resolver";
public static final String DOM_ERROR_HANDLER = "error-handler";
public static final String DOM_SCHEMA_TYPE = "schema-type";
public static final String DOM_SCHEMA_LOCATION = "schema-location";
public static final String DOM_ANCESTOR_CHECCK = "ancestor-check";
// XSModel
public static final String DOM_PSVI = "psvi";
// xerces features
/** Xerces features prefix ("http://apache.org/xml/features/"). */
public static final String XERCES_FEATURE_PREFIX = "http://apache.org/xml/features/";
/** Schema validation feature ("validation/schema"). */
public static final String SCHEMA_VALIDATION_FEATURE = "validation/schema";
/** Expose schema normalized values */
public static final String SCHEMA_NORMALIZED_VALUE = "validation/schema/normalized-value";
/** Send schema default value via characters() */
public static final String SCHEMA_ELEMENT_DEFAULT = "validation/schema/element-default";
/** Schema full constraint checking ("validation/schema-full-checking"). */
public static final String SCHEMA_FULL_CHECKING = "validation/schema-full-checking";
/** Augment Post-Schema-Validation-Infoset */
public static final String SCHEMA_AUGMENT_PSVI = "validation/schema/augment-psvi";
/** Dynamic validation feature ("validation/dynamic"). */
public static final String DYNAMIC_VALIDATION_FEATURE = "validation/dynamic";
/** Warn on duplicate attribute declaration feature ("validation/warn-on-duplicate-attdef"). */
public static final String WARN_ON_DUPLICATE_ATTDEF_FEATURE = "validation/warn-on-duplicate-attdef";
/** Warn on undeclared element feature ("validation/warn-on-undeclared-elemdef"). */
public static final String WARN_ON_UNDECLARED_ELEMDEF_FEATURE = "validation/warn-on-undeclared-elemdef";
/** Warn on duplicate entity declaration feature ("warn-on-duplicate-entitydef"). */
public static final String WARN_ON_DUPLICATE_ENTITYDEF_FEATURE = "warn-on-duplicate-entitydef";
/** Allow Java encoding names feature ("allow-java-encodings"). */
public static final String ALLOW_JAVA_ENCODINGS_FEATURE = "allow-java-encodings";
/** Disallow DOCTYPE declaration feature ("disallow-doctype-decl"). */
public static final String DISALLOW_DOCTYPE_DECL_FEATURE = "disallow-doctype-decl";
/** Continue after fatal error feature ("continue-after-fatal-error"). */
public static final String CONTINUE_AFTER_FATAL_ERROR_FEATURE = "continue-after-fatal-error";
/** Load dtd grammar when nonvalidating feature ("nonvalidating/load-dtd-grammar"). */
public static final String LOAD_DTD_GRAMMAR_FEATURE = "nonvalidating/load-dtd-grammar";
/** Load external dtd when nonvalidating feature ("nonvalidating/load-external-dtd"). */
public static final String LOAD_EXTERNAL_DTD_FEATURE = "nonvalidating/load-external-dtd";
/** Defer node expansion feature ("dom/defer-node-expansion"). */
public static final String DEFER_NODE_EXPANSION_FEATURE = "dom/defer-node-expansion";
/** Create entity reference nodes feature ("dom/create-entity-ref-nodes"). */
public static final String CREATE_ENTITY_REF_NODES_FEATURE = "dom/create-entity-ref-nodes";
/** Include ignorable whitespace feature ("dom/include-ignorable-whitespace"). */
public static final String INCLUDE_IGNORABLE_WHITESPACE = "dom/include-ignorable-whitespace";
/** Default attribute values feature ("validation/default-attribute-values"). */
public static final String DEFAULT_ATTRIBUTE_VALUES_FEATURE = "validation/default-attribute-values";
/** Validate content models feature ("validation/validate-content-models"). */
public static final String VALIDATE_CONTENT_MODELS_FEATURE = "validation/validate-content-models";
/** Validate datatypes feature ("validation/validate-datatypes"). */
public static final String VALIDATE_DATATYPES_FEATURE = "validation/validate-datatypes";
/** Balance syntax trees feature ("validation/balance-syntax-trees"). */
public static final String BALANCE_SYNTAX_TREES = "validation/balance-syntax-trees";
    /** Notify character references feature ("scanner/notify-char-refs"). */
public static final String NOTIFY_CHAR_REFS_FEATURE = "scanner/notify-char-refs";
    /** Notify built-in (&amp;, etc.) references feature ("scanner/notify-builtin-refs"). */
public static final String NOTIFY_BUILTIN_REFS_FEATURE = "scanner/notify-builtin-refs";
/** Standard URI conformant feature ("standard-uri-conformant"). */
public static final String STANDARD_URI_CONFORMANT_FEATURE = "standard-uri-conformant";
/** Generate synthetic annotations feature ("generate-synthetic-annotations"). */
public static final String GENERATE_SYNTHETIC_ANNOTATIONS_FEATURE = "generate-synthetic-annotations";
/** Validate annotations feature ("validate-annotations"). */
public static final String VALIDATE_ANNOTATIONS_FEATURE = "validate-annotations";
/** Honour all schemaLocations feature ("honour-all-schemaLocations"). */
public static final String HONOUR_ALL_SCHEMALOCATIONS_FEATURE = "honour-all-schemaLocations";
/** Namespace growth feature ("namespace-growth"). */
public static final String NAMESPACE_GROWTH_FEATURE = "namespace-growth";
/** Tolerate duplicates feature ("internal/tolerate-duplicates"). */
public static final String TOLERATE_DUPLICATES_FEATURE = "internal/tolerate-duplicates";
/** XInclude processing feature ("xinclude"). */
public static final String XINCLUDE_FEATURE = "xinclude";
/** XInclude fixup base URIs feature ("xinclude/fixup-base-uris"). */
public static final String XINCLUDE_FIXUP_BASE_URIS_FEATURE = "xinclude/fixup-base-uris";
/** XInclude fixup language feature ("xinclude/fixup-language"). */
public static final String XINCLUDE_FIXUP_LANGUAGE_FEATURE = "xinclude/fixup-language";
/**
* Internal feature. When set to true the schema validator will only use
* schema components from the grammar pool provided.
*/
public static final String USE_GRAMMAR_POOL_ONLY_FEATURE = "internal/validation/schema/use-grammar-pool-only";
/** Internal performance related feature:
* false - the parser settings (features/properties) have not changed between 2 parses
* true - the parser settings have changed between 2 parses
* NOTE: this feature should only be set by the parser configuration.
*/
public static final String PARSER_SETTINGS = "internal/parser-settings";
/** Feature to make XML Processor XInclude Aware */
public static final String XINCLUDE_AWARE = "xinclude-aware";
/** Ignore xsi:schemaLocation and xsi:noNamespaceSchemaLocation. */
public static final String IGNORE_SCHEMA_LOCATION_HINTS = "validation/schema/ignore-schema-location-hints";
/**
* When true, the schema processor will change characters events
* to ignorableWhitespaces events, when characters are expected to
* only contain ignorable whitespaces.
*/
public static final String CHANGE_IGNORABLE_CHARACTERS_INTO_IGNORABLE_WHITESPACES =
"validation/change-ignorable-characters-into-ignorable-whitespaces";
// xerces properties
/** Xerces properties prefix ("http://apache.org/xml/properties/"). */
public static final String XERCES_PROPERTY_PREFIX = "http://apache.org/xml/properties/";
/** Current element node property ("dom/current-element-node"). */
public static final String CURRENT_ELEMENT_NODE_PROPERTY = "dom/current-element-node";
/** Document class name property ("dom/document-class-name"). */
public static final String DOCUMENT_CLASS_NAME_PROPERTY = "dom/document-class-name";
/** Symbol table property ("internal/symbol-table"). */
public static final String SYMBOL_TABLE_PROPERTY = "internal/symbol-table";
/** Error reporter property ("internal/error-reporter"). */
public static final String ERROR_REPORTER_PROPERTY = "internal/error-reporter";
/** Error handler property ("internal/error-handler"). */
public static final String ERROR_HANDLER_PROPERTY = "internal/error-handler";
/** XInclude handler property ("internal/xinclude-handler"). */
public static final String XINCLUDE_HANDLER_PROPERTY = "internal/xinclude-handler";
/** XPointer handler property ("internal/xpointer-handler"). */
public static final String XPOINTER_HANDLER_PROPERTY = "internal/xpointer-handler";
/** Entity manager property ("internal/entity-manager"). */
public static final String ENTITY_MANAGER_PROPERTY = "internal/entity-manager";
/** Input buffer size property ("input-buffer-size"). */
public static final String BUFFER_SIZE_PROPERTY = "input-buffer-size";
/** Security manager property ("security-manager"). */
public static final String SECURITY_MANAGER_PROPERTY = "security-manager";
/** Locale property ("locale"). */
public static final String LOCALE_PROPERTY = "locale";
/** property identifier: security manager. */
public static final String SECURITY_MANAGER =
Constants.XERCES_PROPERTY_PREFIX + Constants.SECURITY_MANAGER_PROPERTY;
    /** Entity resolver property ("internal/entity-resolver"). */
    public static final String ENTITY_RESOLVER_PROPERTY = "internal/entity-resolver";
/** Grammar pool property ("internal/grammar-pool"). */
public static final String XMLGRAMMAR_POOL_PROPERTY = "internal/grammar-pool";
/** Datatype validator factory ("internal/datatype-validator-factory"). */
public static final String DATATYPE_VALIDATOR_FACTORY_PROPERTY = "internal/datatype-validator-factory";
/** Document scanner property ("internal/document-scanner"). */
public static final String DOCUMENT_SCANNER_PROPERTY = "internal/document-scanner";
/** DTD scanner property ("internal/dtd-scanner"). */
public static final String DTD_SCANNER_PROPERTY = "internal/dtd-scanner";
/** DTD processor property ("internal/dtd-processor"). */
public static final String DTD_PROCESSOR_PROPERTY = "internal/dtd-processor";
/** Validator property ("internal/validator"). */
public static final String VALIDATOR_PROPERTY = "internal/validator";
/** Validator property ("internal/validator/dtd"). */
public static final String DTD_VALIDATOR_PROPERTY = "internal/validator/dtd";
/** Validator property ("internal/validator/schema"). */
public static final String SCHEMA_VALIDATOR_PROPERTY = "internal/validator/schema";
    /** Schema location property ("schema/external-schemaLocation"). */
public static final String SCHEMA_LOCATION = "schema/external-schemaLocation";
    /** No namespace schema location property ("schema/external-noNamespaceSchemaLocation"). */
public static final String SCHEMA_NONS_LOCATION = "schema/external-noNamespaceSchemaLocation";
/** Namespace binder property ("internal/namespace-binder"). */
public static final String NAMESPACE_BINDER_PROPERTY = "internal/namespace-binder";
/** Namespace context property ("internal/namespace-context"). */
public static final String NAMESPACE_CONTEXT_PROPERTY = "internal/namespace-context";
/** Validation manager property ("internal/validation-manager"). */
public static final String VALIDATION_MANAGER_PROPERTY = "internal/validation-manager";
/** XPointer Schema property ("xpointer-schema"). */
public static final String XPOINTER_SCHEMA_PROPERTY = "xpointer-schema";
    /** Schema datatype validator factory property ("internal/validation/schema/dv-factory"). */
public static final String SCHEMA_DV_FACTORY_PROPERTY = "internal/validation/schema/dv-factory";
// general constants
/** Element PSVI is stored in augmentations using string "ELEMENT_PSVI" */
public final static String ELEMENT_PSVI = "ELEMENT_PSVI";
/** Attribute PSVI is stored in augmentations using string "ATTRIBUTE_PSVI" */
public final static String ATTRIBUTE_PSVI = "ATTRIBUTE_PSVI";
/**
* Boolean indicating whether an attribute is declared in the DTD is stored
* in augmentations using the string "ATTRIBUTE_DECLARED". The absence of this
* augmentation indicates that the attribute was not declared in the DTD.
*/
public final static String ATTRIBUTE_DECLARED = "ATTRIBUTE_DECLARED";
/**
* {@link org.w3c.dom.TypeInfo} associated with current element/attribute
* is stored in augmentations using this string as the key.
*
     * This will ultimately control {@link com.sun.org.apache.xerces.internal.parsers.AbstractDOMParser}
     * regarding what object the DOM will return from
     * {@link org.w3c.dom.Attr#getSchemaTypeInfo()} and
     * {@link org.w3c.dom.Element#getSchemaTypeInfo()}.
     */
public final static String TYPEINFO = "org.w3c.dom.TypeInfo";
/**
* Whether an attribute is an id or not is stored in augmentations
* using this string as the key. The value is {@link Boolean#TRUE}
* or {@link Boolean#FALSE}.
*
     * This will ultimately control {@link com.sun.org.apache.xerces.internal.parsers.AbstractDOMParser}
* about whether it will mark an attribute as ID or not.
*/
public final static String ID_ATTRIBUTE = "ID_ATTRIBUTE";
    // Augmentation constants
/**
* Boolean indicating whether an entity referenced in the document has
* not been read is stored in augmentations using the string "ENTITY_SKIPPED".
* The absence of this augmentation indicates that the entity had a
* declaration and was expanded.
*/
public final static String ENTITY_SKIPPED = "ENTITY_SKIPPED";
/**
* Boolean indicating whether a character is a probable white space
* character (ch <= 0x20) that was the replacement text of a character
* reference is stored in augmentations using the string "CHAR_REF_PROBABLE_WS".
* The absence of this augmentation indicates that the character is not
* probable white space and/or was not included from a character reference.
*/
public final static String CHAR_REF_PROBABLE_WS = "CHAR_REF_PROBABLE_WS";
/** Boolean indicating if this entity is the last opened entity.
*
*@see com.sun.org.apache.xerces.internal.impl.XMLEntityManager#endEntity()
*@see com.sun.org.apache.xerces.internal.impl.XMLDocumentScannerImpl#endEntity()
*@see com.sun.org.apache.xerces.internal.impl.XMLDTDScannerImpl#endEntity()
*/
public final static String LAST_ENTITY = "LAST_ENTITY";
// XML version constants
public final static short XML_VERSION_ERROR = -1;
public final static short XML_VERSION_1_0 = 1;
public final static short XML_VERSION_1_1 = 2;
// DOM related constants
public final static String ANONYMOUS_TYPE_NAMESPACE =
"http://apache.org/xml/xmlschema/1.0/anonymousTypes";
// Constant to enable Schema 1.1 support
public final static boolean SCHEMA_1_1_SUPPORT = false;
public final static short SCHEMA_VERSION_1_0 = 1;
public final static short SCHEMA_VERSION_1_0_EXTENDED = 2;
// private
/** SAX features. */
private static final String[] fgSAXFeatures = {
NAMESPACES_FEATURE,
NAMESPACE_PREFIXES_FEATURE,
STRING_INTERNING_FEATURE,
VALIDATION_FEATURE,
EXTERNAL_GENERAL_ENTITIES_FEATURE,
EXTERNAL_PARAMETER_ENTITIES_FEATURE,
};
/** SAX properties. */
private static final String[] fgSAXProperties = {
DECLARATION_HANDLER_PROPERTY,
LEXICAL_HANDLER_PROPERTY,
DOM_NODE_PROPERTY,
XML_STRING_PROPERTY,
};
/** Xerces features. */
private static final String[] fgXercesFeatures = {
SCHEMA_VALIDATION_FEATURE,
SCHEMA_FULL_CHECKING,
DYNAMIC_VALIDATION_FEATURE,
WARN_ON_DUPLICATE_ATTDEF_FEATURE,
WARN_ON_UNDECLARED_ELEMDEF_FEATURE,
ALLOW_JAVA_ENCODINGS_FEATURE,
CONTINUE_AFTER_FATAL_ERROR_FEATURE,
LOAD_DTD_GRAMMAR_FEATURE,
LOAD_EXTERNAL_DTD_FEATURE,
//DEFER_NODE_EXPANSION_FEATURE,
CREATE_ENTITY_REF_NODES_FEATURE,
XINCLUDE_AWARE,
INCLUDE_IGNORABLE_WHITESPACE,
//GRAMMAR_ACCESS_FEATURE,
DEFAULT_ATTRIBUTE_VALUES_FEATURE,
VALIDATE_CONTENT_MODELS_FEATURE,
VALIDATE_DATATYPES_FEATURE,
BALANCE_SYNTAX_TREES,
NOTIFY_CHAR_REFS_FEATURE,
NOTIFY_BUILTIN_REFS_FEATURE,
DISALLOW_DOCTYPE_DECL_FEATURE,
STANDARD_URI_CONFORMANT_FEATURE,
GENERATE_SYNTHETIC_ANNOTATIONS_FEATURE,
VALIDATE_ANNOTATIONS_FEATURE,
HONOUR_ALL_SCHEMALOCATIONS_FEATURE,
XINCLUDE_FEATURE,
XINCLUDE_FIXUP_BASE_URIS_FEATURE,
XINCLUDE_FIXUP_LANGUAGE_FEATURE,
NAMESPACE_GROWTH_FEATURE,
TOLERATE_DUPLICATES_FEATURE,
};
/** Xerces properties. */
private static final String[] fgXercesProperties = {
CURRENT_ELEMENT_NODE_PROPERTY,
DOCUMENT_CLASS_NAME_PROPERTY,
SYMBOL_TABLE_PROPERTY,
ERROR_HANDLER_PROPERTY,
ERROR_REPORTER_PROPERTY,
ENTITY_MANAGER_PROPERTY,
ENTITY_RESOLVER_PROPERTY,
XMLGRAMMAR_POOL_PROPERTY,
DATATYPE_VALIDATOR_FACTORY_PROPERTY,
DOCUMENT_SCANNER_PROPERTY,
DTD_SCANNER_PROPERTY,
VALIDATOR_PROPERTY,
SCHEMA_LOCATION,
SCHEMA_NONS_LOCATION,
VALIDATION_MANAGER_PROPERTY,
BUFFER_SIZE_PROPERTY,
SECURITY_MANAGER_PROPERTY,
LOCALE_PROPERTY,
SCHEMA_DV_FACTORY_PROPERTY,
};
/** Empty enumeration. */
private static final Enumeration fgEmptyEnumeration = new ArrayEnumeration(new Object[] {});
//
// Constructors
//
/** This class cannot be instantiated. */
private Constants() {}
//
// Public methods
//
// sax
/** Returns an enumeration of the SAX features. */
public static Enumeration getSAXFeatures() {
return fgSAXFeatures.length > 0
? new ArrayEnumeration(fgSAXFeatures) : fgEmptyEnumeration;
} // getSAXFeatures():Enumeration
/** Returns an enumeration of the SAX properties. */
public static Enumeration getSAXProperties() {
return fgSAXProperties.length > 0
? new ArrayEnumeration(fgSAXProperties) : fgEmptyEnumeration;
} // getSAXProperties():Enumeration
// xerces
/** Returns an enumeration of the Xerces features. */
public static Enumeration getXercesFeatures() {
return fgXercesFeatures.length > 0
? new ArrayEnumeration(fgXercesFeatures) : fgEmptyEnumeration;
} // getXercesFeatures():Enumeration
/** Returns an enumeration of the Xerces properties. */
public static Enumeration getXercesProperties() {
return fgXercesProperties.length > 0
? new ArrayEnumeration(fgXercesProperties) : fgEmptyEnumeration;
} // getXercesProperties():Enumeration
/*
* Check the version of the current JDK against that specified in the
* parameter
*
* There is a proposal to change the java version string to:
* MAJOR.MINOR.FU.CPU.PSU-BUILDNUMBER_BUGIDNUMBER_OPTIONAL
* This method would work with both the current format and that proposed
*
* @param compareTo a JDK version to be compared to
* @return true if the current version is the same or above that represented
* by the parameter
*/
public static boolean isJavaVersionAtLeast(int compareTo) {
String javaVersion = SecuritySupport.getSystemProperty("java.version");
String versions[] = javaVersion.split("\\.", 3);
if (Integer.parseInt(versions[0]) >= compareTo ||
Integer.parseInt(versions[1]) >= compareTo) {
return true;
}
return false;
}
//
// Classes
//
/**
* An array enumeration.
*
* @author Andy Clark, IBM
*/
static class ArrayEnumeration
implements Enumeration {
//
// Data
//
/** Array. */
private Object[] array;
/** Index. */
private int index;
//
// Constructors
//
/** Constructs an array enumeration. */
public ArrayEnumeration(Object[] array) {
this.array = array;
} // <init>(Object[])
//
// Enumeration methods
//
/**
* Tests if this enumeration contains more elements.
*
* @return <code>true</code> if this enumeration contains more elements;
* <code>false</code> otherwise.
* @since JDK1.0
*/
public boolean hasMoreElements() {
return index < array.length;
} // hasMoreElement():boolean
/**
* Returns the next element of this enumeration.
*
* @return the next element of this enumeration.
* @exception NoSuchElementException if no more elements exist.
* @since JDK1.0
*/
public Object nextElement() {
if (index < array.length) {
return array[index++];
}
throw new NoSuchElementException();
} // nextElement():Object
} // class ArrayEnumeration
//
// MAIN
//
/** Prints all of the constants to standard output. */
public static void main(String[] argv) {
print("SAX features:", SAX_FEATURE_PREFIX, fgSAXFeatures);
print("SAX properties:", SAX_PROPERTY_PREFIX, fgSAXProperties);
print("Xerces features:", XERCES_FEATURE_PREFIX, fgXercesFeatures);
print("Xerces properties:", XERCES_PROPERTY_PREFIX, fgXercesProperties);
} // main(String[])
/** Prints a list of features/properties. */
private static void print(String header, String prefix, Object[] array) {
System.out.print(header);
if (array.length > 0) {
System.out.println();
for (int i = 0; i < array.length; i++) {
System.out.print(" ");
System.out.print(prefix);
System.out.println(array[i]);
}
}
else {
System.out.println(" none.");
}
} // print(String,String,Object[])
} // class Constants
|
google/shipshape | 37,611 | third_party/proto/java/src/main/java/com/google/protobuf/ByteString.java | // Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.protobuf;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.UnsupportedCharsetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
/**
* Immutable sequence of bytes. Substring is supported by sharing the reference
* to the immutable underlying bytes, as with {@link String}. Concatenation is
* likewise supported without copying (long strings) by building a tree of
* pieces in {@link RopeByteString}.
* <p>
* Like {@link String}, the contents of a {@link ByteString} can never be
* observed to change, not even in the presence of a data race or incorrect
* API usage in the client code.
*
* @author crazybob@google.com Bob Lee
* @author kenton@google.com Kenton Varda
* @author carlanton@google.com Carl Haverl
* @author martinrb@google.com Martin Buchholz
*/
public abstract class ByteString implements Iterable<Byte>, Serializable {
/**
* When two strings to be concatenated have a combined length shorter than
* this, we just copy their bytes on {@link #concat(ByteString)}.
* The trade-off is copy size versus the overhead of creating tree nodes
* in {@link RopeByteString}.
*/
static final int CONCATENATE_BY_COPY_SIZE = 128;
/**
* When copying an InputStream into a ByteString with .readFrom(),
* the chunks in the underlying rope start at 256 bytes, but double
* each iteration up to 8192 bytes.
*/
static final int MIN_READ_FROM_CHUNK_SIZE = 0x100; // 256b
static final int MAX_READ_FROM_CHUNK_SIZE = 0x2000; // 8k
/**
* Empty {@code ByteString}.
*/
public static final ByteString EMPTY = new LiteralByteString(new byte[0]);
// This constructor is here to prevent subclassing outside of this package,
ByteString() {}
/**
* Gets the byte at the given index. This method should be used only for
* random access to individual bytes. To access bytes sequentially, use the
* {@link ByteIterator} returned by {@link #iterator()}, and call {@link
* #substring(int, int)} first if necessary.
*
* @param index index of byte
* @return the value
* @throws ArrayIndexOutOfBoundsException {@code index} is < 0 or >= size
*/
public abstract byte byteAt(int index);
/**
* Return a {@link ByteString.ByteIterator} over the bytes in the ByteString.
* To avoid auto-boxing, you may get the iterator manually and call
* {@link ByteIterator#nextByte()}.
*
* @return the iterator
*/
public abstract ByteIterator iterator();
  /**
   * This interface extends {@code Iterator<Byte>}, so that we can return an
   * unboxed {@code byte} and avoid per-element boxing during iteration.
   */
  public interface ByteIterator extends Iterator<Byte> {
    /**
     * An alternative to {@link Iterator#next()} that returns an
     * unboxed primitive {@code byte}.
     *
     * @return the next {@code byte} in the iteration
     * @throws NoSuchElementException if the iteration has no more elements
     */
    byte nextByte();
  }
/**
* Gets the number of bytes.
*
* @return size in bytes
*/
public abstract int size();
/**
* Returns {@code true} if the size is {@code 0}, {@code false} otherwise.
*
* @return true if this is zero bytes long
*/
public boolean isEmpty() {
return size() == 0;
}
// =================================================================
// ByteString -> substring
/**
* Return the substring from {@code beginIndex}, inclusive, to the end of the
* string.
*
* @param beginIndex start at this index
* @return substring sharing underlying data
* @throws IndexOutOfBoundsException if {@code beginIndex < 0} or
* {@code beginIndex > size()}.
*/
public ByteString substring(int beginIndex) {
return substring(beginIndex, size());
}
/**
* Return the substring from {@code beginIndex}, inclusive, to {@code
* endIndex}, exclusive.
*
* @param beginIndex start at this index
* @param endIndex the last character is the one before this index
* @return substring sharing underlying data
* @throws IndexOutOfBoundsException if {@code beginIndex < 0},
* {@code endIndex > size()}, or {@code beginIndex > endIndex}.
*/
public abstract ByteString substring(int beginIndex, int endIndex);
/**
* Tests if this bytestring starts with the specified prefix.
* Similar to {@link String#startsWith(String)}
*
* @param prefix the prefix.
* @return <code>true</code> if the byte sequence represented by the
* argument is a prefix of the byte sequence represented by
* this string; <code>false</code> otherwise.
*/
public boolean startsWith(ByteString prefix) {
return size() >= prefix.size() &&
substring(0, prefix.size()).equals(prefix);
}
/**
* Tests if this bytestring ends with the specified suffix.
* Similar to {@link String#endsWith(String)}
*
* @param suffix the suffix.
* @return <code>true</code> if the byte sequence represented by the
* argument is a suffix of the byte sequence represented by
* this string; <code>false</code> otherwise.
*/
public boolean endsWith(ByteString suffix) {
return size() >= suffix.size() &&
substring(size() - suffix.size()).equals(suffix);
}
// =================================================================
// byte[] -> ByteString
/**
* Copies the given bytes into a {@code ByteString}.
*
* @param bytes source array
* @param offset offset in source array
* @param size number of bytes to copy
* @return new {@code ByteString}
*/
public static ByteString copyFrom(byte[] bytes, int offset, int size) {
byte[] copy = new byte[size];
System.arraycopy(bytes, offset, copy, 0, size);
return new LiteralByteString(copy);
}
/**
* Copies the given bytes into a {@code ByteString}.
*
* @param bytes to copy
* @return new {@code ByteString}
*/
public static ByteString copyFrom(byte[] bytes) {
return copyFrom(bytes, 0, bytes.length);
}
/**
* Copies the next {@code size} bytes from a {@code java.nio.ByteBuffer} into
* a {@code ByteString}.
*
* @param bytes source buffer
* @param size number of bytes to copy
* @return new {@code ByteString}
*/
public static ByteString copyFrom(ByteBuffer bytes, int size) {
byte[] copy = new byte[size];
bytes.get(copy);
return new LiteralByteString(copy);
}
/**
* Copies the remaining bytes from a {@code java.nio.ByteBuffer} into
* a {@code ByteString}.
*
* @param bytes sourceBuffer
* @return new {@code ByteString}
*/
public static ByteString copyFrom(ByteBuffer bytes) {
return copyFrom(bytes, bytes.remaining());
}
/**
* Encodes {@code text} into a sequence of bytes using the named charset
* and returns the result as a {@code ByteString}.
*
* @param text source string
* @param charsetName encoding to use
* @return new {@code ByteString}
* @throws UnsupportedEncodingException if the encoding isn't found
*/
public static ByteString copyFrom(String text, String charsetName)
throws UnsupportedEncodingException {
return new LiteralByteString(text.getBytes(charsetName));
}
/**
* Encodes {@code text} into a sequence of bytes using the named charset
* and returns the result as a {@code ByteString}.
*
* @param text source string
* @param charset encode using this charset
* @return new {@code ByteString}
*/
public static ByteString copyFrom(String text, Charset charset) {
return new LiteralByteString(text.getBytes(charset));
}
/**
* Encodes {@code text} into a sequence of UTF-8 bytes and returns the
* result as a {@code ByteString}.
*
* @param text source string
* @return new {@code ByteString}
*/
public static ByteString copyFromUtf8(String text) {
return new LiteralByteString(text.getBytes(Internal.UTF_8));
}
// =================================================================
// InputStream -> ByteString
/**
* Completely reads the given stream's bytes into a
* {@code ByteString}, blocking if necessary until all bytes are
* read through to the end of the stream.
*
* <b>Performance notes:</b> The returned {@code ByteString} is an
* immutable tree of byte arrays ("chunks") of the stream data. The
* first chunk is small, with subsequent chunks each being double
* the size, up to 8K. If the caller knows the precise length of
* the stream and wishes to avoid all unnecessary copies and
* allocations, consider using the two-argument version of this
* method, below.
*
* @param streamToDrain The source stream, which is read completely
* but not closed.
* @return A new {@code ByteString} which is made up of chunks of
* various sizes, depending on the behavior of the underlying
* stream.
* @throws IOException IOException is thrown if there is a problem
* reading the underlying stream.
*/
public static ByteString readFrom(InputStream streamToDrain)
throws IOException {
return readFrom(
streamToDrain, MIN_READ_FROM_CHUNK_SIZE, MAX_READ_FROM_CHUNK_SIZE);
}
/**
* Completely reads the given stream's bytes into a
* {@code ByteString}, blocking if necessary until all bytes are
* read through to the end of the stream.
*
* <b>Performance notes:</b> The returned {@code ByteString} is an
* immutable tree of byte arrays ("chunks") of the stream data. The
* chunkSize parameter sets the size of these byte arrays. In
* particular, if the chunkSize is precisely the same as the length
* of the stream, unnecessary allocations and copies will be
* avoided. Otherwise, the chunks will be of the given size, except
* for the last chunk, which will be resized (via a reallocation and
* copy) to contain the remainder of the stream.
*
* @param streamToDrain The source stream, which is read completely
* but not closed.
* @param chunkSize The size of the chunks in which to read the
* stream.
* @return A new {@code ByteString} which is made up of chunks of
* the given size.
* @throws IOException IOException is thrown if there is a problem
* reading the underlying stream.
*/
public static ByteString readFrom(InputStream streamToDrain, int chunkSize)
throws IOException {
return readFrom(streamToDrain, chunkSize, chunkSize);
}
// Helper method that takes the chunk size range as a parameter.
public static ByteString readFrom(InputStream streamToDrain, int minChunkSize,
int maxChunkSize) throws IOException {
Collection<ByteString> results = new ArrayList<ByteString>();
// copy the inbound bytes into a list of chunks; the chunk size
// grows exponentially to support both short and long streams.
int chunkSize = minChunkSize;
while (true) {
ByteString chunk = readChunk(streamToDrain, chunkSize);
if (chunk == null) {
break;
}
results.add(chunk);
chunkSize = Math.min(chunkSize * 2, maxChunkSize);
}
return ByteString.copyFrom(results);
}
/**
* Blocks until a chunk of the given size can be made from the
* stream, or EOF is reached. Calls read() repeatedly in case the
* given stream implementation doesn't completely fill the given
* buffer in one read() call.
*
* @return A chunk of the desired size, or else a chunk as large as
* was available when end of stream was reached. Returns null if the
* given stream had no more data in it.
*/
private static ByteString readChunk(InputStream in, final int chunkSize)
throws IOException {
final byte[] buf = new byte[chunkSize];
int bytesRead = 0;
while (bytesRead < chunkSize) {
final int count = in.read(buf, bytesRead, chunkSize - bytesRead);
if (count == -1) {
break;
}
bytesRead += count;
}
if (bytesRead == 0) {
return null;
} else {
return ByteString.copyFrom(buf, 0, bytesRead);
}
}
// =================================================================
// Multiple ByteStrings -> One ByteString
/**
* Concatenate the given {@code ByteString} to this one. Short concatenations,
* of total size smaller than {@link ByteString#CONCATENATE_BY_COPY_SIZE}, are
* produced by copying the underlying bytes (as per Rope.java, <a
* href="http://www.cs.ubc.ca/local/reading/proceedings/spe91-95/spe/vol25/issue12/spe986.pdf">
* BAP95 </a>. In general, the concatenate involves no copying.
*
* @param other string to concatenate
* @return a new {@code ByteString} instance
*/
public ByteString concat(ByteString other) {
int thisSize = size();
int otherSize = other.size();
if ((long) thisSize + otherSize >= Integer.MAX_VALUE) {
throw new IllegalArgumentException("ByteString would be too long: " +
thisSize + "+" + otherSize);
}
return RopeByteString.concatenate(this, other);
}
/**
* Concatenates all byte strings in the iterable and returns the result.
* This is designed to run in O(list size), not O(total bytes).
*
* <p>The returned {@code ByteString} is not necessarily a unique object.
* If the list is empty, the returned object is the singleton empty
* {@code ByteString}. If the list has only one element, that
* {@code ByteString} will be returned without copying.
*
* @param byteStrings strings to be concatenated
* @return new {@code ByteString}
*/
public static ByteString copyFrom(Iterable<ByteString> byteStrings) {
Collection<ByteString> collection;
if (!(byteStrings instanceof Collection)) {
collection = new ArrayList<ByteString>();
for (ByteString byteString : byteStrings) {
collection.add(byteString);
}
} else {
collection = (Collection<ByteString>) byteStrings;
}
ByteString result;
if (collection.isEmpty()) {
result = EMPTY;
} else {
result = balancedConcat(collection.iterator(), collection.size());
}
return result;
}
// Internal function used by copyFrom(Iterable<ByteString>).
// Create a balanced concatenation of the next "length" elements from the
// iterable.
private static ByteString balancedConcat(Iterator<ByteString> iterator,
int length) {
assert length >= 1;
ByteString result;
if (length == 1) {
result = iterator.next();
} else {
int halfLength = length >>> 1;
ByteString left = balancedConcat(iterator, halfLength);
ByteString right = balancedConcat(iterator, length - halfLength);
result = left.concat(right);
}
return result;
}
// =================================================================
// ByteString -> byte[]
/**
* Copies bytes into a buffer at the given offset.
*
* @param target buffer to copy into
* @param offset in the target buffer
* @throws IndexOutOfBoundsException if the offset is negative or too large
*/
public void copyTo(byte[] target, int offset) {
copyTo(target, 0, offset, size());
}
/**
* Copies bytes into a buffer.
*
* @param target buffer to copy into
* @param sourceOffset offset within these bytes
* @param targetOffset offset within the target buffer
* @param numberToCopy number of bytes to copy
* @throws IndexOutOfBoundsException if an offset or size is negative or too
* large
*/
public void copyTo(byte[] target, int sourceOffset, int targetOffset,
int numberToCopy) {
if (sourceOffset < 0) {
throw new IndexOutOfBoundsException("Source offset < 0: " + sourceOffset);
}
if (targetOffset < 0) {
throw new IndexOutOfBoundsException("Target offset < 0: " + targetOffset);
}
if (numberToCopy < 0) {
throw new IndexOutOfBoundsException("Length < 0: " + numberToCopy);
}
if (sourceOffset + numberToCopy > size()) {
throw new IndexOutOfBoundsException(
"Source end offset < 0: " + (sourceOffset + numberToCopy));
}
if (targetOffset + numberToCopy > target.length) {
throw new IndexOutOfBoundsException(
"Target end offset < 0: " + (targetOffset + numberToCopy));
}
if (numberToCopy > 0) {
copyToInternal(target, sourceOffset, targetOffset, numberToCopy);
}
}
  /**
   * Internal (package private) implementation of
   * {@link #copyTo(byte[],int,int,int)}.
   * It assumes that all error checking has already been performed and that
   * {@code numberToCopy > 0}.
   *
   * @param target buffer to copy into
   * @param sourceOffset offset within these bytes (already validated)
   * @param targetOffset offset within the target buffer (already validated)
   * @param numberToCopy number of bytes to copy, strictly positive
   */
  protected abstract void copyToInternal(byte[] target, int sourceOffset,
      int targetOffset, int numberToCopy);
  /**
   * Copies bytes into a ByteBuffer.
   *
   * @param target ByteBuffer to copy into.
   * @throws java.nio.ReadOnlyBufferException if the {@code target} is read-only
   * @throws java.nio.BufferOverflowException if the {@code target}'s
   * remaining() space is not large enough to hold the data.
   */
  public abstract void copyTo(ByteBuffer target);
/**
* Copies bytes to a {@code byte[]}.
*
* @return copied bytes
*/
public byte[] toByteArray() {
int size = size();
if (size == 0) {
return Internal.EMPTY_BYTE_ARRAY;
}
byte[] result = new byte[size];
copyToInternal(result, 0, 0, size);
return result;
}
  /**
   * Writes the complete contents of this byte string to
   * the specified output stream argument.
   *
   * @param out the output stream to which to write the data.
   * @throws IOException if an I/O error occurs.
   * @see #writeTo(OutputStream, int, int)
   */
  public abstract void writeTo(OutputStream out) throws IOException;
/**
* Writes a specified part of this byte string to an output stream.
*
* @param out the output stream to which to write the data.
* @param sourceOffset offset within these bytes
* @param numberToWrite number of bytes to write
* @throws IOException if an I/O error occurs.
* @throws IndexOutOfBoundsException if an offset or size is negative or too
* large
*/
void writeTo(OutputStream out, int sourceOffset, int numberToWrite)
throws IOException {
if (sourceOffset < 0) {
throw new IndexOutOfBoundsException("Source offset < 0: " + sourceOffset);
}
if (numberToWrite < 0) {
throw new IndexOutOfBoundsException("Length < 0: " + numberToWrite);
}
if (sourceOffset + numberToWrite > size()) {
throw new IndexOutOfBoundsException(
"Source end offset exceeded: " + (sourceOffset + numberToWrite));
}
if (numberToWrite > 0) {
writeToInternal(out, sourceOffset, numberToWrite);
}
}
  /**
   * Internal version of {@link #writeTo(OutputStream,int,int)} that assumes
   * all error checking has already been done.
   *
   * @param out destination stream
   * @param sourceOffset offset within these bytes (already validated)
   * @param numberToWrite number of bytes to write, strictly positive
   */
  abstract void writeToInternal(OutputStream out, int sourceOffset,
      int numberToWrite) throws IOException;
  /**
   * Constructs a read-only {@code java.nio.ByteBuffer} whose content
   * is equal to the contents of this byte string.
   * The result uses the same backing array as the byte string, if possible.
   *
   * @return wrapped bytes
   */
  public abstract ByteBuffer asReadOnlyByteBuffer();
  /**
   * Constructs a list of read-only {@code java.nio.ByteBuffer} objects
   * such that the concatenation of their contents is equal to the contents
   * of this byte string. The result uses the same backing arrays as the
   * byte string.
   * <p>
   * By returning a list, implementations of this method may be able to avoid
   * copying even when there are multiple backing arrays.
   *
   * @return a list of wrapped bytes
   */
  public abstract List<ByteBuffer> asReadOnlyByteBufferList();
/**
* Constructs a new {@code String} by decoding the bytes using the
* specified charset.
*
* @param charsetName encode using this charset
* @return new string
* @throws UnsupportedEncodingException if charset isn't recognized
*/
public String toString(String charsetName)
throws UnsupportedEncodingException {
try {
return toString(Charset.forName(charsetName));
} catch (UnsupportedCharsetException e) {
UnsupportedEncodingException exception = new UnsupportedEncodingException(charsetName);
exception.initCause(e);
throw exception;
}
}
/**
* Constructs a new {@code String} by decoding the bytes using the
* specified charset. Returns the same empty String if empty.
*
* @param charset encode using this charset
* @return new string
*/
public String toString(Charset charset) {
return size() == 0 ? "" : toStringInternal(charset);
}
  /**
   * Constructs a new {@code String} by decoding the bytes using the
   * specified charset.
   *
   * <p>Only invoked by {@link #toString(Charset)} when {@code size() > 0}.
   *
   * @param charset encode using this charset
   * @return new string
   */
  protected abstract String toStringInternal(Charset charset);
// =================================================================
// UTF-8 decoding
/**
* Constructs a new {@code String} by decoding the bytes as UTF-8.
*
* @return new string using UTF-8 encoding
*/
public String toStringUtf8() {
return toString(Internal.UTF_8);
}
  /**
   * Tells whether this {@code ByteString} represents a well-formed UTF-8
   * byte sequence, such that the original bytes can be converted to a
   * String object and then round tripped back to bytes without loss.
   *
   * <p>More precisely, returns {@code true} whenever: <pre> {@code
   * Arrays.equals(byteString.toByteArray(),
   *     new String(byteString.toByteArray(), "UTF-8").getBytes("UTF-8"))
   * }</pre>
   *
   * <p>This method returns {@code false} for "overlong" byte sequences,
   * as well as for 3-byte sequences that would map to a surrogate
   * character, in accordance with the restricted definition of UTF-8
   * introduced in Unicode 3.1. Note that the UTF-8 decoder included in
   * Oracle's JDK has been modified to also reject "overlong" byte
   * sequences, but (as of 2011) still accepts 3-byte surrogate
   * character byte sequences.
   *
   * <p>See the Unicode Standard,<br>
   * Table 3-6. <em>UTF-8 Bit Distribution</em>,<br>
   * Table 3-7. <em>Well Formed UTF-8 Byte Sequences</em>.
   *
   * @return whether the bytes in this {@code ByteString} are a
   * well-formed UTF-8 byte sequence
   */
  public abstract boolean isValidUtf8();
  /**
   * Tells whether the given byte sequence is a well-formed, malformed, or
   * incomplete UTF-8 byte sequence. This method accepts and returns a partial
   * state result, allowing the bytes for a complete UTF-8 byte sequence to be
   * composed from multiple {@code ByteString} segments.
   *
   * @param state either {@code 0} (if this is the initial decoding operation)
   *     or the value returned from a call to a partial decoding method for the
   *     previous bytes
   * @param offset offset of the first byte to check
   * @param length number of bytes to check
   *
   * @return {@code -1} if the partial byte sequence is definitely malformed,
   * {@code 0} if it is well-formed (no additional input needed), or, if the
   * byte sequence is "incomplete", i.e. apparently terminated in the middle of
   * a character, an opaque integer "state" value containing enough information
   * to decode the character when passed to a subsequent invocation of a
   * partial decoding method.
   */
  protected abstract int partialIsValidUtf8(int state, int offset, int length);
  // =================================================================
  // equals() and hashCode()
  /**
   * Compares this byte string to the specified object.
   * Implementations must satisfy the {@link Object#equals} contract and be
   * consistent with {@link #hashCode}.
   */
  @Override
  public abstract boolean equals(Object o);
  /**
   * Return a non-zero hashCode depending only on the sequence of bytes
   * in this ByteString.
   *
   * @return hashCode value for this object
   */
  @Override
  public abstract int hashCode();
  // =================================================================
  // Input stream
  /**
   * Creates an {@code InputStream} which can be used to read the bytes.
   * <p>
   * The {@link InputStream} returned by this method is guaranteed to be
   * completely non-blocking. The method {@link InputStream#available()}
   * returns the number of bytes remaining in the stream. The methods
   * {@link InputStream#read(byte[])}, {@link InputStream#read(byte[],int,int)}
   * and {@link InputStream#skip(long)} will read/skip as many bytes as are
   * available.
   * <p>
   * The methods in the returned {@link InputStream} might <b>not</b> be
   * thread safe.
   *
   * @return an input stream that returns the bytes of this byte string.
   */
  public abstract InputStream newInput();
  /**
   * Creates a {@link CodedInputStream} which can be used to read the bytes.
   * Using this is often more efficient than creating a {@link CodedInputStream}
   * that wraps the result of {@link #newInput()}.
   *
   * @return stream based on wrapped data
   */
  public abstract CodedInputStream newCodedInput();
// =================================================================
// Output stream
/**
* Creates a new {@link Output} with the given initial capacity. Call {@link
* Output#toByteString()} to create the {@code ByteString} instance.
* <p>
* A {@link ByteString.Output} offers the same functionality as a
* {@link ByteArrayOutputStream}, except that it returns a {@link ByteString}
* rather than a {@code byte} array.
*
* @param initialCapacity estimate of number of bytes to be written
* @return {@code OutputStream} for building a {@code ByteString}
*/
public static Output newOutput(int initialCapacity) {
return new Output(initialCapacity);
}
/**
* Creates a new {@link Output}. Call {@link Output#toByteString()} to create
* the {@code ByteString} instance.
* <p>
* A {@link ByteString.Output} offers the same functionality as a
* {@link ByteArrayOutputStream}, except that it returns a {@link ByteString}
* rather than a {@code byte array}.
*
* @return {@code OutputStream} for building a {@code ByteString}
*/
public static Output newOutput() {
return new Output(CONCATENATE_BY_COPY_SIZE);
}
  /**
   * Outputs to a {@code ByteString} instance. Call {@link #toByteString()} to
   * create the {@code ByteString} instance.
   */
  public static final class Output extends OutputStream {
    // Implementation note.
    // The public methods of this class must be synchronized. ByteStrings
    // are guaranteed to be immutable. Without some sort of locking, it could
    // be possible for one thread to call toByteString(), while another thread
    // is still modifying the underlying byte array.
    private static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
    // argument passed by user, indicating initial capacity.
    private final int initialCapacity;
    // ByteStrings to be concatenated to create the result
    private final ArrayList<ByteString> flushedBuffers;
    // Total number of bytes in the ByteStrings of flushedBuffers
    private int flushedBuffersTotalBytes;
    // Current buffer to which we are writing
    private byte[] buffer;
    // Location in buffer[] to which we write the next byte.
    private int bufferPos;
    /**
     * Creates a new ByteString output stream with the specified
     * initial capacity.
     *
     * @param initialCapacity the initial capacity of the output stream.
     * @throws IllegalArgumentException if {@code initialCapacity} is negative.
     */
    Output(int initialCapacity) {
      if (initialCapacity < 0) {
        throw new IllegalArgumentException("Buffer size < 0");
      }
      this.initialCapacity = initialCapacity;
      this.flushedBuffers = new ArrayList<ByteString>();
      this.buffer = new byte[initialCapacity];
    }
    // Writes one byte, flushing the full current buffer into flushedBuffers
    // first when necessary.
    @Override
    public synchronized void write(int b) {
      if (bufferPos == buffer.length) {
        flushFullBuffer(1);
      }
      buffer[bufferPos++] = (byte)b;
    }
    @Override
    public synchronized void write(byte[] b, int offset, int length) {
      if (length <= buffer.length - bufferPos) {
        // The bytes can fit into the current buffer.
        System.arraycopy(b, offset, buffer, bufferPos, length);
        bufferPos += length;
      } else {
        // Use up the current buffer
        int copySize = buffer.length - bufferPos;
        System.arraycopy(b, offset, buffer, bufferPos, copySize);
        offset += copySize;
        length -= copySize;
        // Flush the buffer, and get a new buffer at least big enough to cover
        // what we still need to output
        flushFullBuffer(length);
        System.arraycopy(b, offset, buffer, 0 /* destPos */, length);
        bufferPos = length;
      }
    }
    /**
     * Creates a byte string. Its size is the current size of this output
     * stream and its output has been copied to it.
     *
     * @return the current contents of this output stream, as a byte string.
     */
    public synchronized ByteString toByteString() {
      flushLastBuffer();
      return ByteString.copyFrom(flushedBuffers);
    }
    /**
     * Implement java.util.Arrays.copyOf() for jdk 1.5.
     */
    private byte[] copyArray(byte[] buffer, int length) {
      byte[] result = new byte[length];
      System.arraycopy(buffer, 0, result, 0, Math.min(buffer.length, length));
      return result;
    }
    /**
     * Writes the complete contents of this byte array output stream to
     * the specified output stream argument.
     *
     * @param out the output stream to which to write the data.
     * @throws IOException if an I/O error occurs.
     */
    public void writeTo(OutputStream out) throws IOException {
      ByteString[] cachedFlushBuffers;
      byte[] cachedBuffer;
      int cachedBufferPos;
      synchronized (this) {
        // Copy the information we need into local variables so as to hold
        // the lock for as short a time as possible.
        cachedFlushBuffers =
            flushedBuffers.toArray(new ByteString[flushedBuffers.size()]);
        cachedBuffer = buffer;
        cachedBufferPos = bufferPos;
      }
      for (ByteString byteString : cachedFlushBuffers) {
        byteString.writeTo(out);
      }
      // The live buffer is defensively copied before writing because other
      // threads may keep appending to it after the lock above was released.
      out.write(copyArray(cachedBuffer, cachedBufferPos));
    }
    /**
     * Returns the current size of the output stream.
     *
     * @return the current size of the output stream
     */
    public synchronized int size() {
      return flushedBuffersTotalBytes + bufferPos;
    }
    /**
     * Resets this stream, so that all currently accumulated output in the
     * output stream is discarded. The output stream can be used again,
     * reusing the already allocated buffer space.
     */
    public synchronized void reset() {
      flushedBuffers.clear();
      flushedBuffersTotalBytes = 0;
      bufferPos = 0;
    }
    @Override
    public String toString() {
      return String.format("<ByteString.Output@%s size=%d>",
          Integer.toHexString(System.identityHashCode(this)), size());
    }
    /**
     * Internal function used by writers. The current buffer is full, and the
     * writer needs a new buffer whose size is at least the specified minimum
     * size.
     */
    private void flushFullBuffer(int minSize) {
      flushedBuffers.add(new LiteralByteString(buffer));
      flushedBuffersTotalBytes += buffer.length;
      // We want to increase our total capacity by 50%, but as a minimum,
      // the new buffer should also at least be >= minSize and
      // >= initial Capacity.
      int newSize = Math.max(initialCapacity,
          Math.max(minSize, flushedBuffersTotalBytes >>> 1));
      buffer = new byte[newSize];
      bufferPos = 0;
    }
    /**
     * Internal function used by {@link #toByteString()}. The current buffer may
     * or may not be full, but it needs to be flushed.
     */
    private void flushLastBuffer() {
      if (bufferPos < buffer.length) {
        if (bufferPos > 0) {
          // Only the written prefix is flushed; the buffer itself is kept.
          byte[] bufferCopy = copyArray(buffer, bufferPos);
          flushedBuffers.add(new LiteralByteString(bufferCopy));
        }
        // We reuse this buffer for further writes.
      } else {
        // Buffer is completely full. Huzzah.
        flushedBuffers.add(new LiteralByteString(buffer));
        // 99% of the time, we're not going to use this OutputStream again.
        // We set buffer to an empty byte stream so that we're handling this
        // case without wasting space. In the rare case that more writes
        // *do* occur, this empty buffer will be flushed and an appropriately
        // sized new buffer will be created.
        buffer = EMPTY_BYTE_ARRAY;
      }
      flushedBuffersTotalBytes += bufferPos;
      bufferPos = 0;
    }
  }
/**
* Constructs a new {@code ByteString} builder, which allows you to
* efficiently construct a {@code ByteString} by writing to a {@link
* CodedOutputStream}. Using this is much more efficient than calling {@code
* newOutput()} and wrapping that in a {@code CodedOutputStream}.
*
* <p>This is package-private because it's a somewhat confusing interface.
* Users can call {@link Message#toByteString()} instead of calling this
* directly.
*
* @param size The target byte size of the {@code ByteString}. You must write
* exactly this many bytes before building the result.
* @return the builder
*/
static CodedBuilder newCodedBuilder(int size) {
return new CodedBuilder(size);
}
/** See {@link ByteString#newCodedBuilder(int)}. */
static final class CodedBuilder {
private final CodedOutputStream output;
private final byte[] buffer;
private CodedBuilder(int size) {
buffer = new byte[size];
output = CodedOutputStream.newInstance(buffer);
}
public ByteString build() {
output.checkNoSpaceLeft();
// We can be confident that the CodedOutputStream will not modify the
// underlying bytes anymore because it already wrote all of them. So,
// no need to make a copy.
return new LiteralByteString(buffer);
}
public CodedOutputStream getCodedOutput() {
return output;
}
}
  // =================================================================
  // Methods {@link RopeByteString} needs on instances, which aren't part of the
  // public API.
  /**
   * Return the depth of the tree representing this {@code ByteString}, if any,
   * whose root is this node. If this is a leaf node, return 0.
   *
   * @return tree depth or zero
   */
  protected abstract int getTreeDepth();
  /**
   * Return {@code true} if this ByteString is literal (a leaf node) or a
   * flat-enough tree in the sense of {@link RopeByteString}.
   *
   * @return true if the tree is flat enough
   */
  protected abstract boolean isBalanced();
  /**
   * Return the cached hash code if available.
   *
   * @return value of cached hash code or 0 if not computed yet
   */
  protected abstract int peekCachedHashCode();
  /**
   * Compute the hash across the value bytes starting with the given hash, and
   * return the result. This is used to compute the hash across strings
   * represented as a set of pieces by allowing the hash computation to be
   * continued from piece to piece.
   *
   * @param h starting hash value
   * @param offset offset into this value to start looking at data values
   * @param length number of data values to include in the hash computation
   * @return ending hash value
   */
  protected abstract int partialHash(int h, int offset, int length);
@Override
public String toString() {
return String.format("<ByteString@%s size=%d>",
Integer.toHexString(System.identityHashCode(this)), size());
}
}
|
googleads/google-ads-java | 37,606 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/BrandCampaignAssets.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/campaign_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* Assets linked at the campaign level.
* A business_name and at least one logo_asset are required.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.BrandCampaignAssets}
*/
public final class BrandCampaignAssets extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.BrandCampaignAssets)
BrandCampaignAssetsOrBuilder {
private static final long serialVersionUID = 0L;
// Use BrandCampaignAssets.newBuilder() to construct.
private BrandCampaignAssets(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BrandCampaignAssets() {
businessNameAsset_ = "";
logoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
landscapeLogoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new BrandCampaignAssets();
}
  // Message descriptor, defined once in the generated file-level proto class.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v19.services.CampaignServiceProto.internal_static_google_ads_googleads_v19_services_BrandCampaignAssets_descriptor;
  }
  // Maps descriptor fields to this class's accessors for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v19.services.CampaignServiceProto.internal_static_google_ads_googleads_v19_services_BrandCampaignAssets_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v19.services.BrandCampaignAssets.class, com.google.ads.googleads.v19.services.BrandCampaignAssets.Builder.class);
  }
  public static final int BUSINESS_NAME_ASSET_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  // Holds either a String or a ByteString; the getters below lazily convert
  // between the two forms and cache the result in this field.
  private volatile java.lang.Object businessNameAsset_ = "";
  /**
   * <pre>
   * Required. The resource name of the business name text asset.
   * </pre>
   *
   * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The businessNameAsset.
   */
  @java.lang.Override
  public java.lang.String getBusinessNameAsset() {
    java.lang.Object ref = businessNameAsset_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls return it directly.
      businessNameAsset_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The resource name of the business name text asset.
   * </pre>
   *
   * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The bytes for businessNameAsset.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getBusinessNameAssetBytes() {
    java.lang.Object ref = businessNameAsset_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // Cache the encoded ByteString form for subsequent calls.
      businessNameAsset_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int LOGO_ASSET_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  // Repeated string field backed by a lazily-decoded string list.
  private com.google.protobuf.LazyStringArrayList logoAsset_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return A list containing the logoAsset.
   */
  public com.google.protobuf.ProtocolStringList
      getLogoAssetList() {
    return logoAsset_;
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The count of logoAsset.
   */
  public int getLogoAssetCount() {
    return logoAsset_.size();
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @param index The index of the element to return.
   * @return The logoAsset at the given index.
   */
  public java.lang.String getLogoAsset(int index) {
    return logoAsset_.get(index);
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @param index The index of the value to return.
   * @return The bytes of the logoAsset at the given index.
   */
  public com.google.protobuf.ByteString
      getLogoAssetBytes(int index) {
    return logoAsset_.getByteString(index);
  }
  public static final int LANDSCAPE_LOGO_ASSET_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  // Repeated string field backed by a lazily-decoded string list.
  private com.google.protobuf.LazyStringArrayList landscapeLogoAsset_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @return A list containing the landscapeLogoAsset.
   */
  public com.google.protobuf.ProtocolStringList
      getLandscapeLogoAssetList() {
    return landscapeLogoAsset_;
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @return The count of landscapeLogoAsset.
   */
  public int getLandscapeLogoAssetCount() {
    return landscapeLogoAsset_.size();
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @param index The index of the element to return.
   * @return The landscapeLogoAsset at the given index.
   */
  public java.lang.String getLandscapeLogoAsset(int index) {
    return landscapeLogoAsset_.get(index);
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @param index The index of the value to return.
   * @return The bytes of the landscapeLogoAsset at the given index.
   */
  public com.google.protobuf.ByteString
      getLandscapeLogoAssetBytes(int index) {
    return landscapeLogoAsset_.getByteString(index);
  }
// Memoized tri-state initialization flag: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
/**
 * Always initialized: this message declares no proto2-style required fields
 * and no sub-messages that could be uninitialized, so the check is trivially
 * true and cached.
 */
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
/**
 * Serializes this message to the wire format, writing fields in ascending
 * field-number order (1..3) and skipping the default (empty) string field,
 * then appending any unknown fields preserved from parsing.
 */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(businessNameAsset_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, businessNameAsset_);
  }
  for (int i = 0; i < logoAsset_.size(); i++) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, logoAsset_.getRaw(i));
  }
  for (int i = 0; i < landscapeLogoAsset_.size(); i++) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, landscapeLogoAsset_.getRaw(i));
  }
  getUnknownFields().writeTo(output);
}
/**
 * Computes (and memoizes in {@code memoizedSize}) the serialized byte size.
 * For each repeated string field the cost is the payload sizes plus one
 * tag byte per element (field numbers 2 and 3 encode as a 1-byte tag).
 */
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(businessNameAsset_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, businessNameAsset_);
  }
  {
    int dataSize = 0;
    for (int i = 0; i < logoAsset_.size(); i++) {
      dataSize += computeStringSizeNoTag(logoAsset_.getRaw(i));
    }
    size += dataSize;
    size += 1 * getLogoAssetList().size();  // 1 tag byte per element
  }
  {
    int dataSize = 0;
    for (int i = 0; i < landscapeLogoAsset_.size(); i++) {
      dataSize += computeStringSizeNoTag(landscapeLogoAsset_.getRaw(i));
    }
    size += dataSize;
    size += 1 * getLandscapeLogoAssetList().size();  // 1 tag byte per element
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Value equality: compares every declared field plus the preserved unknown
 * fields. Non-BrandCampaignAssets objects fall back to super.equals.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v19.services.BrandCampaignAssets)) {
    return super.equals(obj);
  }
  com.google.ads.googleads.v19.services.BrandCampaignAssets other = (com.google.ads.googleads.v19.services.BrandCampaignAssets) obj;
  if (!getBusinessNameAsset()
      .equals(other.getBusinessNameAsset())) return false;
  if (!getLogoAssetList()
      .equals(other.getLogoAssetList())) return false;
  if (!getLandscapeLogoAssetList()
      .equals(other.getLandscapeLogoAssetList())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
/**
 * Hash consistent with {@link #equals}: seeded with the message descriptor,
 * folds in each set field keyed by its field number, then the unknown-field
 * set. Memoized (0 means "not yet computed").
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + BUSINESS_NAME_ASSET_FIELD_NUMBER;
  hash = (53 * hash) + getBusinessNameAsset().hashCode();
  if (getLogoAssetCount() > 0) {
    hash = (37 * hash) + LOGO_ASSET_FIELD_NUMBER;
    hash = (53 * hash) + getLogoAssetList().hashCode();
  }
  if (getLandscapeLogoAssetCount() > 0) {
    hash = (37 * hash) + LANDSCAPE_LOGO_ASSET_FIELD_NUMBER;
    hash = (53 * hash) + getLandscapeLogoAssetList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads. All delegate to
// the shared PARSER; stream-based variants route through GeneratedMessageV3
// helpers so IOExceptions propagate instead of being wrapped.
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message.
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.BrandCampaignAssets parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods. toBuilder() avoids a redundant mergeFrom when the
// receiver is the shared default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v19.services.BrandCampaignAssets prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * <pre>
 * Assets linked at the campaign level.
 * A business_name and at least one logo_asset are required.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v19.services.BrandCampaignAssets}
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.BrandCampaignAssets)
    com.google.ads.googleads.v19.services.BrandCampaignAssetsOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v19.services.CampaignServiceProto.internal_static_google_ads_googleads_v19_services_BrandCampaignAssets_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v19.services.CampaignServiceProto.internal_static_google_ads_googleads_v19_services_BrandCampaignAssets_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v19.services.BrandCampaignAssets.class, com.google.ads.googleads.v19.services.BrandCampaignAssets.Builder.class);
  }
  // Construct using com.google.ads.googleads.v19.services.BrandCampaignAssets.newBuilder()
  private Builder() {
  }
  private Builder(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }
  // Resets all fields to defaults and clears the has-been-set bit mask.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    businessNameAsset_ = "";
    logoAsset_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
    landscapeLogoAsset_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return com.google.ads.googleads.v19.services.CampaignServiceProto.internal_static_google_ads_googleads_v19_services_BrandCampaignAssets_descriptor;
  }
  @java.lang.Override
  public com.google.ads.googleads.v19.services.BrandCampaignAssets getDefaultInstanceForType() {
    return com.google.ads.googleads.v19.services.BrandCampaignAssets.getDefaultInstance();
  }
  @java.lang.Override
  public com.google.ads.googleads.v19.services.BrandCampaignAssets build() {
    com.google.ads.googleads.v19.services.BrandCampaignAssets result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public com.google.ads.googleads.v19.services.BrandCampaignAssets buildPartial() {
    com.google.ads.googleads.v19.services.BrandCampaignAssets result = new com.google.ads.googleads.v19.services.BrandCampaignAssets(this);
    if (bitField0_ != 0) { buildPartial0(result); }
    onBuilt();
    return result;
  }
  // Copies only the fields whose bitField0_ bit is set; repeated lists are
  // frozen (makeImmutable) before being shared with the built message.
  private void buildPartial0(com.google.ads.googleads.v19.services.BrandCampaignAssets result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.businessNameAsset_ = businessNameAsset_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      logoAsset_.makeImmutable();
      result.logoAsset_ = logoAsset_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      landscapeLogoAsset_.makeImmutable();
      result.landscapeLogoAsset_ = landscapeLogoAsset_;
    }
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(
      com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(
      com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.ads.googleads.v19.services.BrandCampaignAssets) {
      return mergeFrom((com.google.ads.googleads.v19.services.BrandCampaignAssets)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  // Proto merge semantics: scalar overwrites when non-default, repeated
  // fields are concatenated. Empty target lists adopt the (immutable)
  // source list directly to avoid a copy.
  public Builder mergeFrom(com.google.ads.googleads.v19.services.BrandCampaignAssets other) {
    if (other == com.google.ads.googleads.v19.services.BrandCampaignAssets.getDefaultInstance()) return this;
    if (!other.getBusinessNameAsset().isEmpty()) {
      businessNameAsset_ = other.businessNameAsset_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (!other.logoAsset_.isEmpty()) {
      if (logoAsset_.isEmpty()) {
        logoAsset_ = other.logoAsset_;
        bitField0_ |= 0x00000002;
      } else {
        ensureLogoAssetIsMutable();
        logoAsset_.addAll(other.logoAsset_);
      }
      onChanged();
    }
    if (!other.landscapeLogoAsset_.isEmpty()) {
      if (landscapeLogoAsset_.isEmpty()) {
        landscapeLogoAsset_ = other.landscapeLogoAsset_;
        bitField0_ |= 0x00000004;
      } else {
        ensureLandscapeLogoAssetIsMutable();
        landscapeLogoAsset_.addAll(other.landscapeLogoAsset_);
      }
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }
  // Wire-format parse loop: tag 10/18/26 = (field number << 3) | 2
  // (length-delimited) for fields 1, 2 and 3 respectively.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: {
            businessNameAsset_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
          case 18: {
            java.lang.String s = input.readStringRequireUtf8();
            ensureLogoAssetIsMutable();
            logoAsset_.add(s);
            break;
          } // case 18
          case 26: {
            java.lang.String s = input.readStringRequireUtf8();
            ensureLandscapeLogoAssetIsMutable();
            landscapeLogoAsset_.add(s);
            break;
          } // case 26
          default: {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }
  // Bit mask tracking which fields have been explicitly set:
  // 0x1 = business_name_asset, 0x2 = logo_asset, 0x4 = landscape_logo_asset.
  private int bitField0_;
  private java.lang.Object businessNameAsset_ = "";
  /**
   * <pre>
   * Required. The resource name of the business name text asset.
   * </pre>
   *
   * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The businessNameAsset.
   */
  public java.lang.String getBusinessNameAsset() {
    java.lang.Object ref = businessNameAsset_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      businessNameAsset_ = s;  // cache the decoded String
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <pre>
   * Required. The resource name of the business name text asset.
   * </pre>
   *
   * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The bytes for businessNameAsset.
   */
  public com.google.protobuf.ByteString
      getBusinessNameAssetBytes() {
    java.lang.Object ref = businessNameAsset_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      businessNameAsset_ = b;  // cache the encoded ByteString
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <pre>
   * Required. The resource name of the business name text asset.
   * </pre>
   *
   * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @param value The businessNameAsset to set.
   * @return This builder for chaining.
   */
  public Builder setBusinessNameAsset(
      java.lang.String value) {
    if (value == null) { throw new NullPointerException(); }
    businessNameAsset_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Required. The resource name of the business name text asset.
   * </pre>
   *
   * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return This builder for chaining.
   */
  public Builder clearBusinessNameAsset() {
    businessNameAsset_ = getDefaultInstance().getBusinessNameAsset();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Required. The resource name of the business name text asset.
   * </pre>
   *
   * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @param value The bytes for businessNameAsset to set.
   * @return This builder for chaining.
   */
  public Builder setBusinessNameAssetBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) { throw new NullPointerException(); }
    checkByteStringIsUtf8(value);
    businessNameAsset_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  private com.google.protobuf.LazyStringArrayList logoAsset_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
  // Copy-on-write guard: replaces a shared/immutable list with a mutable
  // copy before any in-place modification, and marks the field as set.
  private void ensureLogoAssetIsMutable() {
    if (!logoAsset_.isModifiable()) {
      logoAsset_ = new com.google.protobuf.LazyStringArrayList(logoAsset_);
    }
    bitField0_ |= 0x00000002;
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return A list containing the logoAsset.
   */
  public com.google.protobuf.ProtocolStringList
      getLogoAssetList() {
    logoAsset_.makeImmutable();
    return logoAsset_;
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The count of logoAsset.
   */
  public int getLogoAssetCount() {
    return logoAsset_.size();
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @param index The index of the element to return.
   * @return The logoAsset at the given index.
   */
  public java.lang.String getLogoAsset(int index) {
    return logoAsset_.get(index);
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @param index The index of the value to return.
   * @return The bytes of the logoAsset at the given index.
   */
  public com.google.protobuf.ByteString
      getLogoAssetBytes(int index) {
    return logoAsset_.getByteString(index);
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @param index The index to set the value at.
   * @param value The logoAsset to set.
   * @return This builder for chaining.
   */
  public Builder setLogoAsset(
      int index, java.lang.String value) {
    if (value == null) { throw new NullPointerException(); }
    ensureLogoAssetIsMutable();
    logoAsset_.set(index, value);
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @param value The logoAsset to add.
   * @return This builder for chaining.
   */
  public Builder addLogoAsset(
      java.lang.String value) {
    if (value == null) { throw new NullPointerException(); }
    ensureLogoAssetIsMutable();
    logoAsset_.add(value);
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @param values The logoAsset to add.
   * @return This builder for chaining.
   */
  public Builder addAllLogoAsset(
      java.lang.Iterable<java.lang.String> values) {
    ensureLogoAssetIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(
        values, logoAsset_);
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return This builder for chaining.
   */
  public Builder clearLogoAsset() {
    logoAsset_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
    bitField0_ = (bitField0_ & ~0x00000002);;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Required. The resource name of square logo assets.
   * </pre>
   *
   * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @param value The bytes of the logoAsset to add.
   * @return This builder for chaining.
   */
  public Builder addLogoAssetBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) { throw new NullPointerException(); }
    checkByteStringIsUtf8(value);
    ensureLogoAssetIsMutable();
    logoAsset_.add(value);
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  private com.google.protobuf.LazyStringArrayList landscapeLogoAsset_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
  // Copy-on-write guard for landscape_logo_asset; see ensureLogoAssetIsMutable.
  private void ensureLandscapeLogoAssetIsMutable() {
    if (!landscapeLogoAsset_.isModifiable()) {
      landscapeLogoAsset_ = new com.google.protobuf.LazyStringArrayList(landscapeLogoAsset_);
    }
    bitField0_ |= 0x00000004;
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @return A list containing the landscapeLogoAsset.
   */
  public com.google.protobuf.ProtocolStringList
      getLandscapeLogoAssetList() {
    landscapeLogoAsset_.makeImmutable();
    return landscapeLogoAsset_;
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @return The count of landscapeLogoAsset.
   */
  public int getLandscapeLogoAssetCount() {
    return landscapeLogoAsset_.size();
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @param index The index of the element to return.
   * @return The landscapeLogoAsset at the given index.
   */
  public java.lang.String getLandscapeLogoAsset(int index) {
    return landscapeLogoAsset_.get(index);
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @param index The index of the value to return.
   * @return The bytes of the landscapeLogoAsset at the given index.
   */
  public com.google.protobuf.ByteString
      getLandscapeLogoAssetBytes(int index) {
    return landscapeLogoAsset_.getByteString(index);
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @param index The index to set the value at.
   * @param value The landscapeLogoAsset to set.
   * @return This builder for chaining.
   */
  public Builder setLandscapeLogoAsset(
      int index, java.lang.String value) {
    if (value == null) { throw new NullPointerException(); }
    ensureLandscapeLogoAssetIsMutable();
    landscapeLogoAsset_.set(index, value);
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @param value The landscapeLogoAsset to add.
   * @return This builder for chaining.
   */
  public Builder addLandscapeLogoAsset(
      java.lang.String value) {
    if (value == null) { throw new NullPointerException(); }
    ensureLandscapeLogoAssetIsMutable();
    landscapeLogoAsset_.add(value);
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @param values The landscapeLogoAsset to add.
   * @return This builder for chaining.
   */
  public Builder addAllLandscapeLogoAsset(
      java.lang.Iterable<java.lang.String> values) {
    ensureLandscapeLogoAssetIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(
        values, landscapeLogoAsset_);
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @return This builder for chaining.
   */
  public Builder clearLandscapeLogoAsset() {
    landscapeLogoAsset_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
    bitField0_ = (bitField0_ & ~0x00000004);;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Optional. The resource name of landscape logo assets.
   * </pre>
   *
   * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @param value The bytes of the landscapeLogoAsset to add.
   * @return This builder for chaining.
   */
  public Builder addLandscapeLogoAssetBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) { throw new NullPointerException(); }
    checkByteStringIsUtf8(value);
    ensureLandscapeLogoAssetIsMutable();
    landscapeLogoAsset_.add(value);
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.BrandCampaignAssets)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.BrandCampaignAssets)
// Singleton default instance shared by all callers; also serves as the
// prototype for newBuilder().
private static final com.google.ads.googleads.v19.services.BrandCampaignAssets DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.BrandCampaignAssets();
}
public static com.google.ads.googleads.v19.services.BrandCampaignAssets getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser implemented via the Builder; partial-parse failures attach the
// partially built message to the thrown exception.
private static final com.google.protobuf.Parser<BrandCampaignAssets>
    PARSER = new com.google.protobuf.AbstractParser<BrandCampaignAssets>() {
  @java.lang.Override
  public BrandCampaignAssets parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (com.google.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
public static com.google.protobuf.Parser<BrandCampaignAssets> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BrandCampaignAssets> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.BrandCampaignAssets getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ==== concatenation boundary (metadata row, not Java source) ====
// repo: googleads/google-ads-java | size: 37,606
// file: google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/BrandCampaignAssets.java
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/services/campaign_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* Assets linked at the campaign level.
* A business_name and at least one logo_asset are required.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.BrandCampaignAssets}
*/
public final class BrandCampaignAssets extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.BrandCampaignAssets)
BrandCampaignAssetsOrBuilder {
private static final long serialVersionUID = 0L;
// Use BrandCampaignAssets.newBuilder() to construct.
private BrandCampaignAssets(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used only for the default instance; initializes every
// field to its proto3 default.
private BrandCampaignAssets() {
  businessNameAsset_ = "";
  logoAsset_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
  landscapeLogoAsset_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new BrandCampaignAssets();
}
// Reflection hooks: descriptor and field-accessor table generated from
// campaign_service.proto.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.ads.googleads.v20.services.CampaignServiceProto.internal_static_google_ads_googleads_v20_services_BrandCampaignAssets_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.ads.googleads.v20.services.CampaignServiceProto.internal_static_google_ads_googleads_v20_services_BrandCampaignAssets_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.ads.googleads.v20.services.BrandCampaignAssets.class, com.google.ads.googleads.v20.services.BrandCampaignAssets.Builder.class);
}
public static final int BUSINESS_NAME_ASSET_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; lazily converted and cached by the
// accessors below (standard generated string-field representation).
@SuppressWarnings("serial")
private volatile java.lang.Object businessNameAsset_ = "";
/**
 * <pre>
 * Required. The resource name of the business name text asset.
 * </pre>
 *
 * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 * @return The businessNameAsset.
 */
@java.lang.Override
public java.lang.String getBusinessNameAsset() {
  java.lang.Object ref = businessNameAsset_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    businessNameAsset_ = s;  // cache the decoded String
    return s;
  }
}
/**
 * <pre>
 * Required. The resource name of the business name text asset.
 * </pre>
 *
 * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 * @return The bytes for businessNameAsset.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getBusinessNameAssetBytes() {
  java.lang.Object ref = businessNameAsset_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    businessNameAsset_ = b;  // cache the encoded ByteString
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int LOGO_ASSET_FIELD_NUMBER = 2;
// Backing storage for the repeated `logo_asset` field; shared immutable
// empty list until populated via the Builder.
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList logoAsset_ =
    com.google.protobuf.LazyStringArrayList.emptyList();
/**
 * <pre>
 * Required. The resource name of square logo assets.
 * </pre>
 *
 * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 * @return A list containing the logoAsset.
 */
public com.google.protobuf.ProtocolStringList
    getLogoAssetList() {
  return logoAsset_;
}
/**
 * <pre>
 * Required. The resource name of square logo assets.
 * </pre>
 *
 * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 * @return The count of logoAsset.
 */
public int getLogoAssetCount() {
  return logoAsset_.size();
}
/**
 * <pre>
 * Required. The resource name of square logo assets.
 * </pre>
 *
 * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 * @param index The index of the element to return.
 * @return The logoAsset at the given index.
 */
public java.lang.String getLogoAsset(int index) {
  return logoAsset_.get(index);
}
/**
 * <pre>
 * Required. The resource name of square logo assets.
 * </pre>
 *
 * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 * @param index The index of the value to return.
 * @return The bytes of the logoAsset at the given index.
 */
public com.google.protobuf.ByteString
    getLogoAssetBytes(int index) {
  return logoAsset_.getByteString(index);
}
public static final int LANDSCAPE_LOGO_ASSET_FIELD_NUMBER = 3;
// Backing storage for the repeated `landscape_logo_asset` field.
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList landscapeLogoAsset_ =
    com.google.protobuf.LazyStringArrayList.emptyList();
/**
 * <pre>
 * Optional. The resource name of landscape logo assets.
 * </pre>
 *
 * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 * @return A list containing the landscapeLogoAsset.
 */
public com.google.protobuf.ProtocolStringList
    getLandscapeLogoAssetList() {
  return landscapeLogoAsset_;
}
/**
 * <pre>
 * Optional. The resource name of landscape logo assets.
 * </pre>
 *
 * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 * @return The count of landscapeLogoAsset.
 */
public int getLandscapeLogoAssetCount() {
  return landscapeLogoAsset_.size();
}
/**
 * <pre>
 * Optional. The resource name of landscape logo assets.
 * </pre>
 *
 * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 * @param index The index of the element to return.
 * @return The landscapeLogoAsset at the given index.
 */
public java.lang.String getLandscapeLogoAsset(int index) {
  return landscapeLogoAsset_.get(index);
}
/**
 * <pre>
 * Optional. The resource name of landscape logo assets.
 * </pre>
 *
 * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 * @param index The index of the value to return.
 * @return The bytes of the landscapeLogoAsset at the given index.
 */
public com.google.protobuf.ByteString
    getLandscapeLogoAssetBytes(int index) {
  return landscapeLogoAsset_.getByteString(index);
}
  // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This proto3 message has no required-by-wire fields or sub-messages to
    // validate, so it is always initialized; cache and return true.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // Proto3 semantics: the scalar string field is only emitted when non-default
    // (non-empty); repeated fields emit one length-delimited entry per element.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(businessNameAsset_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, businessNameAsset_);
    }
    for (int i = 0; i < logoAsset_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, logoAsset_.getRaw(i));
    }
    for (int i = 0; i < landscapeLogoAsset_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, landscapeLogoAsset_.getRaw(i));
    }
    // Preserve any fields that were unknown at parse time (forward compatibility).
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(businessNameAsset_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, businessNameAsset_);
    }
    {
      int dataSize = 0;
      for (int i = 0; i < logoAsset_.size(); i++) {
        dataSize += computeStringSizeNoTag(logoAsset_.getRaw(i));
      }
      size += dataSize;
      // Each element carries a 1-byte tag for field number 2.
      size += 1 * getLogoAssetList().size();
    }
    {
      int dataSize = 0;
      for (int i = 0; i < landscapeLogoAsset_.size(); i++) {
        dataSize += computeStringSizeNoTag(landscapeLogoAsset_.getRaw(i));
      }
      size += dataSize;
      // Each element carries a 1-byte tag for field number 3.
      size += 1 * getLandscapeLogoAssetList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    // Non-BrandCampaignAssets comparands defer to the superclass implementation.
    if (!(obj instanceof com.google.ads.googleads.v20.services.BrandCampaignAssets)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v20.services.BrandCampaignAssets other = (com.google.ads.googleads.v20.services.BrandCampaignAssets) obj;
    // Field-by-field comparison of all three declared fields plus unknown fields.
    if (!getBusinessNameAsset()
        .equals(other.getBusinessNameAsset())) return false;
    if (!getLogoAssetList()
        .equals(other.getLogoAssetList())) return false;
    if (!getLandscapeLogoAssetList()
        .equals(other.getLandscapeLogoAssetList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode == 0 means "not yet computed"; cache once calculated.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + BUSINESS_NAME_ASSET_FIELD_NUMBER;
    hash = (53 * hash) + getBusinessNameAsset().hashCode();
    // Repeated fields contribute only when non-empty, mirroring equals().
    if (getLogoAssetCount() > 0) {
      hash = (37 * hash) + LOGO_ASSET_FIELD_NUMBER;
      hash = (53 * hash) + getLogoAssetList().hashCode();
    }
    if (getLandscapeLogoAssetCount() > 0) {
      hash = (37 * hash) + LANDSCAPE_LOGO_ASSET_FIELD_NUMBER;
      hash = (53 * hash) + getLandscapeLogoAssetList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistry. All delegate to the shared PARSER.
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.ads.googleads.v20.services.BrandCampaignAssets prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; otherwise copy this
    // message's state into the new builder.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Generated builder for {@code BrandCampaignAssets}; mutable counterpart of
   * the immutable message class.
   *
   * <pre>
   * Assets linked at the campaign level.
   * A business_name and at least one logo_asset are required.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v20.services.BrandCampaignAssets}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.BrandCampaignAssets)
      com.google.ads.googleads.v20.services.BrandCampaignAssetsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v20.services.CampaignServiceProto.internal_static_google_ads_googleads_v20_services_BrandCampaignAssets_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v20.services.CampaignServiceProto.internal_static_google_ads_googleads_v20_services_BrandCampaignAssets_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v20.services.BrandCampaignAssets.class, com.google.ads.googleads.v20.services.BrandCampaignAssets.Builder.class);
    }
    // Construct using com.google.ads.googleads.v20.services.BrandCampaignAssets.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its proto3 default and clears the set-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      businessNameAsset_ = "";
      logoAsset_ =
          com.google.protobuf.LazyStringArrayList.emptyList();
      landscapeLogoAsset_ =
          com.google.protobuf.LazyStringArrayList.emptyList();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v20.services.CampaignServiceProto.internal_static_google_ads_googleads_v20_services_BrandCampaignAssets_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.services.BrandCampaignAssets getDefaultInstanceForType() {
      return com.google.ads.googleads.v20.services.BrandCampaignAssets.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.services.BrandCampaignAssets build() {
      com.google.ads.googleads.v20.services.BrandCampaignAssets result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.services.BrandCampaignAssets buildPartial() {
      com.google.ads.googleads.v20.services.BrandCampaignAssets result = new com.google.ads.googleads.v20.services.BrandCampaignAssets(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies only the fields whose set-bit is on into the result; repeated
    // lists are frozen (makeImmutable) and shared with the built message.
    private void buildPartial0(com.google.ads.googleads.v20.services.BrandCampaignAssets result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.businessNameAsset_ = businessNameAsset_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        logoAsset_.makeImmutable();
        result.logoAsset_ = logoAsset_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        landscapeLogoAsset_.makeImmutable();
        result.landscapeLogoAsset_ = landscapeLogoAsset_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v20.services.BrandCampaignAssets) {
        return mergeFrom((com.google.ads.googleads.v20.services.BrandCampaignAssets)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: non-empty scalar overwrites; repeated lists are
    // adopted wholesale when this builder's list is empty, else appended.
    public Builder mergeFrom(com.google.ads.googleads.v20.services.BrandCampaignAssets other) {
      if (other == com.google.ads.googleads.v20.services.BrandCampaignAssets.getDefaultInstance()) return this;
      if (!other.getBusinessNameAsset().isEmpty()) {
        businessNameAsset_ = other.businessNameAsset_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.logoAsset_.isEmpty()) {
        if (logoAsset_.isEmpty()) {
          logoAsset_ = other.logoAsset_;
          bitField0_ |= 0x00000002;
        } else {
          ensureLogoAssetIsMutable();
          logoAsset_.addAll(other.logoAsset_);
        }
        onChanged();
      }
      if (!other.landscapeLogoAsset_.isEmpty()) {
        if (landscapeLogoAsset_.isEmpty()) {
          landscapeLogoAsset_ = other.landscapeLogoAsset_;
          bitField0_ |= 0x00000004;
        } else {
          ensureLandscapeLogoAssetIsMutable();
          landscapeLogoAsset_.addAll(other.landscapeLogoAsset_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming parse loop: dispatches on the wire tag (field_number << 3 | wire_type).
    // Tags 10/18/26 are the length-delimited encodings of fields 1, 2 and 3.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              businessNameAsset_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 18: {
              java.lang.String s = input.readStringRequireUtf8();
              ensureLogoAssetIsMutable();
              logoAsset_.add(s);
              break;
            } // case 18
            case 26: {
              java.lang.String s = input.readStringRequireUtf8();
              ensureLandscapeLogoAssetIsMutable();
              landscapeLogoAsset_.add(s);
              break;
            } // case 26
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Tracks explicitly-set fields: bit 0 = business_name_asset,
    // bit 1 = logo_asset, bit 2 = landscape_logo_asset.
    private int bitField0_;
    private java.lang.Object businessNameAsset_ = "";
    /**
     * <pre>
     * Required. The resource name of the business name text asset.
     * </pre>
     *
     * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The businessNameAsset.
     */
    public java.lang.String getBusinessNameAsset() {
      java.lang.Object ref = businessNameAsset_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode a cached ByteString to String and memoize the result.
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        businessNameAsset_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the business name text asset.
     * </pre>
     *
     * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The bytes for businessNameAsset.
     */
    public com.google.protobuf.ByteString
        getBusinessNameAssetBytes() {
      java.lang.Object ref = businessNameAsset_;
      if (ref instanceof String) {
        // Lazily encode a cached String to ByteString and memoize the result.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        businessNameAsset_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the business name text asset.
     * </pre>
     *
     * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The businessNameAsset to set.
     * @return This builder for chaining.
     */
    public Builder setBusinessNameAsset(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      businessNameAsset_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the business name text asset.
     * </pre>
     *
     * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return This builder for chaining.
     */
    public Builder clearBusinessNameAsset() {
      businessNameAsset_ = getDefaultInstance().getBusinessNameAsset();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the business name text asset.
     * </pre>
     *
     * <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The bytes for businessNameAsset to set.
     * @return This builder for chaining.
     */
    public Builder setBusinessNameAssetBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      businessNameAsset_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private com.google.protobuf.LazyStringArrayList logoAsset_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
    // Copy-on-write: replace a shared/frozen list with a mutable copy before mutating.
    private void ensureLogoAssetIsMutable() {
      if (!logoAsset_.isModifiable()) {
        logoAsset_ = new com.google.protobuf.LazyStringArrayList(logoAsset_);
      }
      bitField0_ |= 0x00000002;
    }
    /**
     * <pre>
     * Required. The resource name of square logo assets.
     * </pre>
     *
     * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return A list containing the logoAsset.
     */
    public com.google.protobuf.ProtocolStringList
        getLogoAssetList() {
      logoAsset_.makeImmutable();
      return logoAsset_;
    }
    /**
     * <pre>
     * Required. The resource name of square logo assets.
     * </pre>
     *
     * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The count of logoAsset.
     */
    public int getLogoAssetCount() {
      return logoAsset_.size();
    }
    /**
     * <pre>
     * Required. The resource name of square logo assets.
     * </pre>
     *
     * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param index The index of the element to return.
     * @return The logoAsset at the given index.
     */
    public java.lang.String getLogoAsset(int index) {
      return logoAsset_.get(index);
    }
    /**
     * <pre>
     * Required. The resource name of square logo assets.
     * </pre>
     *
     * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param index The index of the value to return.
     * @return The bytes of the logoAsset at the given index.
     */
    public com.google.protobuf.ByteString
        getLogoAssetBytes(int index) {
      return logoAsset_.getByteString(index);
    }
    /**
     * <pre>
     * Required. The resource name of square logo assets.
     * </pre>
     *
     * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param index The index to set the value at.
     * @param value The logoAsset to set.
     * @return This builder for chaining.
     */
    public Builder setLogoAsset(
        int index, java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      ensureLogoAssetIsMutable();
      logoAsset_.set(index, value);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of square logo assets.
     * </pre>
     *
     * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The logoAsset to add.
     * @return This builder for chaining.
     */
    public Builder addLogoAsset(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      ensureLogoAssetIsMutable();
      logoAsset_.add(value);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of square logo assets.
     * </pre>
     *
     * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param values The logoAsset to add.
     * @return This builder for chaining.
     */
    public Builder addAllLogoAsset(
        java.lang.Iterable<java.lang.String> values) {
      ensureLogoAssetIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(
          values, logoAsset_);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of square logo assets.
     * </pre>
     *
     * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return This builder for chaining.
     */
    public Builder clearLogoAsset() {
      logoAsset_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of square logo assets.
     * </pre>
     *
     * <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The bytes of the logoAsset to add.
     * @return This builder for chaining.
     */
    public Builder addLogoAssetBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      ensureLogoAssetIsMutable();
      logoAsset_.add(value);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private com.google.protobuf.LazyStringArrayList landscapeLogoAsset_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
    // Copy-on-write: replace a shared/frozen list with a mutable copy before mutating.
    private void ensureLandscapeLogoAssetIsMutable() {
      if (!landscapeLogoAsset_.isModifiable()) {
        landscapeLogoAsset_ = new com.google.protobuf.LazyStringArrayList(landscapeLogoAsset_);
      }
      bitField0_ |= 0x00000004;
    }
    /**
     * <pre>
     * Optional. The resource name of landscape logo assets.
     * </pre>
     *
     * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return A list containing the landscapeLogoAsset.
     */
    public com.google.protobuf.ProtocolStringList
        getLandscapeLogoAssetList() {
      landscapeLogoAsset_.makeImmutable();
      return landscapeLogoAsset_;
    }
    /**
     * <pre>
     * Optional. The resource name of landscape logo assets.
     * </pre>
     *
     * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return The count of landscapeLogoAsset.
     */
    public int getLandscapeLogoAssetCount() {
      return landscapeLogoAsset_.size();
    }
    /**
     * <pre>
     * Optional. The resource name of landscape logo assets.
     * </pre>
     *
     * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param index The index of the element to return.
     * @return The landscapeLogoAsset at the given index.
     */
    public java.lang.String getLandscapeLogoAsset(int index) {
      return landscapeLogoAsset_.get(index);
    }
    /**
     * <pre>
     * Optional. The resource name of landscape logo assets.
     * </pre>
     *
     * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param index The index of the value to return.
     * @return The bytes of the landscapeLogoAsset at the given index.
     */
    public com.google.protobuf.ByteString
        getLandscapeLogoAssetBytes(int index) {
      return landscapeLogoAsset_.getByteString(index);
    }
    /**
     * <pre>
     * Optional. The resource name of landscape logo assets.
     * </pre>
     *
     * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param index The index to set the value at.
     * @param value The landscapeLogoAsset to set.
     * @return This builder for chaining.
     */
    public Builder setLandscapeLogoAsset(
        int index, java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      ensureLandscapeLogoAssetIsMutable();
      landscapeLogoAsset_.set(index, value);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Optional. The resource name of landscape logo assets.
     * </pre>
     *
     * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param value The landscapeLogoAsset to add.
     * @return This builder for chaining.
     */
    public Builder addLandscapeLogoAsset(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      ensureLandscapeLogoAssetIsMutable();
      landscapeLogoAsset_.add(value);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Optional. The resource name of landscape logo assets.
     * </pre>
     *
     * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param values The landscapeLogoAsset to add.
     * @return This builder for chaining.
     */
    public Builder addAllLandscapeLogoAsset(
        java.lang.Iterable<java.lang.String> values) {
      ensureLandscapeLogoAssetIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(
          values, landscapeLogoAsset_);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Optional. The resource name of landscape logo assets.
     * </pre>
     *
     * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return This builder for chaining.
     */
    public Builder clearLandscapeLogoAsset() {
      landscapeLogoAsset_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Optional. The resource name of landscape logo assets.
     * </pre>
     *
     * <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param value The bytes of the landscapeLogoAsset to add.
     * @return This builder for chaining.
     */
    public Builder addLandscapeLogoAssetBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      ensureLandscapeLogoAssetIsMutable();
      landscapeLogoAsset_.add(value);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.BrandCampaignAssets)
  }
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.BrandCampaignAssets)
  // Singleton default (all-fields-default) instance shared by getDefaultInstance().
  private static final com.google.ads.googleads.v20.services.BrandCampaignAssets DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.BrandCampaignAssets();
  }
  public static com.google.ads.googleads.v20.services.BrandCampaignAssets getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared stateless parser; delegates to Builder.mergeFrom and returns a
  // partial message (with any parse error carrying the unfinished message).
  private static final com.google.protobuf.Parser<BrandCampaignAssets>
      PARSER = new com.google.protobuf.AbstractParser<BrandCampaignAssets>() {
    @java.lang.Override
    public BrandCampaignAssets parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures so callers see a protobuf-typed exception.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<BrandCampaignAssets> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<BrandCampaignAssets> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v20.services.BrandCampaignAssets getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleads/google-ads-java | 37,606 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/BrandCampaignAssets.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/campaign_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* Assets linked at the campaign level.
* A business_name and at least one logo_asset are required.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.BrandCampaignAssets}
*/
public final class BrandCampaignAssets extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.BrandCampaignAssets)
BrandCampaignAssetsOrBuilder {
private static final long serialVersionUID = 0L;
// Use BrandCampaignAssets.newBuilder() to construct.
private BrandCampaignAssets(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BrandCampaignAssets() {
businessNameAsset_ = "";
logoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
landscapeLogoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new BrandCampaignAssets();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.CampaignServiceProto.internal_static_google_ads_googleads_v21_services_BrandCampaignAssets_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.CampaignServiceProto.internal_static_google_ads_googleads_v21_services_BrandCampaignAssets_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.BrandCampaignAssets.class, com.google.ads.googleads.v21.services.BrandCampaignAssets.Builder.class);
}
public static final int BUSINESS_NAME_ASSET_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object businessNameAsset_ = "";
/**
* <pre>
* Required. The resource name of the business name text asset.
* </pre>
*
* <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The businessNameAsset.
*/
@java.lang.Override
public java.lang.String getBusinessNameAsset() {
java.lang.Object ref = businessNameAsset_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
businessNameAsset_ = s;
return s;
}
}
/**
* <pre>
* Required. The resource name of the business name text asset.
* </pre>
*
* <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for businessNameAsset.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getBusinessNameAssetBytes() {
java.lang.Object ref = businessNameAsset_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
businessNameAsset_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LOGO_ASSET_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList logoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the logoAsset.
*/
public com.google.protobuf.ProtocolStringList
getLogoAssetList() {
return logoAsset_;
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of logoAsset.
*/
public int getLogoAssetCount() {
return logoAsset_.size();
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The logoAsset at the given index.
*/
public java.lang.String getLogoAsset(int index) {
return logoAsset_.get(index);
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the logoAsset at the given index.
*/
public com.google.protobuf.ByteString
getLogoAssetBytes(int index) {
return logoAsset_.getByteString(index);
}
public static final int LANDSCAPE_LOGO_ASSET_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList landscapeLogoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return A list containing the landscapeLogoAsset.
*/
public com.google.protobuf.ProtocolStringList
getLandscapeLogoAssetList() {
return landscapeLogoAsset_;
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return The count of landscapeLogoAsset.
*/
public int getLandscapeLogoAssetCount() {
return landscapeLogoAsset_.size();
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the element to return.
* @return The landscapeLogoAsset at the given index.
*/
public java.lang.String getLandscapeLogoAsset(int index) {
return landscapeLogoAsset_.get(index);
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the value to return.
* @return The bytes of the landscapeLogoAsset at the given index.
*/
public com.google.protobuf.ByteString
getLandscapeLogoAssetBytes(int index) {
return landscapeLogoAsset_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(businessNameAsset_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, businessNameAsset_);
}
for (int i = 0; i < logoAsset_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, logoAsset_.getRaw(i));
}
for (int i = 0; i < landscapeLogoAsset_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, landscapeLogoAsset_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(businessNameAsset_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, businessNameAsset_);
}
{
int dataSize = 0;
for (int i = 0; i < logoAsset_.size(); i++) {
dataSize += computeStringSizeNoTag(logoAsset_.getRaw(i));
}
size += dataSize;
size += 1 * getLogoAssetList().size();
}
{
int dataSize = 0;
for (int i = 0; i < landscapeLogoAsset_.size(); i++) {
dataSize += computeStringSizeNoTag(landscapeLogoAsset_.getRaw(i));
}
size += dataSize;
size += 1 * getLandscapeLogoAssetList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.services.BrandCampaignAssets)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.services.BrandCampaignAssets other = (com.google.ads.googleads.v21.services.BrandCampaignAssets) obj;
if (!getBusinessNameAsset()
.equals(other.getBusinessNameAsset())) return false;
if (!getLogoAssetList()
.equals(other.getLogoAssetList())) return false;
if (!getLandscapeLogoAssetList()
.equals(other.getLandscapeLogoAssetList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + BUSINESS_NAME_ASSET_FIELD_NUMBER;
hash = (53 * hash) + getBusinessNameAsset().hashCode();
if (getLogoAssetCount() > 0) {
hash = (37 * hash) + LOGO_ASSET_FIELD_NUMBER;
hash = (53 * hash) + getLogoAssetList().hashCode();
}
if (getLandscapeLogoAssetCount() > 0) {
hash = (37 * hash) + LANDSCAPE_LOGO_ASSET_FIELD_NUMBER;
hash = (53 * hash) + getLandscapeLogoAssetList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.services.BrandCampaignAssets prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Assets linked at the campaign level.
* A business_name and at least one logo_asset are required.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.BrandCampaignAssets}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.BrandCampaignAssets)
com.google.ads.googleads.v21.services.BrandCampaignAssetsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.CampaignServiceProto.internal_static_google_ads_googleads_v21_services_BrandCampaignAssets_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.CampaignServiceProto.internal_static_google_ads_googleads_v21_services_BrandCampaignAssets_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.BrandCampaignAssets.class, com.google.ads.googleads.v21.services.BrandCampaignAssets.Builder.class);
}
// Construct using com.google.ads.googleads.v21.services.BrandCampaignAssets.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
businessNameAsset_ = "";
logoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
landscapeLogoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.services.CampaignServiceProto.internal_static_google_ads_googleads_v21_services_BrandCampaignAssets_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.BrandCampaignAssets getDefaultInstanceForType() {
return com.google.ads.googleads.v21.services.BrandCampaignAssets.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.services.BrandCampaignAssets build() {
com.google.ads.googleads.v21.services.BrandCampaignAssets result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.BrandCampaignAssets buildPartial() {
com.google.ads.googleads.v21.services.BrandCampaignAssets result = new com.google.ads.googleads.v21.services.BrandCampaignAssets(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v21.services.BrandCampaignAssets result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.businessNameAsset_ = businessNameAsset_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
logoAsset_.makeImmutable();
result.logoAsset_ = logoAsset_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
landscapeLogoAsset_.makeImmutable();
result.landscapeLogoAsset_ = landscapeLogoAsset_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.services.BrandCampaignAssets) {
return mergeFrom((com.google.ads.googleads.v21.services.BrandCampaignAssets)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.services.BrandCampaignAssets other) {
if (other == com.google.ads.googleads.v21.services.BrandCampaignAssets.getDefaultInstance()) return this;
if (!other.getBusinessNameAsset().isEmpty()) {
businessNameAsset_ = other.businessNameAsset_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.logoAsset_.isEmpty()) {
if (logoAsset_.isEmpty()) {
logoAsset_ = other.logoAsset_;
bitField0_ |= 0x00000002;
} else {
ensureLogoAssetIsMutable();
logoAsset_.addAll(other.logoAsset_);
}
onChanged();
}
if (!other.landscapeLogoAsset_.isEmpty()) {
if (landscapeLogoAsset_.isEmpty()) {
landscapeLogoAsset_ = other.landscapeLogoAsset_;
bitField0_ |= 0x00000004;
} else {
ensureLandscapeLogoAssetIsMutable();
landscapeLogoAsset_.addAll(other.landscapeLogoAsset_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
businessNameAsset_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
java.lang.String s = input.readStringRequireUtf8();
ensureLogoAssetIsMutable();
logoAsset_.add(s);
break;
} // case 18
case 26: {
java.lang.String s = input.readStringRequireUtf8();
ensureLandscapeLogoAssetIsMutable();
landscapeLogoAsset_.add(s);
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object businessNameAsset_ = "";
/**
* <pre>
* Required. The resource name of the business name text asset.
* </pre>
*
* <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The businessNameAsset.
*/
public java.lang.String getBusinessNameAsset() {
java.lang.Object ref = businessNameAsset_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
businessNameAsset_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The resource name of the business name text asset.
* </pre>
*
* <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for businessNameAsset.
*/
public com.google.protobuf.ByteString
getBusinessNameAssetBytes() {
java.lang.Object ref = businessNameAsset_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
businessNameAsset_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The resource name of the business name text asset.
* </pre>
*
* <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The businessNameAsset to set.
* @return This builder for chaining.
*/
public Builder setBusinessNameAsset(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
businessNameAsset_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of the business name text asset.
* </pre>
*
* <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearBusinessNameAsset() {
businessNameAsset_ = getDefaultInstance().getBusinessNameAsset();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of the business name text asset.
* </pre>
*
* <code>string business_name_asset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for businessNameAsset to set.
* @return This builder for chaining.
*/
public Builder setBusinessNameAssetBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
businessNameAsset_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList logoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureLogoAssetIsMutable() {
if (!logoAsset_.isModifiable()) {
logoAsset_ = new com.google.protobuf.LazyStringArrayList(logoAsset_);
}
bitField0_ |= 0x00000002;
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the logoAsset.
*/
public com.google.protobuf.ProtocolStringList
getLogoAssetList() {
logoAsset_.makeImmutable();
return logoAsset_;
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of logoAsset.
*/
public int getLogoAssetCount() {
return logoAsset_.size();
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The logoAsset at the given index.
*/
public java.lang.String getLogoAsset(int index) {
return logoAsset_.get(index);
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the logoAsset at the given index.
*/
public com.google.protobuf.ByteString
getLogoAssetBytes(int index) {
return logoAsset_.getByteString(index);
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index to set the value at.
* @param value The logoAsset to set.
* @return This builder for chaining.
*/
public Builder setLogoAsset(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureLogoAssetIsMutable();
logoAsset_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The logoAsset to add.
* @return This builder for chaining.
*/
public Builder addLogoAsset(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureLogoAssetIsMutable();
logoAsset_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param values The logoAsset to add.
* @return This builder for chaining.
*/
public Builder addAllLogoAsset(
java.lang.Iterable<java.lang.String> values) {
ensureLogoAssetIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, logoAsset_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearLogoAsset() {
logoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);;
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of square logo assets.
* </pre>
*
* <code>repeated string logo_asset = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes of the logoAsset to add.
* @return This builder for chaining.
*/
public Builder addLogoAssetBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
ensureLogoAssetIsMutable();
logoAsset_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList landscapeLogoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureLandscapeLogoAssetIsMutable() {
if (!landscapeLogoAsset_.isModifiable()) {
landscapeLogoAsset_ = new com.google.protobuf.LazyStringArrayList(landscapeLogoAsset_);
}
bitField0_ |= 0x00000004;
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return A list containing the landscapeLogoAsset.
*/
public com.google.protobuf.ProtocolStringList
getLandscapeLogoAssetList() {
landscapeLogoAsset_.makeImmutable();
return landscapeLogoAsset_;
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return The count of landscapeLogoAsset.
*/
public int getLandscapeLogoAssetCount() {
return landscapeLogoAsset_.size();
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the element to return.
* @return The landscapeLogoAsset at the given index.
*/
public java.lang.String getLandscapeLogoAsset(int index) {
return landscapeLogoAsset_.get(index);
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the value to return.
* @return The bytes of the landscapeLogoAsset at the given index.
*/
public com.google.protobuf.ByteString
getLandscapeLogoAssetBytes(int index) {
return landscapeLogoAsset_.getByteString(index);
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index to set the value at.
* @param value The landscapeLogoAsset to set.
* @return This builder for chaining.
*/
public Builder setLandscapeLogoAsset(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureLandscapeLogoAssetIsMutable();
landscapeLogoAsset_.set(index, value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param value The landscapeLogoAsset to add.
* @return This builder for chaining.
*/
public Builder addLandscapeLogoAsset(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureLandscapeLogoAssetIsMutable();
landscapeLogoAsset_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param values The landscapeLogoAsset to add.
* @return This builder for chaining.
*/
public Builder addAllLandscapeLogoAsset(
java.lang.Iterable<java.lang.String> values) {
ensureLandscapeLogoAssetIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, landscapeLogoAsset_);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return This builder for chaining.
*/
public Builder clearLandscapeLogoAsset() {
landscapeLogoAsset_ =
com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);;
onChanged();
return this;
}
/**
* <pre>
* Optional. The resource name of landscape logo assets.
* </pre>
*
* <code>repeated string landscape_logo_asset = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param value The bytes of the landscapeLogoAsset to add.
* @return This builder for chaining.
*/
public Builder addLandscapeLogoAssetBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
ensureLandscapeLogoAssetIsMutable();
landscapeLogoAsset_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.BrandCampaignAssets)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.BrandCampaignAssets)
private static final com.google.ads.googleads.v21.services.BrandCampaignAssets DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.BrandCampaignAssets();
}
public static com.google.ads.googleads.v21.services.BrandCampaignAssets getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<BrandCampaignAssets>
PARSER = new com.google.protobuf.AbstractParser<BrandCampaignAssets>() {
@java.lang.Override
public BrandCampaignAssets parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<BrandCampaignAssets> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BrandCampaignAssets> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.BrandCampaignAssets getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
oracle/coherence | 37,150 | prj/test/functional/guardian/src/main/java/guardian/GuardianTests.java | /*
* Copyright (c) 2000, 2024, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* https://oss.oracle.com/licenses/upl.
*/
package guardian;
import com.oracle.coherence.testing.junit.ThreadDumpOnTimeoutRule;
import com.tangosol.net.CacheFactory;
import com.tangosol.net.CacheService;
import com.tangosol.net.Cluster;
import com.tangosol.net.Guardable;
import com.tangosol.net.management.MBeanHelper;
import com.tangosol.net.Guardian;
import com.tangosol.net.NamedCache;
import com.tangosol.net.Service;
import com.tangosol.net.ServiceFailurePolicy;
import com.tangosol.net.RequestTimeoutException;
import com.tangosol.net.ServiceStoppedException;
import com.tangosol.util.Base;
import com.tangosol.util.InvocableMap;
import com.tangosol.util.processor.AbstractProcessor;
import java.net.ServerSocket;
import java.util.Enumeration;
import java.util.Properties;
import com.oracle.bedrock.testsupport.deferred.Eventually;
import com.tangosol.coherence.component.util.Daemon;
import com.tangosol.coherence.component.util.SafeService;
import com.tangosol.coherence.component.util.daemon.queueProcessor.service.grid.partitionedService.PartitionedCache;
import com.tangosol.internal.net.cluster.DefaultServiceFailurePolicy;
import java.util.concurrent.TimeUnit;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;
import static com.oracle.bedrock.deferred.DeferredHelper.invoking;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.*;
/**
* Test the Guardian.
*
* @author rhl 2010.03.17
*/
public class GuardianTests
{
// ----- test methods -------------------------------------------------
/**
* Test that the numbers of soft-timeout and hard-timeout are correctly recorded by the
* GuardSoftTimeout and GuardHardTimeout attributes in the JMX.
*/
@Test
public void testGuardTimeoutDisplay()
{
logWarn("testGuardTimeoutDisplay", true);
try
{
System.setProperty("test.guardian.GuardianTests.timeout", "3000");
System.setProperty("test.guardian.GuardianTests.threads", "2");
System.setProperty("test.guardian.GuardianTests.request.timeout", "15s");
System.setProperty("coherence.log.level", "9");
System.setProperty("coherence.management", "all");
CacheService service = startService("PartitionedCacheDefaultPolicies");
NamedCache cache = service.ensureCache("foo", null);
Cluster cluster = CacheFactory.getCluster();
MBeanServer server = MBeanHelper.findMBeanServer();
checkNodeGuardTimeout(cluster, server, "Recover", 0);
checkNodeGuardTimeout(cluster, server, "Terminate", 0);
// Note: the timeouts (60s) are significantly larger than the guardian
// timeout (3s) in the hope it will accommodate for late guardians
// check soft-timeouts
for (int i = 1; i <= 5; i++)
{
cache.invoke("SoftKey-" + i, new LoserTask(60000L, /*fInterruptible*/ true));
checkNodeGuardTimeout(cluster, server, "Recover", i);
}
// reset statistics
resetNodeGuardTimeoutCount(cluster, server);
checkNodeGuardTimeout(cluster, server, "Recover", 0);
// check hard-timeouts
for (int i = 1; i <= 5; i++)
{
try
{
cache.invoke("HardKey-" + i, new LoserTask(60000L, /*fInterruptible*/ false));
}
catch (RuntimeException e)
{
}
checkNodeGuardTimeout(cluster, server, "Recover", i);
checkNodeGuardTimeout(cluster, server, "Terminate", i);
}
}
finally
{
CacheFactory.shutdown();
}
logWarn("testGuardTimeoutDisplay", false);
}
/**
 * Test that a guardian-timeout shorter than an EntryProcessor task will
 * cause the EP to be interrupted (recovered) rather than left hung.  Covers
 * service-thread and worker-thread tasks, with both the default and custom
 * service-failure policies.
 */
@Test
public void testRecovery() {
    CacheService service;
    NamedCache cache;
    Object oResult;
    CustomServiceFailurePolicy policy;
    long cDelay = 10000L; // task duration (ms); longer than the 3s guardian timeout

    logWarn("testRecovery", true);

    // test interruptible service-thread task
    try {
        System.setProperty("test.guardian.GuardianTests.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.threads", "0"); // 0 => EPs run on the service thread
        service = startService("PartitionedCacheDefaultPolicies");
        assertTrue(service.isRunning());
        cache = service.ensureCache("foo", null);
        // LoserTask returns the number of interrupts it received; non-zero => it was recovered
        oResult = cache.invoke("key", new LoserTask(cDelay, /*fInterruptible*/ true));
        assertNotSame("Slow task was not interrupted after " + cDelay + "ms", 0, oResult);
    } finally {
        CacheFactory.shutdown();
    }

    // test interruptible worker-thread task
    try {
        System.setProperty("test.guardian.GuardianTests.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.threads", "2");
        service = startService("PartitionedCacheDefaultPolicies");
        assertTrue(service.isRunning());
        cache = service.ensureCache("foo", null);
        oResult = cache.invoke("key", new LoserTask(cDelay, /*fInterruptible*/ true));
        assertNotSame("Slow task was not interrupted after " + cDelay + "ms", 0, oResult);
    } finally {
        CacheFactory.shutdown();
    }

    // test custom policy recovery on service thread
    try {
        System.setProperty("test.guardian.GuardianTests.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.threads", "0");
        System.setProperty("test.guardian.GuardianTests.custompolicy.initparam0",
            "" + CustomServiceFailurePolicy.TYPE_DEFAULT);
        service = startService("PartitionedCacheCustomPolicy");
        assertTrue(service.isRunning());
        cache = service.ensureCache("foo", null);
        oResult = cache.invoke("key", new LoserTask(cDelay, /*fInterruptible*/ true));
        policy = getServicePolicy(service);
        assertNotSame("Slow task was not interrupted after " + cDelay + "ms", 0, oResult);
        assertNotNull(policy);
        // exactly one recovery, no termination
        assertEquals(1, policy.m_cRecover);
        assertEquals(0, policy.m_cTerminate);
    } finally {
        CacheFactory.shutdown();
    }

    // test custom policy recovery on worker thread
    try {
        System.setProperty("test.guardian.GuardianTests.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.threads", "2");
        System.setProperty("test.guardian.GuardianTests.custompolicy.initparam0",
            "" + CustomServiceFailurePolicy.TYPE_DEFAULT);
        service = startService("PartitionedCacheCustomPolicy");
        cache = service.ensureCache("foo", null);
        assertTrue(service.isRunning());
        oResult = cache.invoke("key", new LoserTask(cDelay, /*fInterruptible*/ true));
        policy = getServicePolicy(service);
        assertNotSame("Slow task was not interrupted after " + cDelay + "ms", 0, oResult);
        assertNotNull(policy);
        assertEquals(1, policy.m_cRecover);
        assertEquals(0, policy.m_cTerminate);
    } finally {
        CacheFactory.shutdown();
    }

    // test custom policy (skip-one) recovery on service thread
    try {
        System.setProperty("test.guardian.GuardianTests.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.threads", "0");
        System.setProperty("test.guardian.GuardianTests.custompolicy.initparam0",
            "" + CustomServiceFailurePolicy.TYPE_SKIP_ONE);
        service = startService("PartitionedCacheCustomPolicy");
        assertTrue(service.isRunning());
        cache = service.ensureCache("foo", null);
        // 15s task: long enough to span two soft-timeouts (the policy skips the first).
        // NOTE(review): the failure message below reports cDelay (10s) rather than the
        // actual 15000ms task duration -- presumably a copy/paste slip; confirm.
        oResult = cache.invoke("key", new LoserTask(15000, /*fInterruptible*/ true));
        policy = getServicePolicy(service);
        assertNotSame("Slow task was not interrupted after " + cDelay + "ms", 0, oResult);
        assertNotNull(policy);
        // two recoveries: the skipped one plus the real one
        assertEquals(2, policy.m_cRecover);
        assertEquals(0, policy.m_cTerminate);
    } finally {
        CacheFactory.shutdown();
    }

    // test custom policy (skip-one) recovery on worker thread
    try {
        System.setProperty("test.guardian.GuardianTests.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.threads", "2");
        System.setProperty("test.guardian.GuardianTests.custompolicy.initparam0",
            "" + CustomServiceFailurePolicy.TYPE_SKIP_ONE);
        service = startService("PartitionedCacheCustomPolicy");
        assertTrue(service.isRunning());
        cache = service.ensureCache("foo", null);
        oResult = cache.invoke("key", new LoserTask(15000, /*fInterruptible*/ true));
        policy = getServicePolicy(service);
        assertNotSame("Slow task was not interrupted after " + cDelay + "ms", 0, oResult);
        assertNotNull(policy);
        assertEquals(2, policy.m_cRecover);
        assertEquals(0, policy.m_cTerminate);
    } finally {
        CacheFactory.shutdown();
    }

    // test custom policy recovery on worker thread, different task-timeout
    try {
        // long guardian timeout; the shorter task-timeout drives the recovery
        System.setProperty("test.guardian.GuardianTests.timeout", "60000");
        System.setProperty("test.guardian.GuardianTests.task.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.threads", "2");
        System.setProperty("test.guardian.GuardianTests.custompolicy.initparam0",
            "" + CustomServiceFailurePolicy.TYPE_DEFAULT);
        service = startService("PartitionedCacheCustomPolicy");
        assertTrue(service.isRunning());
        cache = service.ensureCache("foo", null);
        oResult = cache.invoke("key", new LoserTask(15000, /*fInterruptible*/ true));
        policy = getServicePolicy(service);
        assertNotSame("Slow task was not interrupted after " + cDelay + "ms", 0, oResult);
        assertNotNull(policy);
        assertEquals(1, policy.m_cRecover);
        assertEquals(0, policy.m_cTerminate);
    } finally {
        CacheFactory.shutdown();
    }

    logWarn("testRecovery", false);
}
/**
 * Test termination: a non-interruptible hung task causes the guardian to
 * terminate the service (service-thread task), while a hung worker-thread
 * task leaves the service running and the request times out instead.
 */
@Test
public void testTerminate() {
    CacheService service;
    NamedCache cache;
    CustomServiceFailurePolicy policy;
    long cDelay = 10000L; // task duration (ms); longer than the 3s guardian timeout

    logWarn("testTerminate", true);

    // test terminate of a task on the service thread
    try {
        System.setProperty("test.guardian.GuardianTests.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.threads", "0");
        service = startService("PartitionedCacheDefaultPolicies");
        cache = service.ensureCache("foo", null);
        try {
            // service termination will result in task retry
            cache.invoke("key", new LoserTask(cDelay, /*fInterruptible*/ false));
        } catch (ServiceStoppedException e) {
            // expected: the guardian terminated the hung service
        }
        assertFalse("Service was not terminated", service.isRunning());
    } finally {
        CacheFactory.shutdown();
    }

    // test terminate of a task on a worker thread
    try {
        System.setProperty("test.guardian.GuardianTests.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.threads", "2");
        System.setProperty("test.guardian.GuardianTests.request.timeout", "15s");
        service = startService("PartitionedCacheDefaultPolicies");
        cache = service.ensureCache("foo", null);
        try {
            // worker thread task termination should not stop the service;
            // the client request is expected to time out instead
            cache.invoke("key", new LoserTask(cDelay, /*fInterruptible*/ false));
        } catch (RuntimeException e) {
            if (Base.getOriginalException(e) instanceof RequestTimeoutException) {
                // expected
            } else {
                throw e;
            }
        }
        assertTrue("Service was terminated", service.isRunning());
    } finally {
        CacheFactory.shutdown();
    }

    // test terminate of a task on a service thread with a custom policy
    try {
        System.setProperty("test.guardian.GuardianTests.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.threads", "0");
        System.setProperty("test.guardian.GuardianTests.custompolicy.initparam0",
            "" + CustomServiceFailurePolicy.TYPE_DEFAULT);
        service = startService("PartitionedCacheCustomPolicy");
        cache = service.ensureCache("foo", null);
        // pre-fetch the policy, as we expect the service to be terminated
        policy = getServicePolicy(service);
        try {
            // service termination will result in task retry
            cache.invoke("key", new LoserTask(cDelay, /*fInterruptible*/ false));
        } catch (ServiceStoppedException e) {
            // expected
        }
        assertNotNull(policy);
        // one soft-timeout recovery, then termination, then service failure
        assertEquals(1, policy.m_cRecover);
        assertEquals(1, policy.m_cTerminate);
        assertEquals(1, policy.m_cServiceFailed);
        assertFalse("Service was not terminated", service.isRunning());
    } finally {
        CacheFactory.shutdown();
    }

    logWarn("testTerminate", false);
}
/**
 * Test termination using guardian settings inherited from the cluster-wide
 * (global) configuration supplied via an override file, rather than the
 * per-service configuration.
 */
@Test
public void testTerminateInheritGlobal() {
    CacheService service;
    NamedCache cache;
    CustomServiceFailurePolicy policy;
    long cDelay = 10000L; // task duration (ms); longer than the 3s global guardian timeout

    logWarn("testTerminateInheritGlobal", true);

    // test terminate of a task on the service thread
    try {
        System.setProperty("coherence.override", "guardian-coherence-override.xml");
        System.setProperty("test.guardian.GuardianTests.global.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.global.policy", "exit-cluster");
        System.setProperty("test.guardian.GuardianTests.threads", "0");
        service = startService("PartitionedCacheDefaultPolicies");
        cache = service.ensureCache("foo", null);
        try {
            // service termination will result in task retry
            cache.invoke("key", new LoserTask(cDelay, /*fInterruptible*/ false));
        } catch (ServiceStoppedException e) {
            // expected
        }
        assertFalse("Service was not terminated", service.isRunning());
    } finally {
        // clear the override properties so they do not leak into other sections
        System.clearProperty("coherence.override");
        System.clearProperty("test.guardian.GuardianTests.global.timeout");
        System.clearProperty("test.guardian.GuardianTests.global.policy");
        CacheFactory.shutdown();
    }

    // test terminate of a task on a worker thread
    try {
        System.setProperty("coherence.override", "guardian-coherence-override.xml");
        System.setProperty("test.guardian.GuardianTests.global.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.global.policy", "exit-cluster");
        System.setProperty("test.guardian.GuardianTests.threads", "2");
        System.setProperty("test.guardian.GuardianTests.request.timeout", "15s");
        service = startService("PartitionedCacheDefaultPolicies");
        cache = service.ensureCache("foo", null);
        try {
            // worker thread task termination should not stop the service;
            // the client request is expected to time out instead
            cache.invoke("key", new LoserTask(cDelay, /*fInterruptible*/ false));
        } catch (RuntimeException e) {
            if (Base.getOriginalException(e) instanceof RequestTimeoutException) {
                // expected
            } else {
                throw e;
            }
        }
        assertTrue("Service was terminated", service.isRunning());
    } finally {
        System.clearProperty("coherence.override");
        System.clearProperty("test.guardian.GuardianTests.global.timeout");
        System.clearProperty("test.guardian.GuardianTests.global.policy");
        CacheFactory.shutdown();
    }

    // test terminate of a task on a service thread with a custom policy
    try {
        System.setProperty("coherence.override", "guardian-coherence-override.xml");
        System.setProperty("test.guardian.GuardianTests.global.timeout", "3000");
        System.setProperty("test.guardian.GuardianTests.global.policy", "exit-cluster");
        System.setProperty("test.guardian.GuardianTests.threads", "0");
        System.setProperty("test.guardian.GuardianTests.custompolicy.initparam0",
            "" + CustomServiceFailurePolicy.TYPE_DEFAULT);
        service = startService("PartitionedCacheCustomPolicy");
        cache = service.ensureCache("foo", null);
        // pre-fetch the policy, as we expect the service to be terminated
        policy = getServicePolicy(service);
        try {
            // service termination will result in task retry
            cache.invoke("key", new LoserTask(cDelay, /*fInterruptible*/ false));
        } catch (ServiceStoppedException e) {
            // expected
        }
        assertNotNull(policy);
        // poll, as the policy callbacks may complete asynchronously
        Eventually.assertThat(invoking(policy).getRecoverCount(), is(1));
        Eventually.assertThat(invoking(policy).getTerminateCount(), is(1));
        Eventually.assertThat(invoking(policy).getServiceFailedCount(), is(1));
        assertFalse("Service was not terminated", service.isRunning());
    } finally {
        System.clearProperty("coherence.override");
        System.clearProperty("test.guardian.GuardianTests.global.timeout");
        System.clearProperty("test.guardian.GuardianTests.global.policy");
        CacheFactory.shutdown();
    }

    logWarn("testTerminateInheritGlobal", false);
}
/**
 * Test that a guardian-timeout of 0 (guardian disabled) causes a default
 * "logging" policy to be used. (COH-3090)
 */
@Test
public void testDefaultLogging() {
    logWarn("testDefaultLogging", true);

    Properties props = new Properties();
    final String sServiceName = "PartitionedCacheDefaultPolicies";
    try {
        // 0 disables the guardian, which should fall back to the logging policy
        props.setProperty("test.guardian.GuardianTests.timeout", "0");

        // test service-thread task
        props.setProperty("test.guardian.GuardianTests.threads", "0");
        doTestLogging(sServiceName, props, new Runnable() {
            public void run() {
                // re-register the guardable with a 3sec timeout
                CacheService service = startService(sServiceName);
                SafeService serviceSafe = (SafeService) service;
                PartitionedCache serviceReal = (PartitionedCache) serviceSafe.getService();

                Base.out("Modifying guardian-timeout to 3 seconds");
                Daemon.Guard guard = serviceReal.getGuardable();
                // the guard context may not be available immediately after start
                Eventually.assertThat(invoking(guard).getContext(), is(notNullValue()));
                Guardian.GuardContext ctx = guard.getContext();
                Guardian guardian = ctx.getGuardian();
                guardian.guard(guard, 3000L, 0.9F); // 3s timeout, 90% soft-timeout factor
            }
        });

        // test worker-thread task
        props.setProperty("test.guardian.GuardianTests.threads", "2");
        doTestLogging(sServiceName, props, new Runnable() {
            public void run() {
                // change the guard-timeout to 3seconds
                CacheService service = startService(sServiceName);
                SafeService serviceSafe = (SafeService) service;
                PartitionedCache serviceReal = (PartitionedCache) serviceSafe.getService();

                Base.out("Modifying task-timeout to 3 seconds");
                serviceReal.getDaemonPool().setTaskTimeout(3000L);
            }
        });
    } finally {
        clearProps(props);
    }
    logWarn("testDefaultLogging", false);
}
/**
 * Test the explicitly configured "logging" guardian policy on both the
 * service thread and a worker thread.
 */
@Test
public void testLogging() {
    logWarn("testLogging", true);

    Properties props = new Properties();
    try {
        props.setProperty("test.guardian.GuardianTests.timeout", "3s");

        // test service thread
        // NOTE(review): the "threads"="0" property is set *after* this first
        // doTestLogging call, so the first run actually uses the service's
        // configured default thread count; the two lines below were presumably
        // meant to be in the opposite order -- confirm before changing.
        doTestLogging("PartitionedCacheLoggingPolicy", props, null);
        props.setProperty("test.guardian.GuardianTests.threads", "0");

        // test worker thread
        props.setProperty("test.guardian.GuardianTests.threads", "2");
        doTestLogging("PartitionedCacheLoggingPolicy", props, null);
    } finally {
        clearProps(props);
    }
    logWarn("testLogging", false);
}
/**
 * Test the "logging" policy configured on the service overriding a
 * cluster-wide "exit-cluster" policy supplied via the override file.
 */
@Test
public void testLoggingOverridingGlobal() {
    logWarn("testLoggingOverridingGlobal", true);

    Properties props = new Properties();
    try {
        props.setProperty("coherence.override",
            "guardian-coherence-override.xml");
        props.setProperty("test.guardian.GuardianTests.global.timeout", "60000");
        props.setProperty("test.guardian.GuardianTests.global.policy", "exit-cluster");
        props.setProperty("test.guardian.GuardianTests.timeout", "3s");

        // test service thread
        // NOTE(review): as in testLogging, "threads"="0" is set only after this
        // first call -- the ordering of the next two lines looks inverted; confirm.
        doTestLogging("PartitionedCacheLoggingPolicy", props, null);
        props.setProperty("test.guardian.GuardianTests.threads", "0");

        // test worker thread
        props.setProperty("test.guardian.GuardianTests.threads", "2");
        doTestLogging("PartitionedCacheLoggingPolicy", props, null);
    } finally {
        clearProps(props);
    }
    logWarn("testLoggingOverridingGlobal", false);
}
/**
 * Test a service with no explicit policy inheriting the cluster-wide
 * "logging" policy from the global (override-file) configuration.
 */
@Test
public void testInheritGlobalLogging() {
    logWarn("testInheritGlobalLogging", true);

    Properties props = new Properties();
    try {
        props.setProperty("coherence.override",
            "guardian-coherence-override.xml");
        props.setProperty("test.guardian.GuardianTests.global.timeout", "3000");
        props.setProperty("test.guardian.GuardianTests.global.policy", "logging");

        // test service thread
        // NOTE(review): "threads"="0" is set only after this first call
        // (same suspected line-ordering slip as in testLogging); confirm.
        doTestLogging("PartitionedCacheNoPolicy", props, null);
        props.setProperty("test.guardian.GuardianTests.threads", "0");

        // test worker thread
        props.setProperty("test.guardian.GuardianTests.threads", "2");
        doTestLogging("PartitionedCacheNoPolicy", props, null);
    } finally {
        clearProps(props);
    }
    logWarn("testInheritGlobalLogging", false);
}
/**
 * Clear the specified System properties.
 *
 * @param props the properties whose names should be removed from the
 * System properties
 */
protected void clearProps(Properties props) {
    // stringPropertyNames() replaces the raw Enumeration + cast of the
    // original propertyNames() loop; all keys here are Strings (set via
    // setProperty), so the set of names cleared is identical
    for (String sName : props.stringPropertyNames()) {
        System.clearProperty(sName);
    }
}
/**
 * Helper method for testing "logging" guardian configuration.  Starts the
 * named service with the given system properties, asserts that the
 * configured failure policy is the "logging" policy, optionally runs a
 * callback (e.g. to tighten timeouts), then verifies that a slow but
 * interruptible task is merely logged rather than recovered.
 *
 * @param sServiceName the name of the cache service to start
 * @param props system properties to install before starting the service
 * @param runnable optional post-start callback; may be null
 */
protected void doTestLogging(String sServiceName, Properties props, Runnable runnable) {
    CacheService service;
    NamedCache cache;
    Object oResult;

    // test service-thread task
    try {
        System.getProperties().putAll(props);
        service = startService(sServiceName);
        SafeService serviceSafe = (SafeService) service;
        PartitionedCache serviceReal = (PartitionedCache) serviceSafe.getService();

        // check to verify that the policy type is logging
        DefaultServiceFailurePolicy oPolicy = (DefaultServiceFailurePolicy) serviceReal.getServiceFailurePolicy();
        int oType = oPolicy.getPolicyType();
        // see Cluster$DefaultFailurePolicy.POLICY_LOGGING
        assertEquals(3, oType);

        if (runnable != null) {
            runnable.run();
        }

        // Note: send an invocable with a duration that is longer than
        // the timeout, but not too long (so as to cause request-timeout)
        // as the task will not be recovered/interrupted (just logging)
        cache = service.ensureCache("foo", null);
        // LoserTask returns its interrupt count; 0 => it was never interrupted
        oResult = cache.invoke("key", new LoserTask(5000, /*fInterruptible*/ true));
        assertEquals("Logging policy should not result in recovery",
            0, oResult);
    } finally {
        CacheFactory.shutdown();
    }
}
// ----- inner class: CustomServiceFailurePolicy ----------------------

/**
 * Factory method: instantiate a CustomServiceFailurePolicy of the
 * specified type.
 *
 * @param nPolicyType the policy type (TYPE_DEFAULT or TYPE_SKIP_ONE)
 *
 * @return a new CustomServiceFailurePolicy
 */
public static ServiceFailurePolicy instantiateCustomPolicy(int nPolicyType) {
    CustomServiceFailurePolicy policy = new CustomServiceFailurePolicy(nPolicyType);
    return policy;
}
/**
 * A test ServiceFailurePolicy that counts how many times it is asked to
 * recover or terminate a guardable (and how many service failures it sees),
 * optionally skipping the first recovery by issuing a heartbeat instead.
 */
protected static class CustomServiceFailurePolicy
        implements ServiceFailurePolicy {
    /**
     * Construct a CustomServiceFailurePolicy of the specified type.
     *
     * @param nPolicyType the policy type (TYPE_DEFAULT or TYPE_SKIP_ONE)
     */
    public CustomServiceFailurePolicy(int nPolicyType) {
        m_nPolicyType = nPolicyType;
    }

    // ----- ServiceFailurePolicy methods ----------------------------

    /**
     * {@inheritDoc}
     */
    public void onGuardableRecovery(Guardable guardable, Service service) {
        Base.out("CustomServiceFailurePolicy: onGuardableRecovery() " + guardable);
        ++m_cRecover;

        boolean fSkip = m_nPolicyType == TYPE_SKIP_ONE && m_cRecover <= 1;
        if (fSkip) {
            // skip the first soft-timeout: just issue a heartbeat
            guardable.getContext().heartbeat();
        } else {
            // default behavior: delegate recovery to the guardable
            guardable.recover();
        }
    }

    /**
     * {@inheritDoc}
     */
    public void onGuardableTerminate(Guardable guardable, Service service) {
        Base.out("CustomServiceFailurePolicy: onGuardableTerminate() " + guardable);
        ++m_cTerminate;
        // both policy types delegate termination to the guardable
        guardable.terminate();
    }

    /**
     * {@inheritDoc}
     */
    public void onServiceFailed(Cluster cluster) {
        ++m_cServiceFailed;
    }

    // ----- accessors ------------------------------------------------

    public int getRecoverCount() {
        return m_cRecover;
    }

    public int getTerminateCount() {
        return m_cTerminate;
    }

    public int getServiceFailedCount() {
        return m_cServiceFailed;
    }

    // ----- constants and data members ------------------------------

    /**
     * Default policy type (delegate recover() and terminate())
     */
    public static final int TYPE_DEFAULT = 0;

    /**
     * Policy type that skips one recovery period (soft-timeout)
     */
    public static final int TYPE_SKIP_ONE = 1;

    /**
     * The policy type
     */
    protected int m_nPolicyType;

    /**
     * The number of times this policy has been asked to recover a guardable
     */
    protected int m_cRecover;

    /**
     * The number of times this policy has been asked to terminate a guardable
     */
    protected int m_cTerminate;

    /**
     * The number of times this policy has been asked to handle a failed
     * service termination.
     */
    protected int m_cServiceFailed;
}
// ----- inner class: LoserTask ---------------------------------------

/**
 * LoserTask is the entry processor used to simulate a long-running (hung) task.
 * <p>
 * An "interruptible" instance exits on the first interrupt; otherwise the
 * task swallows interrupts and keeps sleeping until the requested delay has
 * elapsed, counting the interrupts it received.
 */
protected static class LoserTask
        extends AbstractProcessor {
    /**
     * Construct a LoserTask.
     *
     * @param cDelay the time (ms) the task should take to run
     * @param fInterruptible true if the task should exit on interrupt
     */
    public LoserTask(long cDelay, boolean fInterruptible) {
        m_cDelay = cDelay;
        m_fInterruptible = fInterruptible;
    }

    // ----- EntryProcessor methods -----------------------------------

    /**
     * Sleep for (at least) the configured delay, counting interrupts.
     *
     * @return the number of times this thread was interrupted (an Integer)
     */
    public Object process(InvocableMap.Entry entry) {
        long cWait = m_cDelay;                      // remaining time to sleep
        int cInterrupt = 0;                         // interrupts observed so far
        long ldtNow = Base.getSafeTimeMillis();
        while (cWait >= 0) {
            long ldtLast = ldtNow;
            try {
                Thread.sleep(cWait);
            } catch (InterruptedException e) {
                ++cInterrupt;
                if (m_fInterruptible) {
                    // let this thread be interrupted
                    break;
                }
                // Note: intentionally do not reset the interrupted
                // flag, as we simulate "handling" the interrupt
            }
            // subtract the time actually spent (sleeping or interrupted)
            cWait -= ((ldtNow = Base.getSafeTimeMillis()) - ldtLast);
            if (cWait > 120000L) // make sure wait does not exceed 2 minutes.
            {
                // defensive clamp; also logs the anomalous values for diagnosis
                System.out.println("GuardianTests.LoserTask.process(), cWait: " + cWait + ", ldtNow: " + ldtNow + ", ldtLast: " + ldtLast + ", cInterrupt: " + cInterrupt);
                cWait = 120000L;
            }
        }
        // return the number of times this thread was interrupted
        return cInterrupt;
    }

    // ----- data members ---------------------------------------------

    /**
     * The amount of time (ms) the task should take to run.
     */
    protected long m_cDelay;

    /**
     * Is this task interruptible?
     */
    protected volatile boolean m_fInterruptible;
}
// ----- helpers ------------------------------------------------------

/**
 * Check (via JMX, polling until it matches) that the local node's guardian
 * counter has the expected value.
 *
 * @param cluster the current cluster
 * @param server the MBean server
 * @param sType the counter to check: "Recover" or "Terminate", forming
 * the Node MBean attribute name "Guard<sType>Count"
 * @param cTrueNumber the expected number of timeouts
 */
protected void checkNodeGuardTimeout(Cluster cluster, MBeanServer server, String sType, int cTrueNumber) {
    try {
        int mNodeId = cluster.getLocalMember().getId();
        ObjectName oBeanName = new ObjectName("Coherence:type=Node,nodeId=" + mNodeId);
        // Eventually re-invokes getMBeanAttribute until the value matches (or times out)
        Eventually.assertThat(invoking(this).getMBeanAttribute(server, oBeanName, "Guard" + sType + "Count"), is(cTrueNumber));
    } catch (Exception e) {
        Assert.fail(Base.printStackTrace(e));
    }
}
/**
 * Return the value of the specified MBean attribute, or null if it cannot
 * be retrieved.
 *
 * @param server the MBean server to query
 * @param oBeanName the name of the MBean
 * @param sName the attribute name
 *
 * @return the attribute value, or null on any failure (so that polling
 * callers such as Eventually.assertThat can simply retry)
 */
public Object getMBeanAttribute(MBeanServer server, ObjectName oBeanName, String sName) {
    Object oValue;
    try {
        oValue = server.getAttribute(oBeanName, sName);
    } catch (Exception e) {
        // deliberately swallowed: a missing or not-yet-registered MBean
        // simply yields null rather than failing the caller
        oValue = null;
    }
    return oValue;
}
/**
 * Reset all the monitored statistics on the local node's Node MBean
 * (via its "resetStatistics" JMX operation).
 *
 * @param cluster the current cluster
 * @param server the MBean server
 */
protected void resetNodeGuardTimeoutCount(Cluster cluster, MBeanServer server) {
    try {
        int mNodeId = cluster.getLocalMember().getId();
        ObjectName oBeanName = new ObjectName("Coherence:type=Node,nodeId=" + mNodeId);
        // no-arg operation: null parameter and signature arrays
        server.invoke(oBeanName, "resetStatistics", null, null);
    } catch (Exception e) {
        Assert.fail(Base.printStackTrace(e));
    }
}
/**
 * Return a CacheService by the specified name.
 *
 * @param sName the service name
 *
 * @return a CacheService that has been started
 */
protected CacheService startService(String sName) {
    CacheService service = (CacheService) CacheFactory.getService(sName);
    return service;
}
/**
 * Log a warning (that can be easily scraped) that guardian errors (and
 * stack traces) are expected.
 *
 * @param sTestName the name of the test
 * @param fHeader if true, log the header, else the footer
 */
protected void logWarn(String sTestName, boolean fHeader) {
    // Note: use System.out instead of Base.out here to avoid interfering
    // with the tests' initialization of Coherence
    String sMsg = fHeader
        ? "+++ " + sTestName + ": This test is expected to produce guardian error messages +++"
        : "--- " + sTestName + " ---";
    System.out.println(sMsg);
}
/**
 * Return the custom ServiceFailurePolicy configured on the specified
 * service.
 *
 * @param service the service (expected to be a SafeService wrapper)
 *
 * @return the CustomServiceFailurePolicy on the specified service
 */
protected CustomServiceFailurePolicy getServicePolicy(Service service) {
    // unwrap the SafeService to reach the underlying PartitionedCache
    PartitionedCache serviceReal =
        (PartitionedCache) ((SafeService) service).getService();
    return (CustomServiceFailurePolicy) serviceReal.getServiceFailurePolicy();
}
/**
 * A JUnit rule that will cause the test to fail if it runs too long.
 * A thread dump will be generated on failure (the trailing 'true' argument).
 */
@ClassRule
public static final ThreadDumpOnTimeoutRule timeout
    = ThreadDumpOnTimeoutRule.after(15, TimeUnit.MINUTES, true);
}
|
apache/juneau | 36,407 | juneau-core/juneau-marshall/src/main/java/org/apache/juneau/collections/JsonList.java | // ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.collections;
import static org.apache.juneau.common.utils.ThrowableUtils.*;
import static org.apache.juneau.internal.ConsumerUtils.*;
import java.io.*;
import java.lang.reflect.*;
import java.util.*;
import java.util.function.*;
import org.apache.juneau.*;
import org.apache.juneau.common.utils.*;
import org.apache.juneau.json.*;
import org.apache.juneau.marshaller.*;
import org.apache.juneau.objecttools.*;
import org.apache.juneau.parser.*;
import org.apache.juneau.serializer.*;
/**
* Java implementation of a JSON array.
*
* <p>
* An extension of {@link LinkedList}, so all methods available to in that class are also available to this class.
*
* <p>
* Note that the use of this class is optional for generating JSON. The serializers will accept any objects that implement the
* {@link Collection} interface. But this class provides some useful additional functionality when working with JSON
* models constructed from Java Collections Framework objects. For example, a constructor is provided for converting a
 * JSON array string directly into a {@link List}. It also contains accessor methods to avoid common typecasting
* when accessing elements in a list.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <jc>// Construct an empty List</jc>
* JsonList <jv>list</jv> = JsonList.<jsm>of</jsm>();
*
* <jc>// Construct a list of objects using various methods</jc>
* <jv>list</jv> = JsonList.<jsm>of</jsm>().a(<js>"foo"</js>).a(123).a(<jk>true</jk>);
* <jv>list</jv> = JsonList.<jsm>of</jsm>().a(<js>"foo"</js>, 123, <jk>true</jk>); <jc>// Equivalent</jc>
* <jv>list</jv> = JsonList.<jsm>of</jsm>(<js>"foo"</js>, 123, <jk>true</jk>); <jc>// Equivalent</jc>
*
* <jc>// Construct a list of integers from JSON</jc>
* <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"[1,2,3]"</js>);
*
* <jc>// Construct a list of generic JsonMap objects from JSON</jc>
* <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"[{foo:'bar'},{baz:'bing'}]"</js>);
*
* <jc>// Construct a list of integers from XML</jc>
* String <jv>xml</jv> = <js>"<array><number>1</number><number>2</number><number>3</number></array>"</js>;
* <jv>list</jv> = JsonList.<jsm>of</jsm>(<jv>xml</jv>, XmlParser.<jsf>DEFAULT</jsf>);
* <jv>list</jv> = (List)XmlParser.<jsf>DEFAULT</jsf>.parse(<jv>xml</jv>); <jc>// Equivalent</jc>
* <jv>list</jv> = (List)XmlParser.<jsf>DEFAULT</jsf>.parse(Object.<jk>class</jk>, <jv>xml</jv>); <jc>// Equivalent</jc>
* <jv>list</jv> = XmlParser.<jsf>DEFAULT</jsf>.parse(List.<jk>class</jk>, <jv>xml</jv>); <jc>// Equivalent</jc>
* <jv>list</jv> = XmlParser.<jsf>DEFAULT</jsf>.parse(JsonList.<jk>class</jk>, <jv>xml</jv>); <jc>// Equivalent</jc>
*
* <jc>// Construct JSON from JsonList</jc>
* <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"[{foo:'bar'},{baz:'bing'}]"</js>);
* String <jv>json</jv> = <jv>list</jv>.toString(); <jc>// Produces "[{foo:'bar'},{baz:'bing'}]"</jc>
* <jv>json</jv> = <jv>list</jv>.toString(JsonSerializer.<jsf>DEFAULT</jsf>); <jc>// Equivalent</jc>
* <jv>json</jv> = JsonSerializer.<jsf>DEFAULT</jsf>.serialize(<jv>list</jv>); <jc>// Equivalent</jc>
*
* <jc>// Get one of the entries in the list as an Integer</jc>
* <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"[1,2,3]"</js>);
* Integer <jv>integer</jv> = <jv>list</jv>.getInt(1);
* <jv>list</jv> = <jv>list</jv>.get(Integer.<jk>class</jk>, 1); <jc>// Equivalent</jc>
*
* <jc>// Get one of the entries in the list as an Float</jc>
* <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"[1,2,3]"</js>);
* Float <jv>_float</jv> = <jv>list</jv>.getFloat(1); <jc>// Returns 2f </jc>
* <jv>_float</jv> = <jv>list</jv>.get(Float.<jk>class</jk>, 1); <jc>// Equivalent</jc>
*
* <jc>// Same as above, except converted to a String</jc>
* <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"[1,2,3]"</js>);
* String <jv>string</jv> = <jv>list</jv>.getString(1); <jc>// Returns "2" </jc>
* <jv>string</jv> = <jv>list</jv>.get(String.<jk>class</jk>, 1); <jc>// Equivalent</jc>
*
* <jc>// Get one of the entries in the list as a bean (converted to a bean if it isn't already one)</jc>
* <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"[{name:'John Smith',age:45}]"</js>);
* Person <jv>person</jv> = <jv>list</jv>.get(Person.<jk>class</jk>, 0);
*
* <jc>// Iterate over a list of beans using the elements() method</jc>
* <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"[{name:'John Smith',age:45}]"</js>);
 * 	<jk>for</jk> (Person <jv>person</jv> : <jv>list</jv>.elements(Person.<jk>class</jk>)) {
 * 		<jc>// Do something with person</jc>
* }
* </p>
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='warn'>This class is not thread safe.
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* </ul>
*
* @serial exclude
*/
public class JsonList extends LinkedList<Object> {
//-----------------------------------------------------------------------------------------------------------------
// Static
//-----------------------------------------------------------------------------------------------------------------
private static final long serialVersionUID = 1L;
/**
 * Parses a string that can consist of either a JSON array or comma-delimited list.
 *
 * <p>
 * The type of string is auto-detected.
 *
 * @param s The string to parse.
 * @return The parsed string, or <jk>null</jk> if the input was empty.
 * @throws ParseException Malformed input encountered.
 */
public static JsonList ofJsonOrCdl(String s) throws ParseException {
    if (Utils.isEmpty(s))
        return null;
    if (StringUtils.isJsonArray(s, true))
        return new JsonList(s);  // full JSON array syntax
    // otherwise treat as a comma-delimited list of plain values
    return new JsonList((Object[])Utils.splita(s.trim(), ','));
}
//-----------------------------------------------------------------------------------------------------------------
// Instance
//-----------------------------------------------------------------------------------------------------------------
transient BeanSession session = null;
private transient ObjectRest objectRest;
/**
 * An empty read-only JsonList.
 *
 * <p>
 * All mutator methods throw {@link UnsupportedOperationException}.
 *
 * @serial exclude
 */
public static final JsonList EMPTY_LIST = new JsonList() {
    private static final long serialVersionUID = 1L;

    @Override /* List */
    public void add(int location, Object object) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    // Bug fix: the following mutators were previously inherited from
    // LinkedList, allowing this "read-only" list to be modified.

    @Override /* List */
    public boolean add(Object object) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override /* Deque */
    public void addFirst(Object object) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override /* Deque */
    public void addLast(Object object) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override /* List */
    public boolean addAll(Collection<?> c) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override /* List */
    public boolean addAll(int location, Collection<?> c) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override /* List */
    public boolean remove(Object object) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override /* List */
    public void clear() {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override /* List */
    public ListIterator<Object> listIterator(final int location) {
        return Collections.emptyList().listIterator(location);
    }

    @Override /* List */
    public Object remove(int location) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override /* List */
    public Object set(int location, Object object) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override /* List */
    public List<Object> subList(int start, int end) {
        return Collections.emptyList().subList(start, end);
    }
};
//------------------------------------------------------------------------------------------------------------------
// Constructors
//------------------------------------------------------------------------------------------------------------------
/**
 * Construct an empty list.
 *
 * <p>
 * No bean session is associated with the new list.
 */
public JsonList() {}
/**
 * Construct an empty list with the specified bean context.
 *
 * @param session The bean session to use for creating beans.
 */
public JsonList(BeanSession session) {
    this.session = session;
}
/**
 * Construct a list initialized with the specified list.
 *
 * @param copyFrom
 * The list to copy.
 * <br>Can be <jk>null</jk> (results in an empty list).
 */
public JsonList(Collection<?> copyFrom) {
    // Bug fix: the javadoc promises null is allowed, but LinkedList's
    // copy constructor throws NullPointerException for a null collection;
    // substitute an empty list so the documented contract holds.
    super(copyFrom == null ? Collections.emptyList() : copyFrom);
}
/**
 * Construct a list initialized with the specified JSON.
 *
 * <p>
 * Equivalent to <c>new JsonList(json, JsonParser.DEFAULT)</c>.
 *
 * @param json
 * The JSON text to parse.
 * <br>Can be normal or simplified JSON.
 * @throws ParseException Malformed input encountered.
 */
public JsonList(CharSequence json) throws ParseException {
    this(json, JsonParser.DEFAULT);
}
/**
 * Construct a list initialized with the specified string.
 *
 * @param in
 * The input being parsed.
 * <br>Can be <jk>null</jk> (results in an empty list).
 * @param p
 * The parser to use to parse the input.
 * <br>If <jk>null</jk>, uses {@link JsonParser}.
 * @throws ParseException Malformed input encountered.
 */
public JsonList(CharSequence in, Parser p) throws ParseException {
    // use the default bean session when no parser is supplied
    this(p == null ? BeanContext.DEFAULT_SESSION : p.getBeanContext().getSession());
    if (p == null)
        p = JsonParser.DEFAULT;
    if (in != null)
        p.parseIntoCollection(in, this, bs().object());
}
/**
* Construct a list initialized with the specified reader containing JSON.
*
* @param json
* The reader containing JSON text to parse.
* <br>Can contain normal or simplified JSON.
* @throws ParseException Malformed input encountered.
*/
public JsonList(Reader json) throws ParseException {
parse(json, JsonParser.DEFAULT);
}
/**
* Construct a list initialized with the specified string.
*
* @param in
* The reader containing the input being parsed.
* <br>Can contain normal or simplified JSON.
* @param p
* The parser to use to parse the input.
* <br>If <jk>null</jk>, uses {@link JsonParser}.
* @throws ParseException Malformed input encountered.
*/
public JsonList(Reader in, Parser p) throws ParseException {
this(p == null ? BeanContext.DEFAULT_SESSION : p.getBeanContext().getSession());
parse(in, p);
}
/**
* Construct a list initialized with the contents.
*
* @param entries The entries to add to this list.
*/
public JsonList(Object... entries) {
Collections.addAll(this, entries);
}
//------------------------------------------------------------------------------------------------------------------
// Creators
//------------------------------------------------------------------------------------------------------------------
/**
 * Construct an empty list.
 *
 * @return An empty list.
 */
public static JsonList create() {
return new JsonList();
}
/**
 * Construct a list initialized with the specified list.
 *
 * @param values
 * The list to copy.
 * <br>Can be <jk>null</jk>.
 * @return A new list or <jk>null</jk> if the list was <jk>null</jk>.
 */
public static JsonList of(Collection<?> values) {
// Null-in/null-out convenience; otherwise performs a shallow copy.
return values == null ? null : new JsonList(values);
}
/**
 * Convenience method for creating a list whose elements are themselves collections.
 *
 * <p>
 * Each argument becomes a single element of the result; the collections are not flattened.
 *
 * @param values The initial values.
 * @return A new list.
 */
public static JsonList ofCollections(Collection<?>...values) {
    JsonList result = new JsonList();
    for (int i = 0; i < values.length; i++)
        result.add(values[i]);
    return result;
}
/**
 * Convenience method for creating a list whose elements are object arrays.
 *
 * <p>
 * Each array argument becomes a single element of the result; the arrays are not flattened.
 *
 * @param values The initial values.
 * @return A new list.
 */
public static JsonList ofArrays(Object[]...values) {
    JsonList result = new JsonList();
    for (int i = 0; i < values.length; i++)
        result.add(values[i]);
    return result;
}
/**
 * Construct a list initialized with the specified JSON string.
 *
 * @param json
 * The JSON text to parse.
 * <br>Can be normal or simplified JSON.
 * @return A new list or <jk>null</jk> if the string was null.
 * @throws ParseException Malformed input encountered.
 */
public static JsonList ofJson(CharSequence json) throws ParseException {
return json == null ? null : new JsonList(json);
}
/**
 * Construct a list initialized with the specified string.
 *
 * @param in
 * The input being parsed.
 * <br>Can be <jk>null</jk>.
 * @param p
 * The parser to use to parse the input.
 * <br>If <jk>null</jk>, uses {@link JsonParser}.
 * @return A new list or <jk>null</jk> if the input was <jk>null</jk>.
 * @throws ParseException Malformed input encountered.
 */
public static JsonList ofText(CharSequence in, Parser p) throws ParseException {
// The (CharSequence,Parser) constructor honors the supplied parser.
return in == null ? null : new JsonList(in, p);
}
/**
 * Construct a list initialized with the specified reader containing JSON.
 *
 * @param json
 * The reader containing JSON text to parse.
 * <br>Can contain normal or simplified JSON.
 * @return A new list or <jk>null</jk> if the input was <jk>null</jk>.
 * @throws ParseException Malformed input encountered.
 */
public static JsonList ofJson(Reader json) throws ParseException {
return json == null ? null : new JsonList(json);
}
/**
 * Construct a list initialized with the specified reader containing parsable text.
 *
 * @param in
 * 	The reader containing the input being parsed.
 * 	<br>Can contain normal or simplified JSON.
 * @param p
 * 	The parser to use to parse the input.
 * 	<br>If <jk>null</jk>, uses {@link JsonParser}.
 * @return A new list or <jk>null</jk> if the input was <jk>null</jk>.
 * @throws ParseException Malformed input encountered.
 */
public static JsonList ofText(Reader in, Parser p) throws ParseException {
    // BUG FIX: previously called new JsonList(in), silently discarding the
    // caller-supplied parser and always parsing with JsonParser.DEFAULT.
    // Now delegates to the (Reader,Parser) constructor so the requested parser
    // is honored (a null parser still falls back to JsonParser.DEFAULT there),
    // matching the behavior of ofText(CharSequence,Parser).
    return in == null ? null : new JsonList(in, p);
}
/**
 * Construct a list initialized with the specified values.
 *
 * @param values The values to add to this list.
 * @return A new list, never <jk>null</jk>.
 */
public static JsonList of(Object... values) {
// Varargs delegate to the (Object...) constructor; an empty call yields an empty list.
return new JsonList(values);
}
//------------------------------------------------------------------------------------------------------------------
// Initializers
//------------------------------------------------------------------------------------------------------------------
/**
 * Override the default bean session used for converting POJOs.
 *
 * <p>
 * Default is {@link BeanContext#DEFAULT}, which is sufficient in most cases.
 *
 * <p>
 * Useful if you're serializing/parsing beans with transforms defined.
 *
 * @param session The new bean session.
 * @return This object.
 */
public JsonList session(BeanSession session) {
// Fluent equivalent of setBeanSession(BeanSession).
this.session = session;
return this;
}
//------------------------------------------------------------------------------------------------------------------
// Appenders
//------------------------------------------------------------------------------------------------------------------
/**
 * Adds the value to this list.
 *
 * @param value The value to add to this list.
 * @return This object.
 */
public JsonList append(Object value) {
// Fluent wrapper around List.add(Object).
add(value);
return this;
}
/**
 * Adds all the values in the specified array to this list.
 *
 * @param values The values to add to this list.
 * @return This object.
 */
public JsonList append(Object...values) {
    for (Object value : values)
        add(value);
    return this;
}
/**
 * Adds all the values in the specified collection to this list.
 *
 * @param values The values to add to this list.  A <jk>null</jk> collection is a no-op.
 * @return This object.
 */
public JsonList append(Collection<?> values) {
    if (values == null)
        return this;
    addAll(values);
    return this;
}
/**
 * Adds an entry to this list if the boolean flag is <jk>true</jk>.
 *
 * @param flag The boolean flag.
 * @param value The value to add.
 * @return This object.
 */
public JsonList appendIf(boolean flag, Object value) {
    if (! flag)
        return this;
    add(value);
    return this;
}
/**
 * Adds all the entries in the specified collection to this list in reverse order.
 *
 * @param values The collection to add to this list.
 * @return This object.
 */
public JsonList appendReverse(List<?> values) {
    // Walk the source list back-to-front so the last element is appended first.
    ListIterator<?> it = values.listIterator(values.size());
    while (it.hasPrevious())
        add(it.previous());
    return this;
}
/**
 * Adds the contents of the array to the list in reverse order.
 *
 * <p>
 * i.e. add values from the array from end-to-start order to the end of the list.
 *
 * @param values The collection to add to this list.
 * @return This object.
 */
public JsonList appendReverse(Object...values) {
    int i = values.length;
    while (i > 0)
        add(values[--i]);
    return this;
}
/**
 * Add if predicate matches.
 *
 * @param <T> The type being tested.
 * @param test The predicate to match against.
 * @param value The value to add if the predicate matches.
 * @return This object.
 */
public <T> JsonList appendIf(Predicate<T> test, T value) {
    // test(Predicate,Object) is a statically-imported helper; presumably it treats a
    // null predicate as a non-match -- TODO confirm against the utility class.
    boolean matches = test(test, value);
    if (matches)
        add(value);
    return this;
}
//------------------------------------------------------------------------------------------------------------------
// Retrievers
//------------------------------------------------------------------------------------------------------------------
/**
 * Get the entry at the specified index, converted to the specified type.
 *
 * <p>
 * This is the preferred get method for simple types.
 *
 * <h5 class='section'>Examples:</h5>
 * <p class='bjava'>
 * JsonList <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"..."</js>);
 *
 * <jc>// Value converted to a string.</jc>
 * String <jv>string</jv> = <jv>list</jv>.get(1, String.<jk>class</jk>);
 *
 * <jc>// Value converted to a bean.</jc>
 * MyBean <jv>bean</jv> = <jv>list</jv>.get(2, MyBean.<jk>class</jk>);
 *
 * <jc>// Value converted to a bean array.</jc>
 * MyBean[] <jv>beanArray</jv> = <jv>list</jv>.get(3, MyBean[].<jk>class</jk>);
 *
 * <jc>// Value converted to a linked-list of objects.</jc>
 * List <jv>list2</jv> = <jv>list</jv>.get(4, LinkedList.<jk>class</jk>);
 *
 * <jc>// Value converted to a map of object keys/values.</jc>
 * Map <jv>map</jv> = <jv>list</jv>.get(5, TreeMap.<jk>class</jk>);
 * </p>
 *
 * <p>
 * See {@link BeanSession#convertToType(Object, ClassMeta)} for the list of valid data conversions.
 *
 * @param index The index into this list.
 * @param type The type of object to convert the entry to.
 * @param <T> The type of object to convert the entry to.
 * @return The converted entry.
 */
public <T> T get(int index, Class<T> type) {
// Conversion is delegated to the bean session (lazily defaulted via bs()).
return bs().convertToType(get(index), type);
}
/**
 * Get the entry at the specified index, converted to the specified type.
 *
 * <p>
 * The type can be a simple type (e.g. beans, strings, numbers) or parameterized type (collections/maps).
 *
 * <h5 class='section'>Examples:</h5>
 * <p class='bjava'>
 * JsonList <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"..."</js>);
 *
 * <jc>// Value converted to a linked-list of strings.</jc>
 * List<String> <jv>list1</jv> = <jv>list</jv>.get(1, LinkedList.<jk>class</jk>, String.<jk>class</jk>);
 *
 * <jc>// Value converted to a linked-list of beans.</jc>
 * List<MyBean> <jv>list2</jv> = <jv>list</jv>.get(2, LinkedList.<jk>class</jk>, MyBean.<jk>class</jk>);
 *
 * <jc>// Value converted to a linked-list of linked-lists of strings.</jc>
 * List<List<String>> <jv>list3</jv> = <jv>list</jv>.get(3, LinkedList.<jk>class</jk>, LinkedList.<jk>class</jk>, String.<jk>class</jk>);
 *
 * <jc>// Value converted to a map of string keys/values.</jc>
 * Map<String,String> <jv>map1</jv> = <jv>list</jv>.get(4, TreeMap.<jk>class</jk>, String.<jk>class</jk>, String.<jk>class</jk>);
 *
 * <jc>// Value converted to a map containing string keys and values of lists containing beans.</jc>
 * Map<String,List<MyBean>> <jv>map2</jv> = <jv>list</jv>.get(5, TreeMap.<jk>class</jk>, String.<jk>class</jk>, List.<jk>class</jk>, MyBean.<jk>class</jk>);
 * </p>
 *
 * <p>
 * <c>Collection</c> classes are assumed to be followed by zero or one objects indicating the element type.
 *
 * <p>
 * <c>Map</c> classes are assumed to be followed by zero or two meta objects indicating the key and value types.
 *
 * <p>
 * The array can be arbitrarily long to indicate arbitrarily complex data structures.
 *
 * <p>
 * See {@link BeanSession#convertToType(Object, ClassMeta)} for the list of valid data conversions.
 *
 * @param index The index into this list.
 * @param type The type of object to convert the entry to.
 * @param args The type arguments of the type to convert the entry to.
 * @param <T> The type of object to convert the entry to.
 * @return The converted entry.
 */
public <T> T get(int index, Type type, Type...args) {
// Parameterized-type variant; the bean session resolves the full generic signature.
return bs().convertToType(get(index), type, args);
}
// Typed convenience getters.  All conversion is delegated to the bean session;
// failed conversions surface as InvalidDataConversionException.
/**
 * Shortcut for calling <code>get(index, String.<jk>class</jk>)</code>.
 *
 * @param index The index.
 * @return The converted value.
 */
public String getString(int index) {
return get(index, String.class);
}
/**
 * Shortcut for calling <code>get(index, Integer.<jk>class</jk>)</code>.
 *
 * @param index The index.
 * @return The converted value.
 * @throws InvalidDataConversionException If value cannot be converted.
 */
public Integer getInt(int index) {
return get(index, Integer.class);
}
/**
 * Shortcut for calling <code>get(index, Boolean.<jk>class</jk>)</code>.
 *
 * @param index The index.
 * @return The converted value.
 * @throws InvalidDataConversionException If value cannot be converted.
 */
public Boolean getBoolean(int index) {
return get(index, Boolean.class);
}
/**
 * Shortcut for calling <code>get(index, Long.<jk>class</jk>)</code>.
 *
 * @param index The index.
 * @return The converted value.
 * @throws InvalidDataConversionException If value cannot be converted.
 */
public Long getLong(int index) {
return get(index, Long.class);
}
/**
 * Shortcut for calling <code>get(index, JsonMap.<jk>class</jk>)</code>.
 *
 * @param index The index.
 * @return The converted value.
 * @throws InvalidDataConversionException If value cannot be converted.
 */
public JsonMap getMap(int index) {
return get(index, JsonMap.class);
}
/**
 * Same as {@link #getMap(int)} except converts the keys and values to the specified types.
 *
 * @param <K> The key type class.
 * @param <V> The value type class.
 * @param index The index.
 * @param keyType The key type class.
 * @param valType The value type class.
 * @return The converted value.
 * @throws InvalidDataConversionException If value cannot be converted.
 */
public <K,V> Map<K,V> getMap(int index, Class<K> keyType, Class<V> valType) {
return bs().convertToType(get(index), Map.class, keyType, valType);
}
/**
 * Shortcut for calling <code>get(index, JsonList.<jk>class</jk>)</code>.
 *
 * @param index The index.
 * @return The converted value.
 * @throws InvalidDataConversionException If value cannot be converted.
 */
public JsonList getList(int index) {
return get(index, JsonList.class);
}
/**
 * Same as {@link #getList(int)} except converts the elements to the specified types.
 *
 * @param <E> The element type.
 * @param index The index.
 * @param elementType The element type class.
 * @return The converted value.
 * @throws InvalidDataConversionException If value cannot be converted.
 */
public <E> List<E> getList(int index, Class<E> elementType) {
return bs().convertToType(get(index), List.class, elementType);
}
//------------------------------------------------------------------------------------------------------------------
// POJO REST methods.
//------------------------------------------------------------------------------------------------------------------
/**
 * Same as {@link #get(int,Class) get(int,Class)}, but the key is a slash-delimited path used to traverse entries in
 * this POJO.
 *
 * <p>
 * For example, the following code is equivalent:
 * </p>
 * <p class='bjava'>
 * JsonList <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"..."</js>);
 *
 * <jc>// Long way</jc>
 * <jk>long</jk> <jv>long1</jv> = <jv>list</jv>.getMap(<js>"0"</js>).getLong(<js>"baz"</js>);
 *
 * <jc>// Using this method</jc>
 * <jk>long</jk> <jv>long2</jv> = <jv>list</jv>.getAt(<js>"0/baz"</js>, <jk>long</jk>.<jk>class</jk>);
 * </p>
 *
 * <p>
 * This method uses the {@link ObjectRest} class to perform the lookup, so the map can contain any of the various
 * class types that the {@link ObjectRest} class supports (e.g. beans, collections, arrays).
 *
 * @param path The path to the entry.
 * @param type The class type.
 *
 * @param <T> The class type.
 * @return The value, or <jk>null</jk> if the entry doesn't exist.
 */
public <T> T getAt(String path, Class<T> type) {
// Path traversal and type conversion are both handled by the lazily-created ObjectRest wrapper.
return getObjectRest().get(path, type);
}
/**
 * Same as {@link #getAt(String,Class)}, but allows for conversion to complex maps and collections.
 *
 * @param path The path to the entry.
 * @param type The class type.
 * @param args The class parameter types.
 *
 * @param <T> The class type.
 * @return The value, or <jk>null</jk> if the entry doesn't exist.
 */
public <T> T getAt(String path, Type type, Type...args) {
return getObjectRest().get(path, type, args);
}
/**
 * Same as {@link #set(int,Object) set(int,Object)}, but the key is a slash-delimited path used to traverse entries
 * in this POJO.
 *
 * <p>
 * For example, the following code is equivalent:
 * </p>
 * <p class='bjava'>
 * JsonList <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"..."</js>);
 *
 * <jc>// Long way</jc>
 * <jv>list</jv>.getMap(<js>"0"</js>).put(<js>"baz"</js>, 123);
 *
 * <jc>// Using this method</jc>
 * <jv>list</jv>.putAt(<js>"0/baz"</js>, 123);
 * </p>
 *
 * <p>
 * This method uses the {@link ObjectRest} class to perform the lookup, so the map can contain any of the various
 * class types that the {@link ObjectRest} class supports (e.g. beans, collections, arrays).
 *
 * @param path The path to the entry.
 * @param o The new value.
 * @return The previous value, or <jk>null</jk> if the entry doesn't exist.
 */
public Object putAt(String path, Object o) {
return getObjectRest().put(path, o);
}
/**
 * Similar to {@link #putAt(String,Object) putAt(String,Object)}, but used to append to collections and arrays.
 *
 * <p>
 * For example, the following code is equivalent:
 * </p>
 * <p class='bjava'>
 * JsonList <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"..."</js>);
 *
 * <jc>// Long way</jc>
 * <jv>list</jv>.getMap(0).getList(<js>"bar"</js>).append(123);
 *
 * <jc>// Using this method</jc>
 * <jv>list</jv>.postAt(<js>"0/bar"</js>, 123);
 * </p>
 *
 * <p>
 * This method uses the {@link ObjectRest} class to perform the lookup, so the map can contain any of the various
 * class types that the {@link ObjectRest} class supports (e.g. beans, collections, arrays).
 *
 * @param path The path to the entry.
 * @param o The new value.
 * @return The previous value, or <jk>null</jk> if the entry doesn't exist.
 */
public Object postAt(String path, Object o) {
return getObjectRest().post(path, o);
}
/**
 * Similar to {@link #remove(int) remove(int)},but the key is a slash-delimited path used to traverse entries in
 * this POJO.
 *
 * <p>
 * For example, the following code is equivalent:
 * </p>
 * <p class='bjava'>
 * JsonList <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"..."</js>);
 *
 * <jc>// Long way</jc>
 * <jv>list</jv>.getMap(0).getList(<js>"bar"</js>).delete(0);
 *
 * <jc>// Using this method</jc>
 * <jv>list</jv>.deleteAt(<js>"0/bar/0"</js>);
 * </p>
 *
 * <p>
 * This method uses the {@link ObjectRest} class to perform the lookup, so the map can contain any of the various
 * class types that the {@link ObjectRest} class supports (e.g. beans, collections, arrays).
 *
 * @param path The path to the entry.
 * @return The previous value, or <jk>null</jk> if the entry doesn't exist.
 */
public Object deleteAt(String path) {
return getObjectRest().delete(path);
}
//------------------------------------------------------------------------------------------------------------------
// Other methods
//------------------------------------------------------------------------------------------------------------------
/**
 * Returns the {@link BeanSession} currently associated with this list.
 *
 * @return The {@link BeanSession} currently associated with this list.
 */
public BeanSession getBeanSession() {
// May be null until bs() lazily installs the default session.
return session;
}
/**
 * Sets the {@link BeanSession} currently associated with this list.
 *
 * @param value The {@link BeanSession} currently associated with this list.
 * @return This object.
 */
public JsonList setBeanSession(BeanSession value) {
// Same effect as the fluent session(BeanSession) method.
this.session = value;
return this;
}
/**
 * Creates an {@link Iterable} with elements of the specified child type.
 *
 * <p>
 * Attempts to convert the child objects to the correct type if they aren't already the correct type.
 *
 * <p>
 * The <c>next()</c> method on the returned iterator may throw a {@link InvalidDataConversionException} if
 * the next element cannot be converted to the specified type.
 *
 * <p>
 * See {@link BeanSession#convertToType(Object, ClassMeta)} for a description of valid conversions.
 *
 * <h5 class='section'>Example:</h5>
 * <p class='bjava'>
 * 	<jc>// Iterate over a list of JsonMaps.</jc>
 * 	JsonList <jv>list</jv> = JsonList.<jsm>ofJson</jsm>(<js>"[{foo:'bar'},{baz:123}]"</js>);
 * 	<jk>for</jk> (JsonMap <jv>map</jv> : <jv>list</jv>.elements(JsonMap.<jk>class</jk>)) {
 * 		<jc>// Do something with map.</jc>
 * 	}
 * </p>
 *
 * @param <E> The child object type.
 * @param childType The child object type.
 * @return A new <c>Iterable</c> object over this list.  Each call to its <c>iterator()</c>
 * 	starts a fresh traversal of this list.
 */
public <E> Iterable<E> elements(final Class<E> childType) {
    // IMPROVEMENT: the previous implementation captured a single iterator up front,
    // which made the returned Iterable single-shot -- a second for-each over it would
    // silently see no elements.  Creating the iterator inside the lambda makes each
    // call to Iterable.iterator() independent, matching the Iterable contract.
    return () -> {
        final Iterator<?> i = iterator();
        return new Iterator<>() {
            @Override /* Iterator */
            public boolean hasNext() {
                return i.hasNext();
            }
            @Override /* Iterator */
            public E next() {
                // Conversion failures propagate from the bean session.
                return bs().convertToType(i.next(), childType);
            }
            @Override /* Iterator */
            public void remove() {
                i.remove();
            }
        };
    };
}
/**
 * Returns the {@link ClassMeta} of the class of the object at the specified index.
 *
 * @param index An index into this list, zero-based.
 * @return The data type of the object at the specified index, or <jk>null</jk> if the value is null.
 */
public ClassMeta<?> getClassMeta(int index) {
return bs().getClassMetaForObject(get(index));
}
/**
 * Serialize this array to a string using the specified serializer.
 *
 * @param serializer The serializer to use to convert this object to a string.
 * @return This object as a serialized string.
 */
public String asString(WriterSerializer serializer) {
return serializer.toString(this);
}
/**
 * Serialize this array to Simplified JSON.
 *
 * @return This object as a serialized string.
 */
public String asString() {
// Uses the shared default JSON5 serializer.
return Json5Serializer.DEFAULT.toString(this);
}
/**
 * Returns <jk>true</jk> if this list is unmodifiable.
 *
 * @return <jk>true</jk> if this list is unmodifiable.
 */
public boolean isUnmodifiable() {
// Overridden to return true by the UnmodifiableJsonList subclass.
return false;
}
/**
 * Returns a modifiable copy of this list if it's unmodifiable.
 *
 * @return A modifiable copy of this list if it's unmodifiable, or this list if it is already modifiable.
 */
public JsonList modifiable() {
    // Copy only when needed; a modifiable list is returned as-is.
    return isUnmodifiable() ? new JsonList(this) : this;
}
/**
 * Returns an unmodifiable copy of this list if it's modifiable.
 *
 * @return An unmodifiable copy of this list if it's modifiable, or this list if it is already unmodifiable.
 */
public JsonList unmodifiable() {
    // Wrap only when needed; an already-unmodifiable list is returned as-is.
    return this instanceof UnmodifiableJsonList ? this : new UnmodifiableJsonList(this);
}
/**
 * Convenience method for serializing this JsonList to the specified Writer using the JsonSerializer.DEFAULT
 * serializer.
 *
 * @param w The writer to send the serialized contents of this object.
 * @return This object.
 * @throws IOException If a problem occurred trying to write to the writer.
 * @throws SerializeException If a problem occurred trying to convert the output.
 */
public JsonList writeTo(Writer w) throws IOException, SerializeException {
JsonSerializer.DEFAULT.serialize(this, w);
return this;
}
/**
 * Converts this object into the specified class type.
 *
 * <p>
 * TODO - The current implementation is very inefficient.
 *
 * @param cm The class type to convert this object to.
 * @return A converted object.
 */
public Object cast(ClassMeta<?> cm) {
// Round-trips through JSON5 text (serialize then re-parse) -- hence the
// inefficiency note above.  Parse/serialize failures become runtime exceptions.
try {
return JsonParser.DEFAULT.parse(Json5Serializer.DEFAULT.serialize(this), cm);
} catch (ParseException | SerializeException e) {
throw asRuntimeException(e);
}
}
//------------------------------------------------------------------------------------------------------------------
// Utility methods
//------------------------------------------------------------------------------------------------------------------
// Parses the reader contents into this list, substituting the default JSON parser when none given.
private void parse(Reader r, Parser p) throws ParseException {
if (p == null)
p = JsonParser.DEFAULT;
p.parseIntoCollection(r, this, bs().object());
}
// Lazily creates the ObjectRest wrapper used by the *At(path) methods.
// NOTE(review): lazy init is not thread-safe; presumably this class is
// single-threaded like ArrayList -- confirm if shared across threads.
private ObjectRest getObjectRest() {
if (objectRest == null)
objectRest = new ObjectRest(this);
return objectRest;
}
// Returns the bean session, lazily installing the shared default on first use.
BeanSession bs() {
if (session == null)
session = BeanContext.DEFAULT_SESSION;
return session;
}
/**
 * Read-only view subclass returned by {@link #unmodifiable()}.
 *
 * <p>
 * NOTE(review): only the index-based mutators ({@code add(int,Object)}, {@code remove(int)},
 * {@code set(int,Object)}) are blocked; other inherited mutators (e.g. {@code add(Object)},
 * {@code clear()}) are not overridden -- confirm whether full immutability is intended.
 */
private static class UnmodifiableJsonList extends JsonList {
    private static final long serialVersionUID = 1L;

    @SuppressWarnings("synthetic-access")
    UnmodifiableJsonList(JsonList contents) {
        // BUG FIX: previously iterated "this" (which is empty at construction time)
        // instead of "contents", so the "unmodifiable copy" was always empty.
        // super::add bypasses the throwing overrides below.
        if (contents != null)
            contents.forEach(super::add);
    }

    @Override /* List */
    public void add(int location, Object object) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override /* List */
    public Object remove(int location) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override /* List */
    public Object set(int location, Object object) {
        throw new UnsupportedOperationException("Not supported on read-only object.");
    }

    @Override
    public boolean isUnmodifiable() {
        return true;
    }
}
//------------------------------------------------------------------------------------------------------------------
// Overridden methods.
//------------------------------------------------------------------------------------------------------------------
/**
 * A synonym for {@link #toString()}
 *
 * @return This object as a JSON string.
 */
public String asJson() {
return toString();
}
@Override /* Object */
public String toString() {
// Serializes with the default JSON5 serializer.
return Json5.of(this);
}
}
|
googleapis/google-cloud-java | 37,488 | java-discoveryengine/proto-google-cloud-discoveryengine-v1beta/src/main/java/com/google/cloud/discoveryengine/v1beta/WriteUserEventRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1beta/user_event_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1beta;
/**
*
*
* <pre>
* Request message for WriteUserEvent method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1beta.WriteUserEventRequest}
*/
public final class WriteUserEventRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1beta.WriteUserEventRequest)
WriteUserEventRequestOrBuilder {
// NOTE(review): protobuf-generated code ("DO NOT EDIT") -- comment-only notes below;
// regeneration will discard them.
private static final long serialVersionUID = 0L;
// Use WriteUserEventRequest.newBuilder() to construct.
private WriteUserEventRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor initializes string fields to their proto3 default (empty string).
private WriteUserEventRequest() {
parent_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new WriteUserEventRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1beta.UserEventServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_WriteUserEventRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1beta.UserEventServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_WriteUserEventRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest.class,
com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest.Builder.class);
}
// Tracks presence of optional fields (bit 0 = user_event).
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. The parent resource name.
 * If the write user event action is applied in
 * [DataStore][google.cloud.discoveryengine.v1beta.DataStore] level, the
 * format is:
 * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`.
 * If the write user event action is applied in [Location][] level, for
 * example, the event with
 * [Document][google.cloud.discoveryengine.v1beta.Document] across multiple
 * [DataStore][google.cloud.discoveryengine.v1beta.DataStore], the format is:
 * `projects/{project}/locations/{location}`.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Lazily decode the ByteString to a String and cache it back into the field.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * Required. The parent resource name.
 * If the write user event action is applied in
 * [DataStore][google.cloud.discoveryengine.v1beta.DataStore] level, the
 * format is:
 * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`.
 * If the write user event action is applied in [Location][] level, for
 * example, the event with
 * [Document][google.cloud.discoveryengine.v1beta.Document] across multiple
 * [DataStore][google.cloud.discoveryengine.v1beta.DataStore], the format is:
 * `projects/{project}/locations/{location}`.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
// Lazily encode the String to a ByteString and cache it back into the field.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int USER_EVENT_FIELD_NUMBER = 2;
private com.google.cloud.discoveryengine.v1beta.UserEvent userEvent_;
/**
 *
 *
 * <pre>
 * Required. User event to write.
 * </pre>
 *
 * <code>
 * optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the userEvent field is set.
 */
@java.lang.Override
public boolean hasUserEvent() {
// Presence is tracked via bit 0 of bitField0_.
return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * Required. User event to write.
 * </pre>
 *
 * <code>
 * optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The userEvent.
 */
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.UserEvent getUserEvent() {
// Never returns null; falls back to the default instance when unset.
return userEvent_ == null
? com.google.cloud.discoveryengine.v1beta.UserEvent.getDefaultInstance()
: userEvent_;
}
/**
 *
 *
 * <pre>
 * Required. User event to write.
 * </pre>
 *
 * <code>
 * optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.UserEventOrBuilder getUserEventOrBuilder() {
return userEvent_ == null
? com.google.cloud.discoveryengine.v1beta.UserEvent.getDefaultInstance()
: userEvent_;
}
public static final int WRITE_ASYNC_FIELD_NUMBER = 3;
private boolean writeAsync_ = false;
/**
 *
 *
 * <pre>
 * If set to true, the user event is written asynchronously after
 * validation, and the API responds without waiting for the write.
 * </pre>
 *
 * <code>bool write_async = 3;</code>
 *
 * @return The writeAsync.
 */
@java.lang.Override
public boolean getWriteAsync() {
return writeAsync_;
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Proto3 default-value skipping: fields at their defaults are not written.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getUserEvent());
}
if (writeAsync_ != false) {
output.writeBool(3, writeAsync_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Size is memoized; -1 means not yet computed.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUserEvent());
}
if (writeAsync_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, writeAsync_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest other =
        (com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    // user_event has explicit presence: both the presence bit and (when set)
    // the value must agree.
    if (hasUserEvent() != other.hasUserEvent()) return false;
    if (hasUserEvent()) {
      if (!getUserEvent().equals(other.getUserEvent())) return false;
    }
    if (getWriteAsync() != other.getWriteAsync()) return false;
    // Unknown fields participate in equality as well.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Standard protoc mixing scheme: field number with multiplier 37,
    // field value with multiplier 53.
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    // Only mix user_event in when present, matching equals().
    if (hasUserEvent()) {
      hash = (37 * hash) + USER_EVENT_FIELD_NUMBER;
      hash = (53 * hash) + getUserEvent().hashCode();
    }
    hash = (37 * hash) + WRITE_ASYNC_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getWriteAsync());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // -------------------------------------------------------------------------
  // Static parse entry points, one per supported input representation
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream; each with
  // and without an ExtensionRegistryLite). Byte-array-like inputs delegate to
  // PARSER directly; stream inputs go through the GeneratedMessageV3 helpers
  // that translate IOExceptions.
  // -------------------------------------------------------------------------
  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // The "delimited" variants read a leading varint length prefix, allowing
  // multiple messages to be written back-to-back on one stream.
  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Fresh empty builder.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Builder pre-populated with the prototype's field values.
  public static Builder newBuilder(
      com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty Builder; any other instance
    // seeds the Builder with its current field values via mergeFrom.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for WriteUserEvent method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1beta.WriteUserEventRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1beta.WriteUserEventRequest)
com.google.cloud.discoveryengine.v1beta.WriteUserEventRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1beta.UserEventServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_WriteUserEventRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1beta.UserEventServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_WriteUserEventRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest.class,
com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUserEventFieldBuilder();
}
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all presence bits and restore every field to its default.
      bitField0_ = 0;
      parent_ = "";
      userEvent_ = null;
      // Release the nested-message builder so a stale sub-builder cannot
      // resurrect the cleared user_event value.
      if (userEventBuilder_ != null) {
        userEventBuilder_.dispose();
        userEventBuilder_ = null;
      }
      writeAsync_ = false;
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1beta.UserEventServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_WriteUserEventRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest
getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest build() {
com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    @java.lang.Override
    public com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest buildPartial() {
      com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest result =
          new com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest(this);
      // Copy fields only if at least one builder presence bit is set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers set fields from the builder into the message. Builder bits:
    // 0x1 = parent, 0x2 = user_event, 0x4 = write_async. The message itself
    // tracks only user_event presence, in its bit 0x1.
    private void buildPartial0(
        com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        // Prefer the live sub-builder's state when one exists.
        result.userEvent_ = userEventBuilder_ == null ? userEvent_ : userEventBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.writeAsync_ = writeAsync_;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest) {
return mergeFrom((com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Field-by-field merge from another WriteUserEventRequest: only fields
    // that are set (or non-default) in 'other' overwrite/merge into this
    // builder, per standard protobuf merge semantics.
    public Builder mergeFrom(com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest other) {
      if (other
          == com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasUserEvent()) {
        // Sub-messages merge recursively rather than being replaced wholesale.
        mergeUserEvent(other.getUserEvent());
      }
      if (other.getWriteAsync() != false) {
        setWriteAsync(other.getWriteAsync());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Wire-format parse loop. Each tag is (field_number << 3) | wire_type:
    //   10 = field 1 (parent, length-delimited string)
    //   18 = field 2 (user_event, length-delimited message)
    //   24 = field 3 (write_async, varint bool)
    // Unrecognized tags are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream.
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUserEventFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                writeAsync_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure, since fields may have
        // been partially populated before the exception.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource name.
* If the write user event action is applied in
* [DataStore][google.cloud.discoveryengine.v1beta.DataStore] level, the
* format is:
* `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`.
* If the write user event action is applied in [Location][] level, for
* example, the event with
* [Document][google.cloud.discoveryengine.v1beta.Document] across multiple
* [DataStore][google.cloud.discoveryengine.v1beta.DataStore], the format is:
* `projects/{project}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource name.
* If the write user event action is applied in
* [DataStore][google.cloud.discoveryengine.v1beta.DataStore] level, the
* format is:
* `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`.
* If the write user event action is applied in [Location][] level, for
* example, the event with
* [Document][google.cloud.discoveryengine.v1beta.Document] across multiple
* [DataStore][google.cloud.discoveryengine.v1beta.DataStore], the format is:
* `projects/{project}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource name.
* If the write user event action is applied in
* [DataStore][google.cloud.discoveryengine.v1beta.DataStore] level, the
* format is:
* `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`.
* If the write user event action is applied in [Location][] level, for
* example, the event with
* [Document][google.cloud.discoveryengine.v1beta.Document] across multiple
* [DataStore][google.cloud.discoveryengine.v1beta.DataStore], the format is:
* `projects/{project}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource name.
* If the write user event action is applied in
* [DataStore][google.cloud.discoveryengine.v1beta.DataStore] level, the
* format is:
* `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`.
* If the write user event action is applied in [Location][] level, for
* example, the event with
* [Document][google.cloud.discoveryengine.v1beta.Document] across multiple
* [DataStore][google.cloud.discoveryengine.v1beta.DataStore], the format is:
* `projects/{project}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource name.
* If the write user event action is applied in
* [DataStore][google.cloud.discoveryengine.v1beta.DataStore] level, the
* format is:
* `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`.
* If the write user event action is applied in [Location][] level, for
* example, the event with
* [Document][google.cloud.discoveryengine.v1beta.Document] across multiple
* [DataStore][google.cloud.discoveryengine.v1beta.DataStore], the format is:
* `projects/{project}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.discoveryengine.v1beta.UserEvent userEvent_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.discoveryengine.v1beta.UserEvent,
com.google.cloud.discoveryengine.v1beta.UserEvent.Builder,
com.google.cloud.discoveryengine.v1beta.UserEventOrBuilder>
userEventBuilder_;
/**
*
*
* <pre>
* Required. User event to write.
* </pre>
*
* <code>
* optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the userEvent field is set.
*/
public boolean hasUserEvent() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. User event to write.
* </pre>
*
* <code>
* optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The userEvent.
*/
public com.google.cloud.discoveryengine.v1beta.UserEvent getUserEvent() {
if (userEventBuilder_ == null) {
return userEvent_ == null
? com.google.cloud.discoveryengine.v1beta.UserEvent.getDefaultInstance()
: userEvent_;
} else {
return userEventBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. User event to write.
* </pre>
*
* <code>
* optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUserEvent(com.google.cloud.discoveryengine.v1beta.UserEvent value) {
if (userEventBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
userEvent_ = value;
} else {
userEventBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User event to write.
* </pre>
*
* <code>
* optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUserEvent(
com.google.cloud.discoveryengine.v1beta.UserEvent.Builder builderForValue) {
if (userEventBuilder_ == null) {
userEvent_ = builderForValue.build();
} else {
userEventBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User event to write.
* </pre>
*
* <code>
* optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Merges 'value' into the current user_event: if one is already set (and
    // is not the default instance), the two are merged field-by-field;
    // otherwise 'value' simply becomes the field.
    public Builder mergeUserEvent(com.google.cloud.discoveryengine.v1beta.UserEvent value) {
      if (userEventBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && userEvent_ != null
            && userEvent_
                != com.google.cloud.discoveryengine.v1beta.UserEvent.getDefaultInstance()) {
          getUserEventBuilder().mergeFrom(value);
        } else {
          userEvent_ = value;
        }
      } else {
        // A live sub-builder owns the field; delegate the merge to it.
        userEventBuilder_.mergeFrom(value);
      }
      if (userEvent_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. User event to write.
* </pre>
*
* <code>
* optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUserEvent() {
bitField0_ = (bitField0_ & ~0x00000002);
userEvent_ = null;
if (userEventBuilder_ != null) {
userEventBuilder_.dispose();
userEventBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User event to write.
* </pre>
*
* <code>
* optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.discoveryengine.v1beta.UserEvent.Builder getUserEventBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUserEventFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. User event to write.
* </pre>
*
* <code>
* optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.discoveryengine.v1beta.UserEventOrBuilder getUserEventOrBuilder() {
if (userEventBuilder_ != null) {
return userEventBuilder_.getMessageOrBuilder();
} else {
return userEvent_ == null
? com.google.cloud.discoveryengine.v1beta.UserEvent.getDefaultInstance()
: userEvent_;
}
}
/**
*
*
* <pre>
* Required. User event to write.
* </pre>
*
* <code>
* optional .google.cloud.discoveryengine.v1beta.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.discoveryengine.v1beta.UserEvent,
com.google.cloud.discoveryengine.v1beta.UserEvent.Builder,
com.google.cloud.discoveryengine.v1beta.UserEventOrBuilder>
getUserEventFieldBuilder() {
if (userEventBuilder_ == null) {
userEventBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.discoveryengine.v1beta.UserEvent,
com.google.cloud.discoveryengine.v1beta.UserEvent.Builder,
com.google.cloud.discoveryengine.v1beta.UserEventOrBuilder>(
getUserEvent(), getParentForChildren(), isClean());
userEvent_ = null;
}
return userEventBuilder_;
}
private boolean writeAsync_;
/**
*
*
* <pre>
* If set to true, the user event is written asynchronously after
* validation, and the API responds without waiting for the write.
* </pre>
*
* <code>bool write_async = 3;</code>
*
* @return The writeAsync.
*/
@java.lang.Override
public boolean getWriteAsync() {
return writeAsync_;
}
/**
*
*
* <pre>
* If set to true, the user event is written asynchronously after
* validation, and the API responds without waiting for the write.
* </pre>
*
* <code>bool write_async = 3;</code>
*
* @param value The writeAsync to set.
* @return This builder for chaining.
*/
public Builder setWriteAsync(boolean value) {
writeAsync_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* If set to true, the user event is written asynchronously after
* validation, and the API responds without waiting for the write.
* </pre>
*
* <code>bool write_async = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearWriteAsync() {
bitField0_ = (bitField0_ & ~0x00000004);
writeAsync_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1beta.WriteUserEventRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1beta.WriteUserEventRequest)
private static final com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest();
}
public static com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Shared stateless parser used by all parseFrom overloads. Parse errors are
  // rethrown as InvalidProtocolBufferException with the partially built
  // message attached for diagnostics.
  private static final com.google.protobuf.Parser<WriteUserEventRequest> PARSER =
      new com.google.protobuf.AbstractParser<WriteUserEventRequest>() {
        @java.lang.Override
        public WriteUserEventRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<WriteUserEventRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<WriteUserEventRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.WriteUserEventRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sentry.tests.e2e.hdfs;
import java.io.File;
import java.net.URI;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.Statement;
import org.apache.sentry.core.common.utils.PathUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.sentry.hdfs.PathsUpdate;
import org.apache.sentry.tests.e2e.hive.StaticUserGroup;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.metastore.api.Table;
/**
* Advanced tests for HDFS Sync integration
*/
public class TestHDFSIntegrationAdvanced extends TestHDFSIntegrationBase {
private static final Logger LOGGER = LoggerFactory
.getLogger(TestHDFSIntegrationAdvanced.class);
  /**
   * Enables HDFS permission synchronization before delegating to the shared
   * base-class cluster setup, so every test in this class runs with Sentry's
   * HDFS sync plugin active.
   */
  @BeforeClass
  public static void setup() throws Exception{
    hdfsSyncEnabled = true;
    TestHDFSIntegrationBase.setup();
  }
@Test
public void testNoPartitionInsert() throws Throwable {
dbNames = new String[]{"db1"};
roles = new String[]{"admin_role", "tab_role"};
admin = "hive";
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant role admin_role to group hive");
stmt.execute("grant all on server server1 to role admin_role");
//Create table and grant select to user flume
stmt.execute("create database db1");
stmt.execute("use db1");
stmt.execute("create table t1 (s string)");
stmt.execute("create role tab_role");
stmt.execute("grant select on table t1 to role tab_role");
stmt.execute("grant role tab_role to group flume");
verifyGroupPermOnAllSubDirs("/user/hive/warehouse/db1.db/t1", FsAction.READ_EXECUTE, "flume", true);
stmt.execute("INSERT INTO TABLE t1 VALUES (1)");
verifyGroupPermOnAllSubDirs("/user/hive/warehouse/db1.db/t1", FsAction.READ_EXECUTE, "flume", true);
}
  /**
   * Make sure non HDFS paths are not added to the object - location map.
   *
   * Exercises external tables/partitions located on the local file system
   * (scheme "file://") and verifies Sentry does NOT sync group ACLs there,
   * while HDFS-backed locations DO receive the synced permissions. Also
   * covers switching a table's location between HDFS and local FS in both
   * directions.
   * @throws Throwable
   */
  @Test
  public void testNonHDFSLocations() throws Throwable {
    String dbName = "db2";
    dbNames = new String[]{dbName};
    roles = new String[]{"admin_role", "user_role"};
    admin = StaticUserGroup.ADMIN1;

    Connection conn;
    Statement stmt;

    // Bootstrap: admin_role with server-wide and URI privileges for both
    // the local and HDFS external locations.
    conn = hiveServer2.createConnection("hive", "hive");
    stmt = conn.createStatement();
    stmt.execute("create role admin_role");
    stmt.execute("grant all on server server1 to role admin_role");
    stmt.execute("grant all on uri 'file:///tmp/external' to role admin_role");
    stmt.execute("grant all on uri 'hdfs:///tmp/external' to role admin_role");
    stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);

    conn = hiveServer2.createConnection(admin, admin);
    stmt = conn.createStatement();
    stmt.execute("create database " + dbName);
    stmt.close();
    conn.close();

    // user_role gives USERGROUP1 full access on the database; this is the
    // group whose ACL presence/absence is checked below.
    conn = hiveServer2.createConnection("hive", "hive");
    stmt = conn.createStatement();
    stmt.execute("create role user_role");
    stmt.execute("grant all on database " + dbName + " to role user_role");
    stmt.execute("grant role user_role to group " + StaticUserGroup.USERGROUP1);
    stmt.close();
    conn.close();

    conn = hiveServer2.createConnection(admin, admin);
    stmt = conn.createStatement();

    //External table on local file system
    miniDFS.getFileSystem().mkdirs(new Path("/tmp/external/tab1_loc"));
    stmt.execute("use " + dbName);
    stmt.execute("create external table tab1(a int) location 'file:///tmp/external/tab1_loc'");
    // Local-FS location: no ACLs should be synced for USERGROUP1.
    verifyGroupPermOnAllSubDirs("/tmp/external/tab1_loc", null, StaticUserGroup.USERGROUP1, false);

    //External partitioned table on local file system
    miniDFS.getFileSystem().mkdirs(new Path("/tmp/external/tab2_loc/i=1"));
    stmt.execute("create external table tab2 (s string) partitioned by (i int) location 'file:///tmp/external/tab2_loc'");
    verifyGroupPermOnAllSubDirs("/tmp/external/tab2_loc", null, StaticUserGroup.USERGROUP1, false);
    //Partition on local file system
    stmt.execute("alter table tab2 add partition (i=1)");
    stmt.execute("alter table tab2 partition (i=1) set location 'file:///tmp/external/tab2_loc/i=1'");
    verifyGroupPermOnAllSubDirs("/tmp/external/tab2_loc/i=1", null, StaticUserGroup.USERGROUP1, false);

    //HDFS to local file system, also make sure does not specifying scheme still works
    stmt.execute("create external table tab3(a int) location '/tmp/external/tab3_loc'");
    // SENTRY-546
    // SENTRY-1471 - fixing the validation logic revealed that FsAction.ALL is the right value.
    verifyGroupPermOnAllSubDirs("/tmp/external/tab3_loc", FsAction.ALL, StaticUserGroup.USERGROUP1, true);
    // verifyGroupPermOnAllSubDirs("/tmp/external/tab3_loc", null, StaticUserGroup.USERGROUP1, true);
    stmt.execute("alter table tab3 set location 'file:///tmp/external/tab3_loc'");
    // After moving to local FS the synced ACLs must be gone.
    verifyGroupPermOnAllSubDirs("/tmp/external/tab3_loc", null, StaticUserGroup.USERGROUP1, false);

    //Local file system to HDFS
    stmt.execute("create table tab4(a int) location 'file:///tmp/external/tab4_loc'");
    stmt.execute("alter table tab4 set location 'hdfs:///tmp/external/tab4_loc'");
    miniDFS.getFileSystem().mkdirs(new Path("/tmp/external/tab4_loc"));
    // SENTRY-546
    // SENTRY-1471 - fixing the validation logic revealed that FsAction.ALL is the right value.
    verifyGroupPermOnAllSubDirs("/tmp/external/tab4_loc", FsAction.ALL, StaticUserGroup.USERGROUP1, true);
    // verifyGroupPermOnAllSubDirs("/tmp/external/tab4_loc", null, StaticUserGroup.USERGROUP1, true);
    stmt.close();
    conn.close();
  }
  /**
   * Make sure when events such as table creation fail, the path should not be sync to NameNode plugin.
   *
   * The failure is induced by making the parent directory unwritable to
   * hive:hive; afterwards the location must NOT carry Sentry-managed
   * hive-group permissions.
   */
  @Test
  public void testTableCreationFailure() throws Throwable {
    String dbName = "db1";
    dbNames = new String[]{dbName};
    roles = new String[]{"admin_role"};
    admin = StaticUserGroup.ADMIN1;

    Connection conn;
    Statement stmt;

    conn = hiveServer2.createConnection("hive", "hive");
    stmt = conn.createStatement();
    stmt.execute("create role admin_role");
    stmt.execute("grant all on server server1 to role admin_role");
    stmt.execute("grant all on uri 'hdfs:///tmp/external' to role admin_role");
    stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
    stmt.execute("grant role admin_role to group " + StaticUserGroup.HIVE);
    stmt.close();
    conn.close();

    conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
    stmt = conn.createStatement();
    stmt.execute("create database " + dbName);
    // Lock down the parent directory so the metastore (running as hive:hive)
    // cannot create the table location.
    miniDFS.getFileSystem().setOwner(tmpHDFSDir, "hdfs", "hdfs");
    miniDFS.getFileSystem().setPermission(tmpHDFSDir, FsPermission.valueOf("drwxrwx---"));

    // Expect table creation to fail because hive:hive does not have
    // permission to write at parent directory.
    try {
      stmt.execute("create external table tab1(a int) location '" + tmpHDFSPartitionStr + "'");
      Assert.fail("Expect table creation to fail");
    } catch  (Exception ex) {
      // Expected path: the failure itself is the scenario under test, so the
      // exception is only logged, not rethrown.
      LOGGER.error("Exception when creating table: " + ex.getMessage());
    }

    // When the table creation failed, the path will not be managed by sentry. And the
    // permission of the path will not be hive:hive.
    verifyGroupPermOnAllSubDirs("/tmp/external/p1", null, StaticUserGroup.HIVE, true);

    stmt.close();
    conn.close();
  }
/**
* Make sure when events such as add partition fail, the path should not be sync to NameNode plugin.
*/
@Test
public void testAddPartitionFailure() throws Throwable {
String dbName = "db1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
stmt.close();
conn.close();
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
stmt.execute("create external table tab2 (s string) partitioned by (month int)");
// Expect adding partition to fail because hive:hive does not have
// permission to write at parent directory.
miniDFS.getFileSystem().setOwner(tmpHDFSDir, "hdfs", "hdfs");
miniDFS.getFileSystem().setPermission(tmpHDFSDir, FsPermission.valueOf("drwxrwx---"));
try {
stmt.execute("alter table tab2 add partition (month = 1) location '" + tmpHDFSPartitionStr + "'");
Assert.fail("Expect adding partition to fail");
} catch (Exception ex) {
LOGGER.error("Exception when adding partition: " + ex.getMessage());
}
// When the table creation failed, the path will not be managed by sentry. And the
// permission of the path will not be hive:hive.
verifyGroupPermOnAllSubDirs("/tmp/external/p1", null, StaticUserGroup.HIVE, true);
stmt.close();
conn.close();
}
/**
* Make sure when events such as drop table fail, the path should not be sync to NameNode plugin.
*/
@Test
public void testDropTableFailure() throws Throwable {
String dbName = "db1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
stmt.close();
conn.close();
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
LOGGER.info("create external table in " + tmpHDFSPartitionStr);
stmt.execute("create external table tab1(a int) partitioned by (date1 string) location 'hdfs://" + tmpHDFSPartitionStr + "'");
miniDFS.getFileSystem().setOwner(tmpHDFSDir, "hdfs", "hdfs");
miniDFS.getFileSystem().setPermission(tmpHDFSDir, FsPermission.valueOf("drwxrwx---"));
// Expect dropping table to fail because hive:hive does not have
// permission to write at parent directory when
// hive.metastore.authorization.storage.checks property is true.
try {
stmt.execute("set hive.metastore.authorization.storage.checks=true");
stmt.execute("drop table tab1");
Assert.fail("Expect dropping table to fail");
} catch (Exception ex) {
LOGGER.error("Exception when creating table: " + ex.getMessage());
}
// When the table dropping failed, the path will still be managed by sentry. And the
// permission of the path still should be hive:hive.
verifyGroupPermOnAllSubDirs(tmpHDFSPartitionStr, FsAction.ALL, StaticUserGroup.HIVE, true);
stmt.close();
conn.close();
}
/**
* Make sure when events such as drop table fail, the path should not be sync to NameNode plugin.
*/
@Test
public void testDropPartitionFailure() throws Throwable {
String dbName = "db1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
stmt.close();
conn.close();
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
stmt.execute("create table tab3 (s string) partitioned by (month int)");
stmt.execute("alter table tab3 add partition (month = 1) location '" + tmpHDFSPartitionStr + "'");
miniDFS.getFileSystem().setOwner(tmpHDFSDir, "hdfs", "hdfs");
miniDFS.getFileSystem().setPermission(tmpHDFSDir, FsPermission.valueOf("drwxrwx---"));
// Expect dropping partition to fail because because hive:hive does not have
// permission to write at parent directory.
try {
stmt.execute("ALTER TABLE tab3 DROP PARTITION (month = 1)");
Assert.fail("Expect dropping partition to fail");
} catch (Exception ex) {
LOGGER.error("Exception when dropping partition: " + ex.getMessage());
}
// When the partition dropping failed, the path for the partition will still
// be managed by sentry. And the permission of the path still should be hive:hive.
verifyGroupPermOnAllSubDirs(tmpHDFSPartitionStr, FsAction.ALL, StaticUserGroup.HIVE, true);
stmt.close();
conn.close();
}
@Test
public void testURIsWithoutSchemeandAuthority() throws Throwable {
// In the local test environment, EXTERNAL_SENTRY_SERVICE is false,
// set the default URI scheme to be hdfs.
boolean testConfOff = Boolean.valueOf(System.getProperty(EXTERNAL_SENTRY_SERVICE, "false"));
if (!testConfOff) {
PathUtils.getConfiguration().set("fs.defaultFS", fsURI);
}
String dbName= "db1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role", "db_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
stmt.execute("create role db_role");
stmt.execute("grant all on database " + dbName +" to role db_role");
stmt.execute("grant all on URI '/tmp/external' to role db_role");
stmt.execute("grant role db_role to group " + StaticUserGroup.USERGROUP1);
conn = hiveServer2.createConnection(StaticUserGroup.USER1_1, StaticUserGroup.USER1_1);
stmt = conn.createStatement();
stmt.execute("use " + dbName);
stmt.execute("create external table tab1 (s string) location '/tmp/external'");
stmt.close();
conn.close();
}
/**
* Test combination of "grant all on URI" where URI has scheme,
* followed by "create external table" where location URI has no scheme.
* Neither URI has authority.
*/
@Test
public void testURIsWithAndWithoutSchemeNoAuthority() throws Throwable {
// In the local test environment, EXTERNAL_SENTRY_SERVICE is false,
// set the default URI scheme to be hdfs.
boolean testConfOff = Boolean.valueOf(System.getProperty(EXTERNAL_SENTRY_SERVICE, "false"));
if (!testConfOff) {
PathUtils.getConfiguration().set("fs.defaultFS", fsURI);
}
String dbName= "db1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role", "db_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
stmt.execute("create role db_role");
stmt.execute("grant all on database " + dbName +" to role db_role");
stmt.execute("grant all on URI 'hdfs:///tmp/external' to role db_role");
stmt.execute("grant role db_role to group " + StaticUserGroup.USERGROUP1);
conn = hiveServer2.createConnection(StaticUserGroup.USER1_1, StaticUserGroup.USER1_1);
stmt = conn.createStatement();
stmt.execute("use " + dbName);
stmt.execute("create external table tab1 (s string) location '/tmp/external'");
stmt.close();
conn.close();
}
/**
* Test combination of "grant all on URI" where URI has no scheme,
* followed by "create external table" where location URI has scheme.
* Neither URI has authority.
*/
@Test
public void testURIsWithoutAndWithSchemeNoAuthority() throws Throwable {
// In the local test environment, EXTERNAL_SENTRY_SERVICE is false,
// set the default URI scheme to be hdfs.
boolean testConfOff = Boolean.valueOf(System.getProperty(EXTERNAL_SENTRY_SERVICE, "false"));
if (!testConfOff) {
PathUtils.getConfiguration().set("fs.defaultFS", fsURI);
}
String dbName= "db1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role", "db_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
stmt.execute("create role db_role");
stmt.execute("grant all on database " + dbName +" to role db_role");
stmt.execute("grant all on URI '/tmp/external' to role db_role");
stmt.execute("grant role db_role to group " + StaticUserGroup.USERGROUP1);
conn = hiveServer2.createConnection(StaticUserGroup.USER1_1, StaticUserGroup.USER1_1);
stmt = conn.createStatement();
stmt.execute("use " + dbName);
stmt.execute("create external table tab1 (s string) location 'hdfs:///tmp/external'");
stmt.close();
conn.close();
}
/**
* Test combination of "grant all on URI" where URI has scheme and authority,
* followed by "create external table" where location URI has neither scheme nor authority.
*/
@Test
public void testURIsWithAndWithoutSchemeAndAuthority() throws Throwable {
// In the local test environment, EXTERNAL_SENTRY_SERVICE is false,
// set the default URI scheme to be hdfs.
boolean testConfOff = Boolean.valueOf(System.getProperty(EXTERNAL_SENTRY_SERVICE, "false"));
if (!testConfOff) {
PathUtils.getConfiguration().set("fs.defaultFS", fsURI);
}
String dbName= "db1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role", "db_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
stmt.execute("create role db_role");
stmt.execute("grant all on database " + dbName +" to role db_role");
stmt.execute("grant all on URI 'hdfs://" + new URI(fsURI).getAuthority() + "/tmp/external' to role db_role");
stmt.execute("grant role db_role to group " + StaticUserGroup.USERGROUP1);
conn = hiveServer2.createConnection(StaticUserGroup.USER1_1, StaticUserGroup.USER1_1);
stmt = conn.createStatement();
stmt.execute("use " + dbName);
stmt.execute("create external table tab1 (s string) location '/tmp/external'");
stmt.close();
conn.close();
}
//SENTRY-884
@Test
public void testAccessToTableDirectory() throws Throwable {
String dbName= "db1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role", "table_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
stmt.execute("use " + dbName);
stmt.execute("create table tb1(a string)");
stmt.execute("create role table_role");
stmt.execute("grant all on table tb1 to role table_role");
stmt.execute("grant role table_role to group " + StaticUserGroup.USERGROUP1);
Thread.sleep(WAIT_BEFORE_TESTVERIFY);//Wait till sentry cache is updated in Namenode
//Verify user1 is able to access table directory
verifyAccessToPath(StaticUserGroup.USER1_1, StaticUserGroup.USERGROUP1, "/user/hive/warehouse/db1.db/tb1", true);
stmt.close();
conn.close();
}
/* SENTRY-953 */
/* SENTRY-1471 - fixing the validation logic revealed that this test is broken.
* Disabling this test for now; to be fixed in a separate JIRA.
*/
@Test
public void testAuthzObjOnPartitionMultipleTables() throws Throwable {
String dbName = "db1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role", "tab1_role", "tab2_role", "tab3_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
// Create external table tab1 on location '/tmp/external/p1'.
// Create tab1_role, and grant it with insert permission on table tab1 to user_group1.
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
stmt.execute("use " + dbName);
stmt.execute("create external table tab1 (s string) partitioned by (month int) location '/tmp/external/p1'");
stmt.execute("create role tab1_role");
stmt.execute("grant insert on table tab1 to role tab1_role");
stmt.execute("grant role tab1_role to group " + StaticUserGroup.USERGROUP1);
Thread.sleep(WAIT_BEFORE_TESTVERIFY);//Wait till sentry cache is updated in Namenode
// Verify that user_group1 has insert(write_execute) permission on '/tmp/external/p1'.
verifyGroupPermOnAllSubDirs("/tmp/external/p1", FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP1, true);
// Create external table tab2 and partition on location '/tmp/external'.
// Create tab2_role, and grant it with select permission on table tab2 to user_group2.
stmt.execute("create external table tab2 (s string) partitioned by (month int)");
stmt.execute("alter table tab2 add partition (month = 1) location '" + tmpHDFSDirStr + "'");
stmt.execute("create role tab2_role");
stmt.execute("grant select on table tab2 to role tab2_role");
stmt.execute("grant role tab2_role to group " + StaticUserGroup.USERGROUP2);
Thread.sleep(WAIT_BEFORE_TESTVERIFY);//Wait till sentry cache is updated in Namenode
// Verify that user_group2 have select(read_execute) permission on both paths.
verifyGroupPermOnAllSubDirs("/user/hive/warehouse/" + dbName + ".db/tab2", FsAction.READ_EXECUTE, StaticUserGroup.USERGROUP2, true);
verifyGroupPermOnPath(tmpHDFSDirStr, FsAction.READ_EXECUTE, StaticUserGroup.USERGROUP2, true);
// Create table tab3 and partition on the same location '/tmp/external' as tab2.
// Create tab3_role, and grant it with insert permission on table tab3 to user_group3.
stmt.execute("create table tab3 (s string) partitioned by (month int)");
stmt.execute("alter table tab3 add partition (month = 1) location '" + tmpHDFSDirStr + "'");
stmt.execute("create role tab3_role");
stmt.execute("grant insert on table tab3 to role tab3_role");
stmt.execute("grant role tab3_role to group " + StaticUserGroup.USERGROUP3);
Thread.sleep(WAIT_BEFORE_TESTVERIFY);//Wait till sentry cache is updated in Namenode
// When two partitions of different tables pointing to the same location with different grants,
// ACLs should have union (no duplicates) of both rules.
verifyGroupPermOnAllSubDirs("/user/hive/warehouse/" + dbName + ".db/tab3", FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP3, true);
verifyGroupPermOnPath(tmpHDFSDirStr, FsAction.READ_EXECUTE, StaticUserGroup.USERGROUP2, true);
verifyGroupPermOnPath(tmpHDFSDirStr, FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP3, true);
// When alter the table name (tab2 to be tabx), ACLs should remain the same.
stmt.execute("alter table tab2 rename to tabx");
Thread.sleep(WAIT_BEFORE_TESTVERIFY);//Wait till sentry cache is updated in Namenode
verifyGroupPermOnPath(tmpHDFSDirStr, FsAction.READ_EXECUTE, StaticUserGroup.USERGROUP2, true);
verifyGroupPermOnPath(tmpHDFSDirStr, FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP3, true);
// When drop a partition that shares the same location with other partition belonging to
// other table, should still have the other table permissions.
stmt.execute("ALTER TABLE tabx DROP PARTITION (month = 1)");
Thread.sleep(WAIT_BEFORE_TESTVERIFY);//Wait till sentry cache is updated in Namenode
verifyGroupPermOnAllSubDirs("/user/hive/warehouse/" + dbName + ".db/tab3", FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP3, true);
verifyGroupPermOnPath(tmpHDFSDirStr, FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP3, true);
// When drop a table that has a partition shares the same location with other partition
// belonging to other table, should still have the other table permissions.
stmt.execute("DROP TABLE IF EXISTS tabx");
Thread.sleep(WAIT_BEFORE_TESTVERIFY);//Wait till sentry cache is updated in Namenode
verifyGroupPermOnAllSubDirs("/user/hive/warehouse/" + dbName + ".db/tab3", FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP3, true);
verifyGroupPermOnPath(tmpHDFSDirStr, FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP3, true);
stmt.close();
conn.close();
miniDFS.getFileSystem().delete(partitionDir, true);
}
/* SENTRY-953 */
@Test
public void testAuthzObjOnPartitionSameTable() throws Throwable {
String dbName = "db1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role", "tab1_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
// Create table tab1 and partition on the same location '/tmp/external/p1'.
// Create tab1_role, and grant it with insert permission on table tab1 to user_group1.
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
stmt.execute("use " + dbName);
stmt.execute("create table tab1 (s string) partitioned by (month int)");
stmt.execute("alter table tab1 add partition (month = 1) location '/tmp/external/p1'");
stmt.execute("create role tab1_role");
stmt.execute("grant insert on table tab1 to role tab1_role");
stmt.execute("grant role tab1_role to group " + StaticUserGroup.USERGROUP1);
Thread.sleep(WAIT_BEFORE_TESTVERIFY);//Wait till sentry cache is updated in Namenode
// Verify that user_group1 has insert(write_execute) permission on '/tmp/external/p1'.
verifyGroupPermOnAllSubDirs("/tmp/external/p1", FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP1, true);
// When two partitions of the same table pointing to the same location,
// ACLS should not be repeated. Exception will be thrown if there are duplicates.
stmt.execute("alter table tab1 add partition (month = 2) location '/tmp/external/p1'");
verifyGroupPermOnPath("/tmp/external/p1", FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP1, true);
stmt.close();
conn.close();
}
/* SENTRY-953 */
@Test
public void testAuthzObjOnMultipleTables() throws Throwable {
String dbName = "db1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role", "tab1_role", "tab2_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
// Create external table tab1 on location '/tmp/external/p1'.
// Create tab1_role, and grant it with insert permission on table tab1 to user_group1.
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
stmt.execute("use " + dbName);
stmt.execute("create external table tab1 (s string) partitioned by (month int) location '/tmp/external/p1'");
stmt.execute("create role tab1_role");
stmt.execute("grant insert on table tab1 to role tab1_role");
stmt.execute("grant role tab1_role to group " + StaticUserGroup.USERGROUP1);
Thread.sleep(WAIT_BEFORE_TESTVERIFY);//Wait till sentry cache is updated in Namenode
// Verify that user_group1 has insert(write_execute) permission on '/tmp/external/p1'.
verifyGroupPermOnAllSubDirs("/tmp/external/p1", FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP1, true);
// Create table tab2 on the same location '/tmp/external/p1' as table tab1.
// Create tab2_role, and grant it with select permission on table tab2 to user_group1.
stmt.execute("create table tab2 (s string) partitioned by (month int) location '/tmp/external/p1'");
stmt.execute("create role tab2_role");
stmt.execute("grant select on table tab2 to role tab2_role");
stmt.execute("grant role tab2_role to group " + StaticUserGroup.USERGROUP1);
// When two tables pointing to the same location, ACLS should have union (no duplicates)
// of both rules.
verifyGroupPermOnPath("/tmp/external/p1", FsAction.ALL, StaticUserGroup.USERGROUP1, true);
// When drop table tab1, ACLs of tab2 still remain.
stmt.execute("DROP TABLE IF EXISTS tab1");
Thread.sleep(WAIT_BEFORE_TESTVERIFY);//Wait till sentry cache is updated in Namenode
verifyGroupPermOnPath("/tmp/external/p1", FsAction.READ_EXECUTE, StaticUserGroup.USERGROUP1, true);
stmt.close();
conn.close();
}
/**
* SENTRY-1002:
* Ensure the paths with no scheme will not cause NPE during paths update.
*/
@Test
public void testMissingScheme() throws Throwable {
// In the local test environment, EXTERNAL_SENTRY_SERVICE is false,
// set the default URI scheme to be hdfs.
boolean testConfOff = Boolean.valueOf(System.getProperty(EXTERNAL_SENTRY_SERVICE, "false"));
if (!testConfOff) {
PathsUpdate.setDefaultScheme("hdfs");
}
String dbName = "db1";
String tblName = "tab1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role"};
admin = StaticUserGroup.ADMIN1;
Connection conn;
Statement stmt;
conn = hiveServer2.createConnection("hive", "hive");
stmt = conn.createStatement();
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
stmt.close();
conn.close();
conn = hiveServer2.createConnection(StaticUserGroup.ADMIN1, StaticUserGroup.ADMIN1);
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
stmt.execute("create external table " + dbName + "." + tblName + "(s string) location '/tmp/external/p1'");
// Deep copy of table tab1
Table tbCopy = hmsClient.getTable(dbName, tblName);
// Change the location of the table to strip the scheme.
StorageDescriptor sd = hmsClient.getTable(dbName, tblName).getSd();
sd.setLocation("/tmp/external");
tbCopy.setSd(sd);
// Alter table tab1 to be tbCopy which is at scheme-less location.
// And the corresponding path will be updated to sentry server.
hmsClient.alter_table(dbName, "tab1", tbCopy);
// Remove the checking for the location of the table. The HMS will never return scheme-less
// URI locations anymore. However, if any NPE being triggered in future because of any changes,
// the test case will cover it and capture it.
// i.e. hdfs://<localhost>/tmp/external (location with scheme)
// /tmp/external (location without scheme)
// Assert.assertEquals("/tmp/external", hmsClient.getTable(dbName, tblName).getSd().getLocation());
verifyGroupPermOnPath("/tmp/external", FsAction.ALL, StaticUserGroup.HIVE, true);
stmt.close();
conn.close();
}
@Test
public void testRenameHivePartitions() throws Throwable {
final String dbName = "db1";
final String tblName = "tab1";
final String newTblName = "tab2";
final String patName = "pat1";
dbNames = new String[]{dbName};
roles = new String[]{"admin_role"};
admin = StaticUserGroup.ADMIN1;
try (Connection conn = hiveServer2.createConnection("hive", "hive");
Statement stmt = conn.createStatement()) {
stmt.execute("create role admin_role");
stmt.execute("grant all on server server1 to role admin_role");
stmt.execute("grant role admin_role to group " + StaticUserGroup.ADMINGROUP);
}
try (Connection conn = hiveServer2.createConnection(
StaticUserGroup.ADMIN1, StaticUserGroup.ADMINGROUP);
Statement stmt = conn.createStatement()) {
stmt.execute("create database " + dbName);
stmt.execute("use " + dbName);
stmt.execute("create table " + tblName + " (s string) partitioned by (month int) ");
String tblPath = Paths.get("/user/hive/warehouse", dbName + ".db", tblName).toString();
String patPath = Paths.get(tblPath, patName).toString();
stmt.execute("alter table " + tblName + " add partition (month = 1) location '" +
patPath + "'");
stmt.execute("grant all on TABLE " + tblName + " to role admin_role");
stmt.execute("create role user_role");
stmt.execute("grant insert on table " + tblName + " to role user_role");
stmt.execute("grant role user_role to group " + StaticUserGroup.USERGROUP1);
// Rename the hive table
stmt.execute("alter table " + tblName + " rename to " + newTblName);
// Verify that the permissions are preserved.
String newTblPath = Paths.get("/user/hive/warehouse", dbName + ".db", newTblName).toString();
verifyGroupPermOnAllSubDirs(newTblPath, FsAction.ALL, StaticUserGroup.HIVE, true);
verifyGroupPermOnAllSubDirs(newTblPath, FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP1, true);
String newPatPath = new File(newTblPath, patName).toString();
verifyGroupPermOnPath(newPatPath, FsAction.ALL, StaticUserGroup.ADMINGROUP, true);
verifyGroupPermOnPath(newPatPath, FsAction.WRITE_EXECUTE, StaticUserGroup.USERGROUP1, true);
}
}
}
|
apache/lucene | 37,645 | lucene/demo/src/java/org/apache/lucene/demo/facet/SandboxFacetsExample.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.demo.facet;
import static org.apache.lucene.facet.FacetsConfig.DEFAULT_INDEX_FIELD_NAME;
import static org.apache.lucene.sandbox.facet.utils.ComparableUtils.byAggregatedValue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleDocValuesField;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.facet.DrillDownQuery;
import org.apache.lucene.facet.DrillSideways;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.LabelAndValue;
import org.apache.lucene.facet.MultiLongValuesSource;
import org.apache.lucene.facet.range.LongRange;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.sandbox.facet.FacetFieldCollectorManager;
import org.apache.lucene.sandbox.facet.cutters.TaxonomyFacetsCutter;
import org.apache.lucene.sandbox.facet.cutters.ranges.LongRangeFacetCutter;
import org.apache.lucene.sandbox.facet.iterators.ComparableSupplier;
import org.apache.lucene.sandbox.facet.iterators.OrdinalIterator;
import org.apache.lucene.sandbox.facet.iterators.TaxonomyChildrenOrdinalIterator;
import org.apache.lucene.sandbox.facet.iterators.TopnOrdinalIterator;
import org.apache.lucene.sandbox.facet.labels.RangeOrdToLabel;
import org.apache.lucene.sandbox.facet.labels.TaxonomyOrdLabelBiMap;
import org.apache.lucene.sandbox.facet.recorders.CountFacetRecorder;
import org.apache.lucene.sandbox.facet.recorders.LongAggregationsFacetRecorder;
import org.apache.lucene.sandbox.facet.recorders.MultiFacetsRecorder;
import org.apache.lucene.sandbox.facet.recorders.Reducer;
import org.apache.lucene.sandbox.facet.utils.ComparableUtils;
import org.apache.lucene.sandbox.facet.utils.DrillSidewaysFacetOrchestrator;
import org.apache.lucene.sandbox.facet.utils.FacetBuilder;
import org.apache.lucene.sandbox.facet.utils.FacetOrchestrator;
import org.apache.lucene.sandbox.facet.utils.RangeFacetBuilderFactory;
import org.apache.lucene.sandbox.facet.utils.TaxonomyFacetBuilder;
import org.apache.lucene.search.DoubleValuesSource;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LongValuesSource;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MultiCollectorManager;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollectorManager;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
/** Demo for sandbox faceting. */
public class SandboxFacetsExample {
private final Directory indexDir = new ByteBuffersDirectory();
private final Directory taxoDir = new ByteBuffersDirectory();
private final FacetsConfig config = new FacetsConfig();
private SandboxFacetsExample() {
config.setHierarchical("Publish Date", true);
config.setHierarchical("Author", false);
}
/** Build the example index. */
void index() throws IOException {
IndexWriter indexWriter =
new IndexWriter(
indexDir, new IndexWriterConfig(new WhitespaceAnalyzer()).setOpenMode(OpenMode.CREATE));
// Writes facet ords to a separate directory from the main index
DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
Document doc = new Document();
doc.add(new FacetField("Author", "Bob"));
doc.add(new FacetField("Publish Date", "2010", "10", "15"));
doc.add(new NumericDocValuesField("Price", 10));
doc.add(new NumericDocValuesField("Units", 9));
doc.add(new DoubleDocValuesField("Popularity", 3.5d));
indexWriter.addDocument(config.build(taxoWriter, doc));
doc = new Document();
doc.add(new FacetField("Author", "Lisa"));
doc.add(new FacetField("Publish Date", "2010", "10", "20"));
doc.add(new NumericDocValuesField("Price", 4));
doc.add(new NumericDocValuesField("Units", 2));
doc.add(new DoubleDocValuesField("Popularity", 4.1D));
indexWriter.addDocument(config.build(taxoWriter, doc));
doc = new Document();
doc.add(new FacetField("Author", "Lisa"));
doc.add(new FacetField("Publish Date", "2012", "1", "1"));
doc.add(new NumericDocValuesField("Price", 3));
doc.add(new NumericDocValuesField("Units", 5));
doc.add(new DoubleDocValuesField("Popularity", 3.9D));
indexWriter.addDocument(config.build(taxoWriter, doc));
doc = new Document();
doc.add(new FacetField("Author", "Susan"));
doc.add(new FacetField("Publish Date", "2012", "1", "7"));
doc.add(new NumericDocValuesField("Price", 8));
doc.add(new NumericDocValuesField("Units", 7));
doc.add(new DoubleDocValuesField("Popularity", 4D));
indexWriter.addDocument(config.build(taxoWriter, doc));
doc = new Document();
doc.add(new FacetField("Author", "Frank"));
doc.add(new FacetField("Publish Date", "1999", "5", "5"));
doc.add(new NumericDocValuesField("Price", 9));
doc.add(new NumericDocValuesField("Units", 6));
doc.add(new DoubleDocValuesField("Popularity", 4.9D));
indexWriter.addDocument(config.build(taxoWriter, doc));
IOUtils.close(indexWriter, taxoWriter);
}
/**
* Example for {@link FacetBuilder} usage - simple API that provides results in a format very
* similar to classic facets module. It doesn't give all flexibility available with {@link
* org.apache.lucene.sandbox.facet.cutters.FacetCutter} and {@link
* org.apache.lucene.sandbox.facet.recorders.FacetRecorder} though, see below for lower level API
* usage examples.
*/
private List<FacetResult> simpleFacetsWithSearch() throws IOException {
//// init readers and searcher
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
//// build facets requests
FacetBuilder authorFacetBuilder =
new TaxonomyFacetBuilder(config, taxoReader, "Author").withTopN(10);
FacetBuilder priceFacetBuilder =
RangeFacetBuilderFactory.forLongRanges(
"Price",
new LongRange("0-10", 0, true, 10, true),
new LongRange("10-20", 10, true, 20, true));
//// Main hits collector
TopScoreDocCollectorManager hitsCollectorManager =
new TopScoreDocCollectorManager(2, Integer.MAX_VALUE);
//// Search and collect
TopDocs topDocs =
new FacetOrchestrator()
.addBuilder(authorFacetBuilder)
.addBuilder(priceFacetBuilder)
.collect(new MatchAllDocsQuery(), searcher, hitsCollectorManager);
System.out.println(
"Search results: totalHits: "
+ topDocs.totalHits
+ ", collected hits: "
+ topDocs.scoreDocs.length);
//// Results
FacetResult authorResults = authorFacetBuilder.getResult();
FacetResult rangeResults = priceFacetBuilder.getResult();
IOUtils.close(indexReader, taxoReader);
return List.of(authorResults, rangeResults);
}
/** Example for {@link FacetBuilder} usage with {@link DrillSideways}. */
private List<FacetResult> simpleFacetsWithDrillSideways() throws IOException {
//// init readers and searcher
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
DrillSideways ds = new DrillSideways(searcher, config, taxoReader);
//// build facets requests
FacetBuilder authorFacetBuilder =
new TaxonomyFacetBuilder(config, taxoReader, "Author").withTopN(10);
FacetBuilder priceFacetBuilder =
RangeFacetBuilderFactory.forLongRanges(
"Price",
new LongRange("0-10", 0, true, 10, true),
new LongRange("10-20", 10, true, 20, true));
//// Build query and collect
DrillDownQuery query = new DrillDownQuery(config);
query.add("Author", "Lisa");
new DrillSidewaysFacetOrchestrator()
.addDrillDownBuilder(priceFacetBuilder)
.addDrillSidewaysBuilder("Author", authorFacetBuilder)
.collect(query, ds);
//// Results
FacetResult authorResults = authorFacetBuilder.getResult();
FacetResult rangeResults = priceFacetBuilder.getResult();
IOUtils.close(indexReader, taxoReader);
return List.of(authorResults, rangeResults);
}
/** User runs a query and counts facets only without collecting the matching documents. */
  List<FacetResult> facetsOnly() throws IOException {
    //// (1) init readers and searcher
    DirectoryReader indexReader = DirectoryReader.open(indexDir);
    IndexSearcher searcher = new IndexSearcher(indexReader);
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
    //// (2) init collector: a cutter produces facet ordinals per doc, a recorder counts them
    TaxonomyFacetsCutter defaultTaxoCutter =
        new TaxonomyFacetsCutter(DEFAULT_INDEX_FIELD_NAME, config, taxoReader);
    CountFacetRecorder defaultRecorder = new CountFacetRecorder();
    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
        new FacetFieldCollectorManager<>(defaultTaxoCutter, defaultRecorder);
    // (2.1) if we need to collect data using multiple different collectors, e.g. taxonomy and
    // ranges, or even two taxonomy facets that use different Category List Field, we can
    // use MultiCollectorManager, e.g.:
    //
    // TODO: add a demo for it.
    // TaxonomyFacetsCutter publishDateCutter = new
    // TaxonomyFacetsCutter(config.getDimConfig("Publish Date"), taxoReader);
    // CountFacetRecorder publishDateRecorder = new CountFacetRecorder(false);
    // FacetFieldCollectorManager<CountFacetRecorder> publishDateCollectorManager = new
    // FacetFieldCollectorManager<>(publishDateCutter, publishDateRecorder);
    // MultiCollectorManager drillDownCollectorManager = new
    // MultiCollectorManager(authorCollectorManager, publishDateCollectorManager);
    // Object[] results = searcher.search(new MatchAllDocsQuery(), drillDownCollectorManager);
    //// (3) search
    // Search returns the same Recorder we created - so we can ignore results
    searcher.search(new MatchAllDocsQuery(), collectorManager);
    //// (4) Get top 10 results by count for Author and Publish Date
    // This object is used to get topN results by count
    ComparableSupplier<ComparableUtils.ByCountComparable> countComparable =
        ComparableUtils.byCount(defaultRecorder);
    // We don't actually need to use FacetResult, it is up to client what to do with the results.
    // Here we just want to demo that we can still do FacetResult as well
    List<FacetResult> results = new ArrayList<>(2);
    // This object provides labels for ordinals.
    TaxonomyOrdLabelBiMap ordLabels = new TaxonomyOrdLabelBiMap(taxoReader);
    for (String dimension : List.of("Author", "Publish Date")) {
      //// (4.1) Chain two ordinal iterators to get top N children
      int dimOrdinal = ordLabels.getOrd(new FacetLabel(dimension));
      // Restrict recorded ordinals to direct children of the dimension's root ordinal
      OrdinalIterator childrenIterator =
          new TaxonomyChildrenOrdinalIterator(
              defaultRecorder.recordedOrds(),
              taxoReader.getParallelTaxonomyArrays().parents(),
              dimOrdinal);
      OrdinalIterator topByCountOrds =
          new TopnOrdinalIterator<>(childrenIterator, countComparable, 10);
      // Get array of final ordinals - we need to use all of them to get labels first, and then to
      // get counts,
      // but OrdinalIterator only allows reading ordinals once.
      int[] resultOrdinals = topByCountOrds.toArray();
      //// (4.2) Use faceting results
      FacetLabel[] labels = ordLabels.getLabels(resultOrdinals);
      List<LabelAndValue> labelsAndValues = new ArrayList<>(labels.length);
      for (int i = 0; i < resultOrdinals.length; i++) {
        labelsAndValues.add(
            new LabelAndValue(
                labels[i].lastComponent(), defaultRecorder.getCount(resultOrdinals[i])));
      }
      // count recorded for the dimension's own (root) ordinal
      int dimensionValue = defaultRecorder.getCount(dimOrdinal);
      results.add(
          new FacetResult(
              dimension,
              new String[0],
              dimensionValue,
              labelsAndValues.toArray(new LabelAndValue[0]),
              labelsAndValues.size()));
    }
    IOUtils.close(indexReader, taxoReader);
    return results;
  }
/**
* User runs a query and counts facets for exclusive ranges without collecting the matching
* documents
*/
List<FacetResult> exclusiveRangesCountFacetsOnly() throws IOException {
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
MultiLongValuesSource valuesSource = MultiLongValuesSource.fromLongField("Price");
// Exclusive ranges example
LongRange[] inputRanges = new LongRange[2];
inputRanges[0] = new LongRange("0-5", 0, true, 5, true);
inputRanges[1] = new LongRange("5-10", 5, false, 10, true);
LongRangeFacetCutter longRangeFacetCutter =
LongRangeFacetCutter.create(valuesSource, inputRanges);
CountFacetRecorder countRecorder = new CountFacetRecorder();
FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
new FacetFieldCollectorManager<>(longRangeFacetCutter, countRecorder);
searcher.search(new MatchAllDocsQuery(), collectorManager);
RangeOrdToLabel ordToLabels = new RangeOrdToLabel(inputRanges);
ComparableSupplier<ComparableUtils.ByCountComparable> countComparable =
ComparableUtils.byCount(countRecorder);
OrdinalIterator topByCountOrds =
new TopnOrdinalIterator<>(countRecorder.recordedOrds(), countComparable, 10);
List<FacetResult> results = new ArrayList<>(2);
int[] resultOrdinals = topByCountOrds.toArray();
FacetLabel[] labels = ordToLabels.getLabels(resultOrdinals);
List<LabelAndValue> labelsAndValues = new ArrayList<>(labels.length);
for (int i = 0; i < resultOrdinals.length; i++) {
labelsAndValues.add(
new LabelAndValue(labels[i].lastComponent(), countRecorder.getCount(resultOrdinals[i])));
}
results.add(
new FacetResult(
"Price", new String[0], 0, labelsAndValues.toArray(new LabelAndValue[0]), 0));
System.out.println("Computed counts");
IOUtils.close(indexReader);
return results;
}
  /**
   * Counts facets for overlapping long ranges over the Price field without collecting the matching
   * documents. A single document can contribute to multiple overlapping ranges.
   */
  List<FacetResult> overlappingRangesCountFacetsOnly() throws IOException {
    DirectoryReader indexReader = DirectoryReader.open(indexDir);
    IndexSearcher searcher = new IndexSearcher(indexReader);
    MultiLongValuesSource valuesSource = MultiLongValuesSource.fromLongField("Price");
    // overlapping ranges example: [0, 5] is contained in [0, 10]
    LongRange[] inputRanges = new LongRange[2];
    inputRanges[0] = new LongRange("0-5", 0, true, 5, true);
    inputRanges[1] = new LongRange("0-10", 0, true, 10, true);
    LongRangeFacetCutter longRangeFacetCutter =
        LongRangeFacetCutter.create(valuesSource, inputRanges);
    CountFacetRecorder countRecorder = new CountFacetRecorder();
    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
        new FacetFieldCollectorManager<>(longRangeFacetCutter, countRecorder);
    searcher.search(new MatchAllDocsQuery(), collectorManager);
    // Pick the top-10 range ordinals by count, then resolve their labels
    RangeOrdToLabel ordToLabels = new RangeOrdToLabel(inputRanges);
    ComparableSupplier<ComparableUtils.ByCountComparable> countComparable =
        ComparableUtils.byCount(countRecorder);
    OrdinalIterator topByCountOrds =
        new TopnOrdinalIterator<>(countRecorder.recordedOrds(), countComparable, 10);
    List<FacetResult> results = new ArrayList<>(2);
    int[] resultOrdinals = topByCountOrds.toArray();
    FacetLabel[] labels = ordToLabels.getLabels(resultOrdinals);
    List<LabelAndValue> labelsAndValues = new ArrayList<>(labels.length);
    for (int i = 0; i < resultOrdinals.length; i++) {
      labelsAndValues.add(
          new LabelAndValue(labels[i].lastComponent(), countRecorder.getCount(resultOrdinals[i])));
    }
    results.add(
        new FacetResult(
            "Price", new String[0], 0, labelsAndValues.toArray(new LabelAndValue[0]), 0));
    System.out.println("Computed counts");
    IOUtils.close(indexReader);
    return results;
  }
List<FacetResult> exclusiveRangesAggregationFacets() throws IOException {
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
MultiLongValuesSource valuesSource = MultiLongValuesSource.fromLongField("Price");
// Exclusive ranges example
LongRange[] inputRanges = new LongRange[2];
inputRanges[0] = new LongRange("0-5", 0, true, 5, true);
inputRanges[1] = new LongRange("5-10", 5, false, 10, true);
LongRangeFacetCutter longRangeFacetCutter =
LongRangeFacetCutter.create(valuesSource, inputRanges);
// initialise the aggregations to be computed - a values source + reducer
LongValuesSource[] longValuesSources = new LongValuesSource[2];
Reducer[] reducers = new Reducer[2];
// popularity:max
longValuesSources[0] = DoubleValuesSource.fromDoubleField("Popularity").toLongValuesSource();
reducers[0] = Reducer.MAX;
// units:sum
longValuesSources[1] = LongValuesSource.fromLongField("Units");
reducers[1] = Reducer.SUM;
LongAggregationsFacetRecorder longAggregationsFacetRecorder =
new LongAggregationsFacetRecorder(longValuesSources, reducers);
CountFacetRecorder countRecorder = new CountFacetRecorder();
// Compute both counts and aggregations
MultiFacetsRecorder multiFacetsRecorder =
new MultiFacetsRecorder(countRecorder, longAggregationsFacetRecorder);
FacetFieldCollectorManager<MultiFacetsRecorder> collectorManager =
new FacetFieldCollectorManager<>(longRangeFacetCutter, multiFacetsRecorder);
searcher.search(new MatchAllDocsQuery(), collectorManager);
RangeOrdToLabel ordToLabels = new RangeOrdToLabel(inputRanges);
// Get recorded ords - use either count/aggregations recorder
OrdinalIterator recordedOrds = longAggregationsFacetRecorder.recordedOrds();
// We don't actually need to use FacetResult, it is up to client what to do with the results.
// Here we just want to demo that we can still do FacetResult as well
List<FacetResult> results = new ArrayList<>(2);
ComparableSupplier<ComparableUtils.ByAggregatedValueComparable> comparableSupplier;
OrdinalIterator topOrds;
int[] resultOrdinals;
FacetLabel[] labels;
List<LabelAndValue> labelsAndValues;
// Sort results by units:sum and tie-break by count
comparableSupplier = byAggregatedValue(countRecorder, longAggregationsFacetRecorder, 1);
topOrds = new TopnOrdinalIterator<>(recordedOrds, comparableSupplier, 10);
resultOrdinals = topOrds.toArray();
labels = ordToLabels.getLabels(resultOrdinals);
labelsAndValues = new ArrayList<>(labels.length);
for (int i = 0; i < resultOrdinals.length; i++) {
labelsAndValues.add(
new LabelAndValue(
labels[i].lastComponent(),
longAggregationsFacetRecorder.getRecordedValue(resultOrdinals[i], 1)));
}
results.add(
new FacetResult(
"Price", new String[0], 0, labelsAndValues.toArray(new LabelAndValue[0]), 0));
// note: previous ordinal iterator was exhausted
recordedOrds = longAggregationsFacetRecorder.recordedOrds();
// Sort results by popularity:max and tie-break by count
comparableSupplier = byAggregatedValue(countRecorder, longAggregationsFacetRecorder, 0);
topOrds = new TopnOrdinalIterator<>(recordedOrds, comparableSupplier, 10);
resultOrdinals = topOrds.toArray();
labels = ordToLabels.getLabels(resultOrdinals);
labelsAndValues = new ArrayList<>(labels.length);
for (int i = 0; i < resultOrdinals.length; i++) {
labelsAndValues.add(
new LabelAndValue(
labels[i].lastComponent(),
longAggregationsFacetRecorder.getRecordedValue(resultOrdinals[i], 0)));
}
results.add(
new FacetResult(
"Price", new String[0], 0, labelsAndValues.toArray(new LabelAndValue[0]), 0));
return results;
}
/** User runs a query and counts facets. */
  private List<FacetResult> facetsWithSearch() throws IOException {
    //// (1) init readers and searcher
    DirectoryReader indexReader = DirectoryReader.open(indexDir);
    IndexSearcher searcher = new IndexSearcher(indexReader);
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
    //// (2) init collectors
    // Facet collectors
    TaxonomyFacetsCutter defaultTaxoCutter =
        new TaxonomyFacetsCutter(DEFAULT_INDEX_FIELD_NAME, config, taxoReader);
    CountFacetRecorder defaultRecorder = new CountFacetRecorder();
    FacetFieldCollectorManager<CountFacetRecorder> taxoFacetsCollectorManager =
        new FacetFieldCollectorManager<>(defaultTaxoCutter, defaultRecorder);
    // Hits collector: top 2 docs, exact total hit count
    TopScoreDocCollectorManager hitsCollectorManager =
        new TopScoreDocCollectorManager(2, Integer.MAX_VALUE);
    // Now wrap them with MultiCollectorManager to collect both hits and facets.
    MultiCollectorManager collectorManager =
        new MultiCollectorManager(hitsCollectorManager, taxoFacetsCollectorManager);
    //// (3) search
    Object[] results = searcher.search(new MatchAllDocsQuery(), collectorManager);
    TopDocs topDocs = (TopDocs) results[0];
    System.out.println(
        "Search results: totalHits: "
            + topDocs.totalHits
            + ", collected hits: "
            + topDocs.scoreDocs.length);
    // FacetFieldCollectorManager returns the same recorder instance it was given, so we can
    // read the results from the original recorder and ignore the value in results[1]:
    // CountFacetRecorder defaultRecorder = (CountFacetRecorder) results[1];
    //// (4) Get top 10 results by count for Author and Publish Date
    // This object is used to get topN results by count
    ComparableSupplier<ComparableUtils.ByCountComparable> countComparable =
        ComparableUtils.byCount(defaultRecorder);
    // We don't actually need to use FacetResult, it is up to client what to do with the results.
    // Here we just want to demo that we can still do FacetResult as well
    List<FacetResult> facetResults = new ArrayList<>(2);
    // This object provides labels for ordinals.
    TaxonomyOrdLabelBiMap ordLabels = new TaxonomyOrdLabelBiMap(taxoReader);
    for (String dimension : List.of("Author", "Publish Date")) {
      int dimensionOrdinal = ordLabels.getOrd(new FacetLabel(dimension));
      //// (4.1) Chain two ordinal iterators to get top N children
      OrdinalIterator childrenIterator =
          new TaxonomyChildrenOrdinalIterator(
              defaultRecorder.recordedOrds(),
              taxoReader.getParallelTaxonomyArrays().parents(),
              dimensionOrdinal);
      OrdinalIterator topByCountOrds =
          new TopnOrdinalIterator<>(childrenIterator, countComparable, 10);
      // Get array of final ordinals - we need to use all of them to get labels first, and then to
      // get counts,
      // but OrdinalIterator only allows reading ordinals once.
      int[] resultOrdinals = topByCountOrds.toArray();
      //// (4.2) Use faceting results
      FacetLabel[] labels = ordLabels.getLabels(resultOrdinals);
      List<LabelAndValue> labelsAndValues = new ArrayList<>(labels.length);
      for (int i = 0; i < resultOrdinals.length; i++) {
        labelsAndValues.add(
            new LabelAndValue(
                labels[i].lastComponent(), defaultRecorder.getCount(resultOrdinals[i])));
      }
      // count recorded for the dimension's own (root) ordinal
      int dimensionValue = defaultRecorder.getCount(dimensionOrdinal);
      facetResults.add(
          new FacetResult(
              dimension,
              new String[0],
              dimensionValue,
              labelsAndValues.toArray(new LabelAndValue[0]),
              labelsAndValues.size()));
    }
    IOUtils.close(indexReader, taxoReader);
    return facetResults;
  }
/** User drills down on 'Publish Date/2010', and we return facets for 'Author' */
  FacetResult drillDown() throws IOException {
    //// (1) init readers and searcher
    DirectoryReader indexReader = DirectoryReader.open(indexDir);
    IndexSearcher searcher = new IndexSearcher(indexReader);
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
    //// (2) init collector
    TaxonomyFacetsCutter defaultTaxoCutter =
        new TaxonomyFacetsCutter(DEFAULT_INDEX_FIELD_NAME, config, taxoReader);
    CountFacetRecorder defaultRecorder = new CountFacetRecorder();
    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
        new FacetFieldCollectorManager<>(defaultTaxoCutter, defaultRecorder);
    // Restrict the query to documents matching Publish Date/2010
    DrillDownQuery q = new DrillDownQuery(config);
    q.add("Publish Date", "2010");
    //// (3) search
    // Right now we return the same Recorder we created - so we can ignore results
    searcher.search(q, collectorManager);
    //// (4) Get top 10 results by count for Author
    // This object is used to get topN results by count
    ComparableSupplier<ComparableUtils.ByCountComparable> countComparable =
        ComparableUtils.byCount(defaultRecorder);
    // This object provides labels for ordinals.
    TaxonomyOrdLabelBiMap ordLabels = new TaxonomyOrdLabelBiMap(taxoReader);
    String dimension = "Author";
    //// (4.1) Chain two ordinal iterators to get top N children
    int dimOrdinal = ordLabels.getOrd(new FacetLabel(dimension));
    OrdinalIterator childrenIterator =
        new TaxonomyChildrenOrdinalIterator(
            defaultRecorder.recordedOrds(),
            taxoReader.getParallelTaxonomyArrays().parents(),
            dimOrdinal);
    OrdinalIterator topByCountOrds =
        new TopnOrdinalIterator<>(childrenIterator, countComparable, 10);
    // Get array of final ordinals - we need to use all of them to get labels first, and then to get
    // counts,
    // but OrdinalIterator only allows reading ordinals once.
    int[] resultOrdinals = topByCountOrds.toArray();
    //// (4.2) Use faceting results
    FacetLabel[] labels = ordLabels.getLabels(resultOrdinals);
    List<LabelAndValue> labelsAndValues = new ArrayList<>(labels.length);
    for (int i = 0; i < resultOrdinals.length; i++) {
      labelsAndValues.add(
          new LabelAndValue(
              labels[i].lastComponent(), defaultRecorder.getCount(resultOrdinals[i])));
    }
    IOUtils.close(indexReader, taxoReader);
    // count recorded for the dimension's own (root) ordinal
    int dimensionValue = defaultRecorder.getCount(dimOrdinal);
    // We don't actually need to use FacetResult, it is up to client what to do with the results.
    // Here we just want to demo that we can still do FacetResult as well
    return new FacetResult(
        dimension,
        new String[0],
        dimensionValue,
        labelsAndValues.toArray(new LabelAndValue[0]),
        labelsAndValues.size());
  }
/**
* User drills down on 'Publish Date/2010', and we return facets for both 'Publish Date' and
* 'Author', using DrillSideways.
*/
  private List<FacetResult> drillSideways() throws IOException {
    //// (1) init readers and searcher
    DirectoryReader indexReader = DirectoryReader.open(indexDir);
    IndexSearcher searcher = new IndexSearcher(indexReader);
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
    //// (2) init drill down query and collectors
    TaxonomyFacetsCutter defaultTaxoCutter =
        new TaxonomyFacetsCutter(DEFAULT_INDEX_FIELD_NAME, config, taxoReader);
    CountFacetRecorder drillDownRecorder = new CountFacetRecorder();
    FacetFieldCollectorManager<CountFacetRecorder> drillDownCollectorManager =
        new FacetFieldCollectorManager<>(defaultTaxoCutter, drillDownRecorder);
    DrillDownQuery q = new DrillDownQuery(config);
    //// (2.1) add query and collector dimensions
    q.add("Publish Date", "2010");
    // Separate recorder for the sideways dimension: it counts over hits that match the
    // query with the "Publish Date" filter removed
    CountFacetRecorder publishDayDimensionRecorder = new CountFacetRecorder();
    // Note: it is safe to reuse the same FacetsCutter here because a leaf cutter is created
    // per leaf for each FacetFieldCollectorManager anyway, and leaf cutters are never merged.
    FacetFieldCollectorManager<CountFacetRecorder> publishDayDimensionCollectorManager =
        new FacetFieldCollectorManager<>(defaultTaxoCutter, publishDayDimensionRecorder);
    List<FacetFieldCollectorManager<CountFacetRecorder>> drillSidewaysManagers =
        List.of(publishDayDimensionCollectorManager);
    //// (3) search
    // Right now we return the same Recorder we created - so we can ignore results
    DrillSideways ds = new DrillSideways(searcher, config, taxoReader);
    ds.search(q, drillDownCollectorManager, drillSidewaysManagers);
    //// (4) Get top 10 results by count for Author
    List<FacetResult> facetResults = new ArrayList<>(2);
    // This object provides labels for ordinals.
    TaxonomyOrdLabelBiMap ordLabels = new TaxonomyOrdLabelBiMap(taxoReader);
    // This object is used to get topN results by count
    ComparableSupplier<ComparableUtils.ByCountComparable> countComparable =
        ComparableUtils.byCount(drillDownRecorder);
    //// (4.1) Chain two ordinal iterators to get top N children
    int dimOrdinal = ordLabels.getOrd(new FacetLabel("Author"));
    OrdinalIterator childrenIterator =
        new TaxonomyChildrenOrdinalIterator(
            drillDownRecorder.recordedOrds(),
            taxoReader.getParallelTaxonomyArrays().parents(),
            dimOrdinal);
    OrdinalIterator topByCountOrds =
        new TopnOrdinalIterator<>(childrenIterator, countComparable, 10);
    // Get array of final ordinals - we need to use all of them to get labels first, and then to get
    // counts,
    // but OrdinalIterator only allows reading ordinals once.
    int[] resultOrdinals = topByCountOrds.toArray();
    //// (4.2) Use faceting results
    FacetLabel[] labels = ordLabels.getLabels(resultOrdinals);
    List<LabelAndValue> labelsAndValues = new ArrayList<>(labels.length);
    for (int i = 0; i < resultOrdinals.length; i++) {
      labelsAndValues.add(
          new LabelAndValue(
              labels[i].lastComponent(), drillDownRecorder.getCount(resultOrdinals[i])));
    }
    int dimensionValue = drillDownRecorder.getCount(dimOrdinal);
    facetResults.add(
        new FacetResult(
            "Author",
            new String[0],
            dimensionValue,
            labelsAndValues.toArray(new LabelAndValue[0]),
            labelsAndValues.size()));
    //// (5) Same process, but for Publish Date drill sideways dimension
    countComparable = ComparableUtils.byCount(publishDayDimensionRecorder);
    //// (5.1) Chain two ordinal iterators to get top N children
    dimOrdinal = ordLabels.getOrd(new FacetLabel("Publish Date"));
    childrenIterator =
        new TaxonomyChildrenOrdinalIterator(
            publishDayDimensionRecorder.recordedOrds(),
            taxoReader.getParallelTaxonomyArrays().parents(),
            dimOrdinal);
    topByCountOrds = new TopnOrdinalIterator<>(childrenIterator, countComparable, 10);
    // Get array of final ordinals - we need to use all of them to get labels first, and then to get
    // counts,
    // but OrdinalIterator only allows reading ordinals once.
    resultOrdinals = topByCountOrds.toArray();
    //// (5.2) Use faceting results
    labels = ordLabels.getLabels(resultOrdinals);
    labelsAndValues = new ArrayList<>(labels.length);
    for (int i = 0; i < resultOrdinals.length; i++) {
      labelsAndValues.add(
          new LabelAndValue(
              labels[i].lastComponent(), publishDayDimensionRecorder.getCount(resultOrdinals[i])));
    }
    dimensionValue = publishDayDimensionRecorder.getCount(dimOrdinal);
    facetResults.add(
        new FacetResult(
            "Publish Date",
            new String[0],
            dimensionValue,
            labelsAndValues.toArray(new LabelAndValue[0]),
            labelsAndValues.size()));
    IOUtils.close(indexReader, taxoReader);
    return facetResults;
  }
  /** Runs the simple {@link FacetBuilder}-based search example. */
  public List<FacetResult> runSimpleFacetsWithSearch() throws IOException {
    index();
    return simpleFacetsWithSearch();
  }
  /** Runs the simple {@link FacetBuilder}-based drill sideways example. */
  public List<FacetResult> runSimpleFacetsWithDrillSideways() throws IOException {
    index();
    return simpleFacetsWithDrillSideways();
  }
  /** Runs the facet-counting-only example (no hits are collected). */
  public List<FacetResult> runFacetOnly() throws IOException {
    index();
    return facetsOnly();
  }
  /** Runs the combined search-and-facet-counting example. */
  public List<FacetResult> runSearch() throws IOException {
    index();
    return facetsWithSearch();
  }
  /** Runs the drill-down example. */
  public FacetResult runDrillDown() throws IOException {
    index();
    return drillDown();
  }
  /** Runs the drill-sideways example. */
  public List<FacetResult> runDrillSideways() throws IOException {
    index();
    return drillSideways();
  }
  /** Runs the example of non overlapping range facets */
  public List<FacetResult> runNonOverlappingRangesCountFacetsOnly() throws IOException {
    index();
    return exclusiveRangesCountFacetsOnly();
  }
  /** Runs the example of overlapping range facets */
  public List<FacetResult> runOverlappingRangesCountFacetsOnly() throws IOException {
    index();
    return overlappingRangesCountFacetsOnly();
  }
  /** Runs the example of collecting long aggregations for non overlapping range facets. */
  public List<FacetResult> runNonOverlappingRangesAggregationFacets() throws IOException {
    index();
    return exclusiveRangesAggregationFacets();
  }
/** Runs the search and drill-down examples and prints the results. */
  public static void main(String[] args) throws Exception {
    // Each run* method rebuilds the example index before running its example.
    SandboxFacetsExample example = new SandboxFacetsExample();
    System.out.println("Simple facet counting example:");
    System.out.println("---------------------------------------------");
    for (FacetResult result : example.runSimpleFacetsWithSearch()) {
      System.out.println(result);
    }
    System.out.println("Simple facet counting for drill sideways example:");
    System.out.println("---------------------------------------------");
    for (FacetResult result : example.runSimpleFacetsWithDrillSideways()) {
      System.out.println(result);
    }
    System.out.println("Facet counting example:");
    System.out.println("-----------------------");
    List<FacetResult> results1 = example.runFacetOnly();
    System.out.println("Author: " + results1.get(0));
    System.out.println("Publish Date: " + results1.get(1));
    System.out.println("Facet counting example (combined facets and search):");
    System.out.println("-----------------------");
    List<FacetResult> results = example.runSearch();
    System.out.println("Author: " + results.get(0));
    System.out.println("Publish Date: " + results.get(1));
    System.out.println("Facet drill-down example (Publish Date/2010):");
    System.out.println("---------------------------------------------");
    System.out.println("Author: " + example.runDrillDown());
    System.out.println("Facet drill-sideways example (Publish Date/2010):");
    System.out.println("---------------------------------------------");
    for (FacetResult result : example.runDrillSideways()) {
      System.out.println(result);
    }
    System.out.println("Facet counting example with exclusive ranges:");
    System.out.println("---------------------------------------------");
    for (FacetResult result : example.runNonOverlappingRangesCountFacetsOnly()) {
      System.out.println(result);
    }
    System.out.println("Facet counting example with overlapping ranges:");
    System.out.println("---------------------------------------------");
    for (FacetResult result : example.runOverlappingRangesCountFacetsOnly()) {
      System.out.println(result);
    }
    System.out.println("Facet aggregation example with exclusive ranges:");
    System.out.println("---------------------------------------------");
    for (FacetResult result : example.runNonOverlappingRangesAggregationFacets()) {
      System.out.println(result);
    }
  }
}
|
apache/openjpa | 37,434 | openjpa-persistence/src/main/java/org/apache/openjpa/persistence/PersistenceMetaDataDefaults.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.persistence;
import static jakarta.persistence.AccessType.FIELD;
import static jakarta.persistence.AccessType.PROPERTY;
import static org.apache.openjpa.persistence.PersistenceStrategy.BASIC;
import static org.apache.openjpa.persistence.PersistenceStrategy.ELEM_COLL;
import static org.apache.openjpa.persistence.PersistenceStrategy.EMBEDDED;
import static org.apache.openjpa.persistence.PersistenceStrategy.MANY_MANY;
import static org.apache.openjpa.persistence.PersistenceStrategy.MANY_ONE;
import static org.apache.openjpa.persistence.PersistenceStrategy.ONE_MANY;
import static org.apache.openjpa.persistence.PersistenceStrategy.ONE_ONE;
import static org.apache.openjpa.persistence.PersistenceStrategy.PERS;
import static org.apache.openjpa.persistence.PersistenceStrategy.PERS_COLL;
import static org.apache.openjpa.persistence.PersistenceStrategy.PERS_MAP;
import static org.apache.openjpa.persistence.PersistenceStrategy.TRANSIENT;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Field;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.security.AccessController;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import jakarta.persistence.Access;
import jakarta.persistence.AccessType;
import jakarta.persistence.Basic;
import jakarta.persistence.ElementCollection;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Embedded;
import jakarta.persistence.EmbeddedId;
import jakarta.persistence.ManyToMany;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.OneToMany;
import jakarta.persistence.OneToOne;
import jakarta.persistence.PostLoad;
import jakarta.persistence.PostPersist;
import jakarta.persistence.PostRemove;
import jakarta.persistence.PostUpdate;
import jakarta.persistence.PrePersist;
import jakarta.persistence.PreRemove;
import jakarta.persistence.PreUpdate;
import jakarta.persistence.Transient;
import org.apache.openjpa.conf.OpenJPAConfiguration;
import org.apache.openjpa.enhance.Reflection;
import org.apache.openjpa.lib.log.Log;
import org.apache.openjpa.lib.util.J2DoPrivHelper;
import org.apache.openjpa.lib.util.Localizer;
import org.apache.openjpa.meta.AbstractMetaDataDefaults;
import org.apache.openjpa.meta.AccessCode;
import org.apache.openjpa.meta.ClassMetaData;
import org.apache.openjpa.meta.FieldMetaData;
import org.apache.openjpa.meta.JavaTypes;
import org.apache.openjpa.meta.MetaDataRepository;
import org.apache.openjpa.meta.ValueMetaData;
import org.apache.openjpa.util.InternalException;
import org.apache.openjpa.util.MetaDataException;
import org.apache.openjpa.util.UserException;
/**
* JPA-based metadata defaults.
*
* @author Patrick Linskey
* @author Abe White
* @author Pinaki Poddar
*/
public class PersistenceMetaDataDefaults
extends AbstractMetaDataDefaults {
// Localized messages for this package.
private static final Localizer _loc = Localizer.forPackage
    (PersistenceMetaDataDefaults.class);
// Maps a persistence annotation type to its strategy code; consulted by
// getPersistenceStrategy() when classifying a member.
private static final Map<Class<?>, PersistenceStrategy> _strats =
    new HashMap<>();
// Fully-qualified names of annotations that never make a member persistent
// (lifecycle callbacks and OpenJPA's DetachedState marker).
private static final Set<String> _ignoredAnnos = new HashSet<>();
static {
    _strats.put(Basic.class, BASIC);
    _strats.put(ManyToOne.class, MANY_ONE);
    _strats.put(OneToOne.class, ONE_ONE);
    _strats.put(Embedded.class, EMBEDDED);
    _strats.put(EmbeddedId.class, EMBEDDED);
    _strats.put(OneToMany.class, ONE_MANY);
    _strats.put(ManyToMany.class, MANY_MANY);
    _strats.put(Persistent.class, PERS);
    _strats.put(PersistentCollection.class, PERS_COLL);
    _strats.put(ElementCollection.class, ELEM_COLL);
    _strats.put(PersistentMap.class, PERS_MAP);
    _ignoredAnnos.add(DetachedState.class.getName());
    _ignoredAnnos.add(PostLoad.class.getName());
    _ignoredAnnos.add(PostPersist.class.getName());
    _ignoredAnnos.add(PostRemove.class.getName());
    _ignoredAnnos.add(PostUpdate.class.getName());
    _ignoredAnnos.add(PrePersist.class.getName());
    _ignoredAnnos.add(PreRemove.class.getName());
    _ignoredAnnos.add(PreUpdate.class.getName());
}
/**
 * Set of Inclusion Filters based on member type, access type or transient
 * annotations. Used to determine the persistent field/methods.
 */
protected AccessFilter propertyAccessFilter = new AccessFilter(PROPERTY);
protected AccessFilter fieldAccessFilter = new AccessFilter(FIELD);
protected MemberFilter fieldFilter = new MemberFilter(Field.class);
protected MemberFilter methodFilter = new MemberFilter(Method.class);
protected TransientFilter nonTransientFilter = new TransientFilter(false);
protected AnnotatedFilter annotatedFilter = new AnnotatedFilter();
protected GetterFilter getterFilter = new GetterFilter();
protected SetterFilter setterFilter = new SetterFilter();
// Lazily resolved from the configuration on first use; null means "unset".
private Boolean _isAbstractMappingUniDirectional = null;
private Boolean _isNonDefaultMappingAllowed = null;
private String _defaultSchema;
private Boolean _isCascadePersistPersistenceUnitDefaultEnabled = null;
/**
 * Constructs defaults with JPA-mandated callback semantics (rethrow,
 * rollback, fail-fast) and unwrapped datastore-identity fields.
 */
public PersistenceMetaDataDefaults() {
    setCallbackMode(CALLBACK_RETHROW | CALLBACK_ROLLBACK |
        CALLBACK_FAIL_FAST);
    setDataStoreObjectIdFieldUnwrapped(true);
}
/**
 * Return the code for the strategy of the given member. Return null if
 * no strategy. Convenience overload that does not ignore
 * {@code @Transient}.
 */
public static PersistenceStrategy getPersistenceStrategy
    (FieldMetaData fmd, Member member) {
    return getPersistenceStrategy(fmd, member, false);
}
/**
 * Return the code for the strategy of the given member. Return null if
 * no strategy.
 *
 * @param fmd field metadata, may be null when classifying a raw member
 * @param member the field or getter method to classify
 * @param ignoreTransient if true, a {@code @Transient} annotation does
 * not short-circuit to TRANSIENT
 * @throws MetaDataException if more than one strategy annotation is
 * present on the member
 */
public static PersistenceStrategy getPersistenceStrategy
    (FieldMetaData fmd, Member member, boolean ignoreTransient) {
    if (member == null)
        return null;
    AnnotatedElement el = (AnnotatedElement) member;
    // @Transient wins over everything else unless explicitly ignored
    if (!ignoreTransient && AccessController.doPrivileged(J2DoPrivHelper
        .isAnnotationPresentAction(el, Transient.class)))
        return TRANSIENT;
    // non-persistent managed fields have no strategy
    if (fmd != null
        && fmd.getManagement() != FieldMetaData.MANAGE_PERSISTENT)
        return null;
    // look for persistence strategy in annotation table
    PersistenceStrategy pstrat = null;
    for (Annotation anno : el.getDeclaredAnnotations()) {
        // a second strategy annotation on the same member is an error
        if (pstrat != null && _strats.containsKey(anno.annotationType()))
            throw new MetaDataException(_loc.get("already-pers", member));
        if (pstrat == null)
            pstrat = _strats.get(anno.annotationType());
    }
    if (pstrat != null)
        return pstrat;
    // no explicit annotation: infer from the member's Java type
    Class type;
    int code;
    if (fmd != null) {
        type = fmd.getType();
        code = fmd.getTypeCode();
    } else if (member instanceof Field) {
        type = ((Field) member).getType();
        code = JavaTypes.getTypeCode(type);
    } else {
        type = ((Method) member).getReturnType();
        code = JavaTypes.getTypeCode(type);
    }
    switch (code) {
        case JavaTypes.ARRAY:
            // only byte/char arrays (and their wrappers) default to BASIC
            if (type == byte[].class
                || type == char[].class
                || type == Byte[].class
                || type == Character[].class)
                return BASIC;
            break;
        case JavaTypes.BOOLEAN:
        case JavaTypes.BOOLEAN_OBJ:
        case JavaTypes.BYTE:
        case JavaTypes.BYTE_OBJ:
        case JavaTypes.CHAR:
        case JavaTypes.CHAR_OBJ:
        case JavaTypes.DOUBLE:
        case JavaTypes.DOUBLE_OBJ:
        case JavaTypes.FLOAT:
        case JavaTypes.FLOAT_OBJ:
        case JavaTypes.INT:
        case JavaTypes.INT_OBJ:
        case JavaTypes.LONG:
        case JavaTypes.LONG_OBJ:
        case JavaTypes.SHORT:
        case JavaTypes.SHORT_OBJ:
        case JavaTypes.STRING:
        case JavaTypes.BIGDECIMAL:
        case JavaTypes.BIGINTEGER:
        case JavaTypes.DATE:
        case JavaTypes.LOCAL_DATE:
        case JavaTypes.LOCAL_TIME:
        case JavaTypes.LOCAL_DATETIME:
        case JavaTypes.OFFSET_TIME:
        case JavaTypes.OFFSET_DATETIME:
            return BASIC;
        case JavaTypes.OBJECT:
            if (Enum.class.isAssignableFrom(type))
                return BASIC;
            break;
    }
    //### EJB3: what if defined in XML?
    // embeddable types default to EMBEDDED, serializable ones to BASIC
    if (AccessController.doPrivileged(J2DoPrivHelper
        .isAnnotationPresentAction(type, Embeddable.class)))
        return EMBEDDED;
    if (Serializable.class.isAssignableFrom(type))
        return BASIC;
    return null;
}
/**
 * Auto-configuration method for the default access type of base classes
 * with ACCESS_UNKNOWN.
 *
 * @param type either {@code "PROPERTY"} or {@code "FIELD"},
 * case-insensitive; must not be null
 * @throws IllegalArgumentException if the string names neither access type
 */
public void setDefaultAccessType(String type) {
    // fail fast with a clear message instead of a bare NPE from toUpperCase
    Objects.requireNonNull(type, "default access type");
    // uppercase once rather than once per comparison
    switch (type.toUpperCase(Locale.ENGLISH)) {
        case "PROPERTY":
            setDefaultAccessType(AccessCode.PROPERTY);
            break;
        case "FIELD":
            setDefaultAccessType(AccessCode.FIELD);
            break;
        default:
            throw new IllegalArgumentException(_loc.get("access-invalid",
                type).toString());
    }
}
/**
 * Populates the given class metadata. The access style determines which
 * field and/or getter method will contribute as the persistent property
 * of the given class. If the given access is unknown, then the access
 * type is to be determined at first.
 *
 * @see #determineAccessType(ClassMetaData)
 */
@Override
public void populate(ClassMetaData meta, int access) {
    populate(meta, access, false);
}
/**
 * Populates the given class metadata. The access style determines which
 * field and/or getter method will contribute as the persistent property
 * of the given class. If the given access is unknown, then the access
 * type is to be determined at first.
 *
 * @param ignoreTransient if true, {@code @Transient} members are treated
 * as persistent candidates
 * @see #determineAccessType(ClassMetaData)
 */
@Override
public void populate(ClassMetaData meta, int access, boolean ignoreTransient) {
    if (AccessCode.isUnknown(access)) {
        access = determineAccessType(meta);
    }
    // error() throws, so unknown access never reaches the super call
    if (AccessCode.isUnknown(access)) {
        error(meta, _loc.get("access-unknown", meta));
    }
    super.populate(meta, access, ignoreTransient);
    meta.setDetachable(true);
    // do not call get*Fields as it will lock down the fields.
}
/**
 * Populates field metadata by disabling automatic cascades on the field
 * and on its key and element values.
 */
@Override
protected void populate(FieldMetaData fmd) {
    setCascadeNone(fmd);
    setCascadeNone(fmd.getKey());
    setCascadeNone(fmd.getElement());
}
/**
 * Turns off auto cascading of persist, refresh, attach, detach.
 */
static void setCascadeNone(ValueMetaData vmd) {
    vmd.setCascadePersist(ValueMetaData.CASCADE_NONE);
    vmd.setCascadeRefresh(ValueMetaData.CASCADE_NONE);
    vmd.setCascadeAttach(ValueMetaData.CASCADE_NONE);
    vmd.setCascadeDetach(ValueMetaData.CASCADE_NONE);
}
/**
 * Returns metadata for the superclass of the described type, preferring a
 * cached instance; returns null for a null input or when the superclass is
 * absent or {@code java.lang.Object}.
 */
ClassMetaData getCachedSuperclassMetaData(ClassMetaData meta) {
    if (meta == null)
        return null;
    Class<?> superType = meta.getDescribedType().getSuperclass();
    if (superType == null || "java.lang.Object".equals(superType.getName()))
        return null;
    MetaDataRepository repository = meta.getRepository();
    ClassMetaData cached = repository.getCachedMetaData(superType);
    // fall back to a non-validating lookup when nothing is cached yet
    return cached != null ? cached : repository.getMetaData(superType, null, false);
}
/**
 * Recursive helper to determine access type based on annotation placement
 * on members for the given class without an explicit access annotation.
 *
 * Resolution order: managed interfaces are PROPERTY; then the metadata's
 * own (already-known) access; then an explicit {@code @Access} on the
 * class; then implicit placement of annotations on fields vs. getters;
 * then the nearest superclass whose access is not explicit; finally the
 * configured default.
 *
 * @return must return a not-unknown access code
 */
private int determineAccessType(ClassMetaData meta) {
    if (meta == null)
        return AccessCode.UNKNOWN;
    if (meta.getDescribedType().isInterface()) // managed interfaces
        return AccessCode.PROPERTY;
    if (!AccessCode.isUnknown(meta))
        return meta.getAccessType();
    int access = determineExplicitAccessType(meta.getDescribedType());
    if (!AccessCode.isUnknown(access))
        return access;
    access = determineImplicitAccessType(meta.getDescribedType(),
        meta.getRepository().getConfiguration());
    if (!AccessCode.isUnknown(access))
        return access;
    // walk up past superclasses that carry an explicit @Access, since an
    // explicit declaration there does not set this class's default
    ClassMetaData sup = getCachedSuperclassMetaData(meta);
    ClassMetaData tmpSup = sup;
    while (tmpSup != null && tmpSup.isExplicitAccess()) {
        tmpSup = getCachedSuperclassMetaData(tmpSup);
        if (tmpSup != null) {
            sup = tmpSup;
        }
    }
    if (sup != null && !AccessCode.isUnknown(sup))
        return sup.getAccessType();
    trace(meta, _loc.get("access-default", meta, AccessCode.toClassString(getDefaultAccessType())));
    return getDefaultAccessType();
}
/**
 * Determines the access type for the given class by placement of
 * annotations on field or getter method. Does not consult the
 * super class.
 *
 * Annotation can be placed on either fields or getters but not on both.
 * If no field or getter is annotated then UNKNOWN access code is returned.
 *
 * @throws UserException if both annotated fields and annotated getters
 * are present (mixed implicit access)
 */
private int determineImplicitAccessType(Class<?> cls, OpenJPAConfiguration
    conf) {
    if (cls.isInterface()) // Managed interfaces
        return AccessCode.PROPERTY;
    Field[] allFields = AccessController.doPrivileged(J2DoPrivHelper.
        getDeclaredFieldsAction(cls));
    Method[] methods = AccessController.doPrivileged(
        J2DoPrivHelper.getDeclaredMethodsAction(cls));
    // modifier-only filter: exclude fields with the transient modifier
    List<Field> fields = filter(allFields, new TransientFilter(true));
    /*
     * OpenJPA 1.x permitted private properties to be persistent. This is
     * contrary to the JPA 1.0 specification, which states that persistent
     * properties must be public or protected. OpenJPA 2.0+ will adhere
     * to the specification by default, but provides a compatibility
     * option to provide pre-2.0 behavior.
     */
    getterFilter.setIncludePrivate(
        conf.getCompatibilityInstance().getPrivatePersistentProperties());
    List<Method> getters = filter(methods, getterFilter);
    // a class with no candidate members at all is "empty"
    if (fields.isEmpty() && getters.isEmpty())
        return AccessCode.EMPTY;
    // only annotated members decide the implicit access style
    fields = filter(fields, annotatedFilter);
    getters = filter(getters, annotatedFilter);
    List<Method> setters = filter(methods, setterFilter);
    getters = matchGetterAndSetter(getters, setters);
    boolean mixed = !fields.isEmpty() && !getters.isEmpty();
    if (mixed)
        throw new UserException(_loc.get("access-mixed",
            cls, toFieldNames(fields), toMethodNames(getters)));
    if (!fields.isEmpty()) {
        return AccessCode.FIELD;
    }
    if (!getters.isEmpty()) {
        return AccessCode.PROPERTY;
    }
    return AccessCode.UNKNOWN;
}
/**
 * Explicit access type, if any, is generally detected by the parser. This
 * is only used for metadata of an embeddable type which is encountered
 * as a field during some other owning entity.
 *
 * @return UNKNOWN when no {@code @Access} annotation is present;
 * otherwise FIELD or PROPERTY combined with the EXPLICIT bit
 * @see ValueMetaData#addEmbeddedMetaData()
 */
private int determineExplicitAccessType(Class<?> cls) {
    Access access = cls.getAnnotation(Access.class);
    if (access == null)
        return AccessCode.UNKNOWN;
    int code = (access.value() == AccessType.FIELD)
        ? AccessCode.FIELD
        : AccessCode.PROPERTY;
    return code | AccessCode.EXPLICIT;
}
/**
 * Matches the given getters with the given setters. Removes the getters
 * that do not have a corresponding setter. A setter corresponds when its
 * name is "set" + the getter's property name and its single argument type
 * equals the getter's return type.
 *
 * @return the (mutated) getters list with unmatched entries removed
 */
private List<Method> matchGetterAndSetter(List<Method> getters,
    List<Method> setters) {
    Collection<Method> unmatched = new ArrayList<>();
    for (Method getter : getters) {
        String getterName = getter.getName();
        Class<?> getterReturnType = getter.getReturnType();
        // strip the "is"/"get" prefix to derive the expected setter name
        String expectedSetterName = "set" + getterName.substring(
            (isBooleanGetter(getter) ? "is" : "get").length());
        boolean matched = false;
        for (Method setter : setters) {
            // setters are pre-filtered to have exactly one parameter
            Class<?> setterArgType = setter.getParameterTypes()[0];
            String actualSetterName = setter.getName();
            matched = actualSetterName.equals(expectedSetterName)
                && setterArgType == getterReturnType;
            if (matched)
                break;
        }
        if (!matched) {
            unmatched.add(getter);
        }
    }
    getters.removeAll(unmatched);
    return getters;
}
/**
 * Gets the fields that are possible candidate for being persisted. The
 * result depends on the current access style of the given class.
 *
 * @param ignoreTransient if true, the transient filter is not applied
 * @return candidate fields, or an immutable empty list when the class
 * uses pure property access
 */
List<Field> getPersistentFields(ClassMetaData meta, boolean ignoreTransient) {
    boolean explicit = meta.isExplicitAccess();
    boolean unknown = AccessCode.isUnknown(meta);
    boolean isField = AccessCode.isField(meta);
    if (explicit || unknown || isField) {
        Field[] fields = AccessController.doPrivileged(J2DoPrivHelper.
            getDeclaredFieldsAction(meta.getDescribedType()));
        // null filter entries are skipped by filter()
        return filter(fields, fieldFilter,
            ignoreTransient ? null : nonTransientFilter,
            unknown || isField ? null : annotatedFilter,
            explicit ? (isField ? null : fieldAccessFilter) : null);
    }
    // typed emptyList() instead of the raw EMPTY_LIST constant
    return Collections.emptyList();
}
/**
 * Gets the methods that are possible candidate for being persisted. The
 * result depends on the current access style of the given class.
 *
 * @param ignoreTransient if true, the transient filter is not applied
 * @return candidate getter methods that have matching setters, or an
 * immutable empty list when the class uses pure field access
 */
List<Method> getPersistentMethods(ClassMetaData meta, boolean ignoreTransient) {
    boolean explicit = meta.isExplicitAccess();
    boolean unknown = AccessCode.isUnknown(meta.getAccessType());
    boolean isProperty = AccessCode.isProperty(meta.getAccessType());
    if (explicit || unknown || isProperty) {
        Method[] publicMethods = AccessController.doPrivileged(
            J2DoPrivHelper.getDeclaredMethodsAction(meta.getDescribedType()));
        /*
         * OpenJPA 1.x permitted private accessor properties to be persistent. This is
         * contrary to the JPA 1.0 specification, which states that persistent
         * properties must be public or protected. OpenJPA 2.0+ will adhere
         * to the specification by default, but provides a compatibility
         * option to provide pre-2.0 behavior.
         */
        getterFilter.setIncludePrivate(
            meta.getRepository().getConfiguration().getCompatibilityInstance().getPrivatePersistentProperties());
        List<Method> getters = filter(publicMethods, methodFilter,
            getterFilter,
            ignoreTransient ? null : nonTransientFilter,
            unknown || isProperty ? null : annotatedFilter,
            explicit ? (isProperty ? null : propertyAccessFilter) : null);
        List<Method> setters = filter(publicMethods, setterFilter);
        // return directly; the original's "return getters = ..." assignment
        // was a no-op
        return matchGetterAndSetter(getters, setters);
    }
    // typed emptyList() instead of the raw EMPTY_LIST constant
    return Collections.emptyList();
}
/**
 * Gets the members that are backing members for attributes being persisted.
 * Unlike {@linkplain #getPersistentFields(ClassMetaData, boolean)} and
 * {@linkplain #getPersistentMethods(ClassMetaData, boolean)} which return
 * <em>possible</em> candidates, the result of this method is definite.
 *
 * Side-effect of this method is if the given class metadata has
 * no access type set, this method will set it.
 *
 * @throws UserException (via {@link #error}) on mixed implicit access
 */
@Override
public List<Member> getPersistentMembers(ClassMetaData meta, boolean ignoreTransient) {
    List<Member> members = new ArrayList<>();
    List<Field> fields = getPersistentFields(meta, ignoreTransient);
    List<Method> getters = getPersistentMethods(meta, ignoreTransient);
    boolean isMixed = !fields.isEmpty() && !getters.isEmpty();
    boolean isEmpty = fields.isEmpty() && getters.isEmpty();
    boolean explicit = meta.isExplicitAccess();
    boolean unknown = AccessCode.isUnknown(meta.getAccessType());
    if (isEmpty) {
        warn(meta, _loc.get("access-empty", meta));
        // typed emptyList() instead of the raw EMPTY_LIST constant
        return Collections.emptyList();
    }
    if (explicit) {
        if (isMixed) {
            assertNoDuplicate(fields, getters);
            meta.setAccessType(AccessCode.MIXED | meta.getAccessType());
            members.addAll(fields);
            members.addAll(getters);
        } else {
            members.addAll(fields.isEmpty() ? getters : fields);
        }
    } else {
        // without explicit @Access, mixed placement is an error
        if (isMixed)
            error(meta, _loc.get("access-mixed", meta, fields, getters));
        if (fields.isEmpty()) {
            meta.setAccessType(AccessCode.PROPERTY);
            members.addAll(getters);
        } else {
            meta.setAccessType(AccessCode.FIELD);
            members.addAll(fields);
        }
    }
    return members;
}
// Intentionally a no-op hook: subclasses may verify that the same
// attribute is not represented by both a field and a getter.
void assertNoDuplicate(List<Field> fields, List<Method> getters) {
}
/**
 * Logs the given message at error level on the runtime channel and then
 * aborts by throwing a {@link UserException} with the same text.
 */
void error(ClassMetaData meta, Localizer.Message message) {
    String text = message.toString();
    Log log = meta.getRepository().getConfiguration()
        .getLog(OpenJPAConfiguration.LOG_RUNTIME);
    log.error(text);
    throw new UserException(text);
}
/** Logs the given message at warn level on the runtime channel. */
void warn(ClassMetaData meta, Localizer.Message message) {
    OpenJPAConfiguration conf = meta.getRepository().getConfiguration();
    conf.getLog(OpenJPAConfiguration.LOG_RUNTIME).warn(message.toString());
}
/** Logs the given message at trace level on the runtime channel. */
void trace(ClassMetaData meta, Localizer.Message message) {
    OpenJPAConfiguration conf = meta.getRepository().getConfiguration();
    conf.getLog(OpenJPAConfiguration.LOG_RUNTIME).trace(message.toString());
}
// Names of candidate persistent fields, in declaration order.
@Override
protected List<String> getFieldAccessNames(ClassMetaData meta) {
    return toNames(getPersistentFields(meta, false));
}
// Names of candidate persistent getter methods.
@Override
protected List<String> getPropertyAccessNames(ClassMetaData meta) {
    return toNames(getPersistentMethods(meta, false));
}
// Convenience overload that does not ignore @Transient.
protected boolean isDefaultPersistent(ClassMetaData meta, Member member,
    String name) {
    return isDefaultPersistent(meta, member, name, false);
}
/**
 * Affirms whether the given member is persistent by default: it must not
 * carry the transient modifier, must be consistent with the class's
 * access style (or explicitly annotated for the other style), getters
 * must have a matching setter, and a persistence strategy other than
 * TRANSIENT must be derivable for it.
 */
@Override
protected boolean isDefaultPersistent(ClassMetaData meta, Member member,
    String name, boolean ignoreTransient) {
    int mods = member.getModifiers();
    if (Modifier.isTransient(mods))
        return false;
    int access = meta.getAccessType();
    if (member instanceof Field) {
        // If mixed or unknown, default property access, keep explicit
        // field members
        if (AccessCode.isProperty(access)) {
            if (!isAnnotatedAccess(member, AccessType.FIELD))
                return false;
        }
    }
    else if (member instanceof Method) {
        // If mixed or unknown, field default access, keep explicit property
        // members
        if (AccessCode.isField(access)) {
            if (!isAnnotatedAccess(member, AccessType.PROPERTY))
                return false;
        }
        try {
            // derive the setter name from the "is"/"get" getter prefix
            String setterName;
            if (member.getName().startsWith("is")) {
                setterName = "set" + member.getName().substring(2);
            } else {
                setterName = "set" + member.getName().substring(3);
            }
            // check for setters for methods
            Method setter =
                (Method) AccessController.doPrivileged(J2DoPrivHelper.getDeclaredMethodAction(
                    meta.getDescribedType(), setterName, new Class[] { ((Method) member).getReturnType() }));
            if (setter == null && !isAnnotatedTransient(member)) {
                logNoSetter(meta, name, null);
                return false;
            }
        } catch (Exception e) {
            // e.g., NoSuchMethodException
            if (!isAnnotatedTransient(member))
                logNoSetter(meta, name, e);
            return false;
        }
    }
    PersistenceStrategy strat = getPersistenceStrategy(null, member, ignoreTransient);
    if (strat == null) {
        warn(meta, _loc.get("no-pers-strat", meta.getDescribedTypeString() + "." + name));
        return false;
    } else if (strat == PersistenceStrategy.TRANSIENT) {
        return false;
    } else {
        return true;
    }
}
/** Affirms whether the member carries the {@code @Transient} annotation. */
private boolean isAnnotatedTransient(Member member) {
    if (!(member instanceof AnnotatedElement))
        return false;
    AnnotatedElement element = (AnnotatedElement) member;
    return AccessController.doPrivileged(J2DoPrivHelper
        .isAnnotationPresentAction(element, Transient.class));
}
/**
 * May be used to determine if member is annotated with the specified
 * access type.
 * @param member class member
 * @param type expected access type
 * @return true if access is specified on member and that access
 * type matches the expected type
 */
private boolean isAnnotatedAccess(Member member, AccessType type) {
    if (member == null)
        return false;
    Access anno =
        AccessController.doPrivileged(J2DoPrivHelper
            .getAnnotationAction((AnnotatedElement)member,
                Access.class));
    return anno != null && anno.value() == type;
}
/**
 * Affirms whether the member carries any recognized persistence-related
 * annotation. The {@code instanceof} operator is null-safe, so no
 * separate null check is needed.
 */
private boolean isAnnotated(Member member) {
    return member instanceof AnnotatedElement
        && annotatedFilter.includes((AnnotatedElement) member);
}
/**
 * Affirms whether the member is neither annotated {@code @Transient} nor
 * declared with the transient modifier. {@code instanceof} handles null.
 */
private boolean isNotTransient(Member member) {
    return member instanceof AnnotatedElement
        && nonTransientFilter.includes((AnnotatedElement) member);
}
/**
 * Gets either the instance field or the getter method depending upon the
 * access style of the given meta-data. May upgrade the metadata's access
 * code to MIXED, FIELD or PROPERTY as a side effect when the member's own
 * {@code @Access} annotation contradicts or determines the class default.
 *
 * @throws IllegalStateException if both field and getter are annotated
 * and non-transient, or if access cannot be determined
 */
@Override
public Member getMemberByProperty(ClassMetaData meta, String property,
    int access, boolean applyDefaultRule) {
    Class<?> cls = meta.getDescribedType();
    Field field = Reflection.findField(cls, property, false);
    Method getter = Reflection.findGetter(cls, property, false);
    Method setter = Reflection.findSetter(cls, property, false);
    int accessCode = AccessCode.isUnknown(access) ? meta.getAccessType() :
        access;
    if (field == null && getter == null)
        error(meta, _loc.get("access-no-property", cls, property));
    // the same property must not be claimed by both a field and a getter
    if ((isNotTransient(getter) && isAnnotated(getter)) &&
        isNotTransient(field) && isAnnotated(field))
        throw new IllegalStateException(_loc.get("access-duplicate",
            field, getter).toString());
    if (AccessCode.isField(accessCode)) {
        // an explicit @Access(PROPERTY) getter overrides field access
        if (isAnnotatedAccess(getter, AccessType.PROPERTY)) {
            meta.setAccessType(AccessCode.MIXED | meta.getAccessType());
            return getter;
        }
        return field == null ? getter : field;
    } else if (AccessCode.isProperty(accessCode)) {
        // an explicit @Access(FIELD) field overrides property access
        if (isAnnotatedAccess(field, AccessType.FIELD)) {
            meta.setAccessType(AccessCode.MIXED | meta.getAccessType());
            return field;
        }
        return getter == null ? field : getter;
    } else if (AccessCode.isUnknown(accessCode)) {
        // decide the class default from whichever member is annotated
        if (isAnnotated(field)) {
            meta.setAccessType(AccessCode.FIELD);
            return field;
        } else if (isAnnotated(getter)) {
            meta.setAccessType(AccessCode.PROPERTY);
            return getter;
        } else {
            warn(meta, _loc.get("access-none", meta, property));
            throw new IllegalStateException(
                _loc.get("access-none", meta, property).toString());
        }
    } else {
        throw new InternalException(meta + " " +
            AccessCode.toClassString(meta.getAccessType()));
    }
}
// ========================================================================
// Selection Filters select specific elements from a collection.
// Used to determine the persistent members of a given class.
// ========================================================================
/**
 * Inclusive element filtering predicate over annotated elements
 * (fields or methods).
 */
private interface InclusiveFilter<T extends AnnotatedElement> {
    /**
     * Return true to include the given element.
     */
    boolean includes(T e);
}
/**
 * Filter the given array with the conjunction of filters. The given
 * array itself is not modified. Delegates to the list overload rather
 * than duplicating the filtering loop.
 *
 * @param filters null entries are treated as always-include
 */
<T extends AnnotatedElement> List<T> filter(T[] array,
    InclusiveFilter... filters) {
    List<T> list = new ArrayList<>(array.length);
    Collections.addAll(list, array);
    return filter(list, filters);
}
/**
 * Filter the given collection with the conjunction of filters. The given
 * collection itself is not modified; a new list is returned.
 *
 * @param filters null entries are treated as always-include
 */
<T extends AnnotatedElement> List<T> filter(List<T> list,
    InclusiveFilter... filters) {
    List<T> result = new ArrayList<>();
    for (T e : list) {
        boolean include = true;
        for (InclusiveFilter f : filters) {
            if (f != null && !f.includes(e)) {
                include = false;
                break;
            }
        }
        if (include)
            result.add(e);
    }
    return result;
}
/**
 * Selects getter method. A getter method name starts with 'get', returns a
 * non-void type and has no argument. Or starts with 'is', returns a boolean
 * and has no argument.
 */
static class GetterFilter implements InclusiveFilter<Method> {
    // when true, private getters are also accepted (pre-2.0 compatibility)
    private boolean includePrivate;
    @Override
    public boolean includes(Method method) {
        return isGetter(method, isIncludePrivate());
    }
    public void setIncludePrivate(boolean includePrivate) {
        this.includePrivate = includePrivate;
    }
    public boolean isIncludePrivate() {
        return includePrivate;
    }
}
/**
 * Selects setter method. A setter method name starts with 'set', returns a
 * void and has single argument.
 */
static class SetterFilter implements InclusiveFilter<Method> {
    @Override
    public boolean includes(Method method) {
        return isSetter(method);
    }
    /**
     * Affirms if the given method matches the following signature
     * <code> public void setXXX(T t) </code>
     */
    public static boolean isSetter(Method method) {
        String methodName = method.getName();
        return startsWith(methodName, "set")
            && method.getParameterTypes().length == 1
            && method.getReturnType() == void.class;
    }
}
/**
 * Selects elements which is annotated with @Access annotation and that
 * annotation has the given AccessType value.
 */
static class AccessFilter implements InclusiveFilter<AnnotatedElement> {
    final AccessType target;
    public AccessFilter(AccessType target) {
        this.target = target;
    }
    @Override
    public boolean includes(AnnotatedElement obj) {
        Access access = obj.getAnnotation(Access.class);
        // enum constants are singletons: compare with ==, which is also
        // null-safe and the idiomatic form for enums
        return access != null && access.value() == target;
    }
}
/**
 * Selects members of the given reflective kind (Field or Method) that are
 * not static, final, transient or native. (The previous javadoc was a
 * copy-paste of AccessFilter's and described @Access matching, which this
 * filter does not do.)
 */
static class MemberFilter implements InclusiveFilter<AnnotatedElement> {
    final Class<?> target;
    public MemberFilter(Class<?> target) {
        this.target = target;
    }
    @Override
    public boolean includes(AnnotatedElement obj) {
        int mods = ((Member)obj).getModifiers();
        return obj.getClass() == target &&
            !(Modifier.isStatic(mods) || Modifier.isFinal(mods)
            || Modifier.isTransient(mods) || Modifier.isNative(mods));
    }
}
/**
 * Selects non-transient elements. When constructed with
 * {@code modOnly == true}, only the Java {@code transient} modifier is
 * examined; otherwise the {@code @Transient} annotation is checked too.
 */
static class TransientFilter implements InclusiveFilter<AnnotatedElement> {
    final boolean modifierOnly;
    public TransientFilter(boolean modOnly) {
        modifierOnly = modOnly;
    }
    @Override
    public boolean includes(AnnotatedElement obj) {
        boolean hasTransientModifier =
            Modifier.isTransient(((Member) obj).getModifiers());
        if (modifierOnly)
            return !hasTransientModifier;
        return !hasTransientModifier
            && !obj.isAnnotationPresent(Transient.class);
    }
}
/**
 * Selects all element annotated with <code>jakarta.persistence.*</code> or
 * <code>org.apache.openjpa.*</code> annotation except the annotations
 * marked to be ignored (lifecycle callbacks and DetachedState).
 */
static class AnnotatedFilter implements InclusiveFilter<AnnotatedElement> {
    @Override
    public boolean includes(AnnotatedElement obj) {
        Annotation[] annos = AccessController.doPrivileged(J2DoPrivHelper
            .getAnnotationsAction(obj));
        for (Annotation anno : annos) {
            String name = anno.annotationType().getName();
            // one qualifying, non-ignored annotation is enough
            if ((name.startsWith("jakarta.persistence.")
                || name.startsWith("org.apache.openjpa.persistence."))
                && !_ignoredAnnos.contains(name))
                return true;
        }
        return false;
    }
}
/**
 * Logs that a getter has no matching setter: at warn level without the
 * exception, or — when warn is disabled but trace is enabled — at trace
 * level including the exception.
 *
 * @param e the lookup failure, may be null
 */
private void logNoSetter(ClassMetaData meta, String name, Exception e) {
    Log log = meta.getRepository().getConfiguration()
        .getLog(OpenJPAConfiguration.LOG_METADATA);
    if (log.isWarnEnabled())
        log.warn(_loc.get("no-setter-for-getter", name,
            meta.getDescribedType().getName()));
    else if (log.isTraceEnabled())
        // log the exception, if any, if we're in trace-level debugging;
        // the original called log.warn here, which was a dead branch since
        // warn is disabled on this path
        log.trace(_loc.get("no-setter-for-getter", name,
            meta.getDescribedType().getName()), e);
}
// Returns the metadata-channel log for the given class.
// NOTE(review): not referenced anywhere within this class — candidate for
// removal; confirm no reflective or subclass use before deleting.
private Log getLog(ClassMetaData meta) {
    return meta.getRepository().getConfiguration()
        .getLog(OpenJPAConfiguration.LOG_METADATA);
}
// Renders field lists for diagnostic messages.
String toFieldNames(List<Field> fields) {
    return fields.toString();
}
// Renders method lists for diagnostic messages.
String toMethodNames(List<Method> methods) {
    return methods.toString();
}
// Lazily caches the compatibility flag on first query.
// NOTE(review): lazy init is not synchronized — presumably configured
// single-threaded at bootstrap; confirm before relying on concurrent use.
@Override
public boolean isAbstractMappingUniDirectional(OpenJPAConfiguration conf) {
    if (_isAbstractMappingUniDirectional == null)
        setAbstractMappingUniDirectional(conf);
    return _isAbstractMappingUniDirectional;
}
// Refreshes the cached flag from the configuration's compatibility settings.
public void setAbstractMappingUniDirectional(OpenJPAConfiguration conf) {
    _isAbstractMappingUniDirectional = conf.getCompatibilityInstance().isAbstractMappingUniDirectional();
}
// Lazily caches whether non-default mappings are allowed.
@Override
public boolean isNonDefaultMappingAllowed(OpenJPAConfiguration conf) {
    if (_isNonDefaultMappingAllowed == null)
        setNonDefaultMappingAllowed(conf);
    return _isNonDefaultMappingAllowed;
}
// Refreshes the cached flag from the configuration's compatibility settings.
public void setNonDefaultMappingAllowed(OpenJPAConfiguration conf) {
    _isNonDefaultMappingAllowed = conf.getCompatibilityInstance().
        isNonDefaultMappingAllowed();
}
// Persistence-unit default for cascade-persist; null means "not set".
@Override
public Boolean isDefaultCascadePersistEnabled() {
    return _isCascadePersistPersistenceUnitDefaultEnabled;
}
@Override
public void setDefaultCascadePersistEnabled(Boolean bool) {
    _isCascadePersistPersistenceUnitDefaultEnabled = bool;
}
/** Returns the default schema name, or null when none was configured. */
@Override
public String getDefaultSchema() {
    return _defaultSchema;
}
/** Sets the default schema name used when metadata specifies none. */
@Override
public void setDefaultSchema(String schema) {
    _defaultSchema = schema;
}
}
|
apache/hbase | 37,660 | hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.backup;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.io.MultipleIOException;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
/**
* Utility class to handle the removal of HFiles (or the respective {@link HStoreFile StoreFiles})
* for a HRegion from the {@link FileSystem}. The hfiles will be archived or deleted, depending on
* the state of the system.
*/
@InterfaceAudience.Private
public class HFileArchiver {
private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class);
// Separator used when composing archive file names.
private static final String SEPARATOR = ".";
/** Number of retries in case of fs operation failure */
private static final int DEFAULT_RETRIES_NUMBER = 3;
// Null-safe mapping from an archiver File wrapper to its Path; a lambda
// replaces the anonymous Function class (the file already uses
// java.util.function and streams, so Java 8 idiom is in scope).
private static final Function<File, Path> FUNC_FILE_TO_PATH =
    file -> file == null ? null : file.getPath();
// Shared executor for parallel archiving; presumably created lazily by
// code later in this class — TODO confirm initialization/shutdown site.
private static ThreadPoolExecutor archiveExecutor;
private HFileArchiver() {
    // hidden ctor since this is just a util
}
/** Returns whether the given region's directory exists under the HBase root directory. */
public static boolean exists(Configuration conf, FileSystem fs, RegionInfo info)
    throws IOException {
    Path root = CommonFSUtils.getRootDir(conf);
    return fs.exists(FSUtils.getRegionDirFromRootDir(root, info));
}
/**
 * Cleans up all the files for a HRegion by archiving the HFiles to the
 * archive directory.
 * @param conf the configuration to use
 * @param fs the file system object
 * @param info RegionInfo for region to be deleted
 */
public static void archiveRegion(Configuration conf, FileSystem fs, RegionInfo info)
    throws IOException {
    Path rootDir = CommonFSUtils.getRootDir(conf);
    Path tableDir = CommonFSUtils.getTableDir(rootDir, info.getTable());
    Path regionDir = FSUtils.getRegionDirFromRootDir(rootDir, info);
    archiveRegion(conf, fs, rootDir, tableDir, regionDir);
}
/**
* Cleans up all the files for a HRegion by archiving the HFiles to the archive directory
* @param conf the configuration to use
* @param fs the file system object
* @param info RegionInfo for region to be deleted
* @param rootDir {@link Path} to the root directory where hbase files are stored (for building
* the archive path)
* @param tableDir {@link Path} to where the table is being stored (for building the archive path)
*/
public static void archiveRegion(Configuration conf, FileSystem fs, RegionInfo info, Path rootDir,
Path tableDir) throws IOException {
archiveRegion(conf, fs, rootDir, tableDir, FSUtils.getRegionDirFromRootDir(rootDir, info));
}
/**
* Remove an entire region from the table directory via archiving the region's hfiles.
* @param fs {@link FileSystem} from which to remove the region
* @param rootdir {@link Path} to the root directory where hbase files are stored (for building
* the archive path)
* @param tableDir {@link Path} to where the table is being stored (for building the archive
* path)
* @param regionDir {@link Path} to where a region is being stored (for building the archive path)
* @return <tt>true</tt> if the region was successfully deleted. <tt>false</tt> if the filesystem
* operations could not complete.
* @throws IOException if the request cannot be completed
*/
public static boolean archiveRegion(Configuration conf, FileSystem fs, Path rootdir,
Path tableDir, Path regionDir) throws IOException {
// otherwise, we archive the files
// make sure we can archive
if (tableDir == null || regionDir == null) {
LOG.error("No archive directory could be found because tabledir (" + tableDir
+ ") or regiondir (" + regionDir + "was null. Deleting files instead.");
if (regionDir != null) {
deleteRegionWithoutArchiving(fs, regionDir);
}
// we should have archived, but failed to. Doesn't matter if we deleted
// the archived files correctly or not.
return false;
}
LOG.debug("ARCHIVING {}", regionDir);
// make sure the regiondir lives under the tabledir
Preconditions.checkArgument(regionDir.toString().startsWith(tableDir.toString()));
Path regionArchiveDir = HFileArchiveUtil.getRegionArchiveDir(rootdir,
CommonFSUtils.getTableName(tableDir), regionDir.getName());
FileStatusConverter getAsFile = new FileStatusConverter(fs);
// otherwise, we attempt to archive the store files
// build collection of just the store directories to archive
Collection<File> toArchive = new ArrayList<>();
final PathFilter dirFilter = new FSUtils.DirFilter(fs);
PathFilter nonHidden = new PathFilter() {
@Override
public boolean accept(Path file) {
return dirFilter.accept(file) && !file.getName().startsWith(".");
}
};
FileStatus[] storeDirs = CommonFSUtils.listStatus(fs, regionDir, nonHidden);
// if there no files, we can just delete the directory and return;
if (storeDirs == null) {
LOG.debug("Directory {} empty.", regionDir);
return deleteRegionWithoutArchiving(fs, regionDir);
}
// convert the files in the region to a File
Stream.of(storeDirs).map(getAsFile).forEachOrdered(toArchive::add);
LOG.debug("Archiving " + toArchive);
List<File> failedArchive = resolveAndArchive(conf, fs, regionArchiveDir, toArchive,
EnvironmentEdgeManager.currentTime());
if (!failedArchive.isEmpty()) {
throw new FailedArchiveException(
"Failed to archive/delete all the files for region:" + regionDir.getName() + " into "
+ regionArchiveDir + ". Something is probably awry on the filesystem.",
failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));
}
// if that was successful, then we delete the region
return deleteRegionWithoutArchiving(fs, regionDir);
}
/**
* Archive the specified regions in parallel.
* @param conf the configuration to use
* @param fs {@link FileSystem} from which to remove the region
* @param rootDir {@link Path} to the root directory where hbase files are stored (for
* building the archive path)
* @param tableDir {@link Path} to where the table is being stored (for building the archive
* path)
* @param regionDirList {@link Path} to where regions are being stored (for building the archive
* path)
* @throws IOException if the request cannot be completed
*/
public static void archiveRegions(Configuration conf, FileSystem fs, Path rootDir, Path tableDir,
List<Path> regionDirList) throws IOException {
List<Future<Void>> futures = new ArrayList<>(regionDirList.size());
for (Path regionDir : regionDirList) {
Future<Void> future = getArchiveExecutor(conf).submit(() -> {
archiveRegion(conf, fs, rootDir, tableDir, regionDir);
return null;
});
futures.add(future);
}
try {
for (Future<Void> future : futures) {
future.get();
}
} catch (InterruptedException e) {
throw new InterruptedIOException(e.getMessage());
} catch (ExecutionException e) {
throw new IOException(e.getCause());
}
}
  /**
   * Lazily creates and caches the process-wide executor used by {@link #archiveRegions}.
   * <p>
   * Synchronized so only one pool is ever created; the pool size is read from
   * "hbase.hfilearchiver.thread.pool.max" (default 8) on first use only.
   * @param conf configuration consulted for the pool size
   * @return the shared archive executor
   */
  private static synchronized ThreadPoolExecutor getArchiveExecutor(final Configuration conf) {
    if (archiveExecutor == null) {
      int maxThreads = conf.getInt("hbase.hfilearchiver.thread.pool.max", 8);
      archiveExecutor = Threads.getBoundedCachedThreadPool(maxThreads, 30L, TimeUnit.SECONDS,
        getThreadFactory("HFileArchiver"));
      // Shutdown this ThreadPool in a shutdown hook
      Runtime.getRuntime().addShutdownHook(new Thread(() -> archiveExecutor.shutdown()));
    }
    return archiveExecutor;
  }
// We need this method instead of Threads.getNamedThreadFactory() to pass some tests.
// The difference from Threads.getNamedThreadFactory() is that it doesn't fix ThreadGroup for
// new threads. If we use Threads.getNamedThreadFactory(), we will face ThreadGroup related
// issues in some tests.
private static ThreadFactory getThreadFactory(String archiverName) {
return new ThreadFactory() {
final AtomicInteger threadNumber = new AtomicInteger(1);
@Override
public Thread newThread(Runnable r) {
final String name = archiverName + "-" + threadNumber.getAndIncrement();
Thread t = new Thread(r, name);
t.setDaemon(true);
return t;
}
};
}
/**
* Remove from the specified region the store files of the specified column family, either by
* archiving them or outright deletion
* @param fs the filesystem where the store files live
* @param conf {@link Configuration} to examine to determine the archive directory
* @param parent Parent region hosting the store files
* @param tableDir {@link Path} to where the table is being stored (for building the archive path)
* @param family the family hosting the store files
* @throws IOException if the files could not be correctly disposed.
*/
public static void archiveFamily(FileSystem fs, Configuration conf, RegionInfo parent,
Path tableDir, byte[] family) throws IOException {
Path familyDir = new Path(tableDir, new Path(parent.getEncodedName(), Bytes.toString(family)));
archiveFamilyByFamilyDir(fs, conf, parent, familyDir, family);
}
  /**
   * Removes from the specified region the store files of the specified column family, either by
   * archiving them or outright deletion
   * @param fs the filesystem where the store files live
   * @param conf {@link Configuration} to examine to determine the archive directory
   * @param parent Parent region hosting the store files
   * @param familyDir {@link Path} to where the family is being stored
   * @param family the family hosting the store files
   * @throws IOException if the files could not be correctly disposed.
   */
  public static void archiveFamilyByFamilyDir(FileSystem fs, Configuration conf, RegionInfo parent,
    Path familyDir, byte[] family) throws IOException {
    FileStatus[] storeFiles = CommonFSUtils.listStatus(fs, familyDir);
    // null presumably means the directory is missing or empty — nothing to dispose of
    if (storeFiles == null) {
      LOG.debug("No files to dispose of in {}, family={}", parent.getRegionNameAsString(),
        Bytes.toString(family));
      return;
    }
    FileStatusConverter getAsFile = new FileStatusConverter(fs);
    // wrap each FileStatus in the internal File abstraction used by the archiving helpers
    Collection<File> toArchive = Stream.of(storeFiles).map(getAsFile).collect(Collectors.toList());
    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, parent, family);
    // do the actual archive
    List<File> failedArchive =
      resolveAndArchive(conf, fs, storeArchiveDir, toArchive, EnvironmentEdgeManager.currentTime());
    if (!failedArchive.isEmpty()) {
      throw new FailedArchiveException(
        "Failed to archive/delete all the files for region:"
          + Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family) + " into "
          + storeArchiveDir + ". Something is probably awry on the filesystem.",
        failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));
    }
  }
  /**
   * Remove the store files, either by archiving them or outright deletion
   * @param conf {@link Configuration} to examine to determine the archive directory
   * @param fs the filesystem where the store files live
   * @param regionInfo {@link RegionInfo} of the region hosting the store files
   * @param tableDir {@link Path} to where the table is being stored (for building the archive
   *   path)
   * @param family the family hosting the store files
   * @param compactedFiles files to be disposed of. No further reading of these files should be
   *   attempted; otherwise likely to cause an {@link IOException}
   * @throws IOException if the files could not be correctly disposed.
   */
  public static void archiveStoreFiles(Configuration conf, FileSystem fs, RegionInfo regionInfo,
    Path tableDir, byte[] family, Collection<HStoreFile> compactedFiles) throws IOException {
    // compute the destination under the archive tree, then delegate to the shared archive logic
    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);
    archive(conf, fs, regionInfo, family, compactedFiles, storeArchiveDir);
  }
  /**
   * Archive recovered edits using existing logic for archiving store files. This is currently only
   * relevant when <b>hbase.region.archive.recovered.edits</b> is true, as recovered edits shouldn't
   * be kept after replay. In theory, we could use very same method available for archiving store
   * files, but supporting WAL dir and store files on different FileSystems added the need for extra
   * validation of the passed FileSystem instance and the path where the archiving edits should be
   * placed.
   * @param conf {@link Configuration} to determine the archive directory.
   * @param fs the filesystem used for storing WAL files.
   * @param regionInfo {@link RegionInfo} a pseudo region representation for the archiving logic.
   * @param family a pseudo family representation for the archiving logic.
   * @param replayedEdits the recovered edits to be archived.
   * @throws IOException if files can't be archived due to some internal error.
   */
  public static void archiveRecoveredEdits(Configuration conf, FileSystem fs, RegionInfo regionInfo,
    byte[] family, Collection<HStoreFile> replayedEdits) throws IOException {
    // the WAL dir may be configured separately from the main hbase root dir
    String workingDir = conf.get(CommonFSUtils.HBASE_WAL_DIR, conf.get(HConstants.HBASE_DIR));
    // extra sanity checks for the right FS
    Path path = new Path(workingDir);
    if (path.isAbsoluteAndSchemeAuthorityNull()) {
      // no schema specified on wal dir value, so it's on same FS as StoreFiles
      path = new Path(conf.get(HConstants.HBASE_DIR));
    }
    // the passed FileSystem must match the scheme of the resolved WAL/root dir
    if (path.toUri().getScheme() != null && !path.toUri().getScheme().equals(fs.getScheme())) {
      throw new IOException(
        "Wrong file system! Should be " + path.toUri().getScheme() + ", but got " + fs.getScheme());
    }
    path = HFileArchiveUtil.getStoreArchivePathForRootDir(path, regionInfo, family);
    archive(conf, fs, regionInfo, family, replayedEdits, path);
  }
private static void archive(Configuration conf, FileSystem fs, RegionInfo regionInfo,
byte[] family, Collection<HStoreFile> compactedFiles, Path storeArchiveDir) throws IOException {
// sometimes in testing, we don't have rss, so we need to check for that
if (fs == null) {
LOG.warn(
"Passed filesystem is null, so just deleting files without archiving for {}," + "family={}",
Bytes.toString(regionInfo.getRegionName()), Bytes.toString(family));
deleteStoreFilesWithoutArchiving(compactedFiles);
return;
}
// short circuit if we don't have any files to delete
if (compactedFiles.isEmpty()) {
LOG.debug("No files to dispose of, done!");
return;
}
// build the archive path
if (regionInfo == null || family == null)
throw new IOException("Need to have a region and a family to archive from.");
// make sure we don't archive if we can't and that the archive dir exists
if (!fs.mkdirs(storeArchiveDir)) {
throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"
+ Bytes.toString(family) + ", deleting compacted files instead.");
}
// otherwise we attempt to archive the store files
LOG.debug("Archiving compacted files.");
// Wrap the storefile into a File
StoreToFile getStorePath = new StoreToFile(fs);
Collection<File> storeFiles =
compactedFiles.stream().map(getStorePath).collect(Collectors.toList());
// do the actual archive
List<File> failedArchive = resolveAndArchive(conf, fs, storeArchiveDir, storeFiles,
EnvironmentEdgeManager.currentTime());
if (!failedArchive.isEmpty()) {
throw new FailedArchiveException(
"Failed to archive/delete all the files for region:"
+ Bytes.toString(regionInfo.getRegionName()) + ", family:" + Bytes.toString(family)
+ " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",
failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));
}
}
  /**
   * Archive the store file
   * @param fs the filesystem where the store files live
   * @param regionInfo region hosting the store files
   * @param conf {@link Configuration} to examine to determine the archive directory
   * @param tableDir {@link Path} to where the table is being stored (for building the archive
   *   path)
   * @param family the family hosting the store files
   * @param storeFile file to be archived
   * @throws IOException if the files could not be correctly disposed.
   */
  public static void archiveStoreFile(Configuration conf, FileSystem fs, RegionInfo regionInfo,
    Path tableDir, byte[] family, Path storeFile) throws IOException {
    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);
    // make sure we don't archive if we can't and that the archive dir exists
    if (!fs.mkdirs(storeArchiveDir)) {
      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"
        + Bytes.toString(family) + ", deleting compacted files instead.");
    }
    // do the actual archive
    long start = EnvironmentEdgeManager.currentTime();
    File file = new FileablePath(fs, storeFile);
    // single-file variant: name conflicts are resolved with the start-time suffix
    if (!resolveAndArchiveFile(storeArchiveDir, file, Long.toString(start))) {
      throw new IOException("Failed to archive/delete the file for region:"
        + regionInfo.getRegionNameAsString() + ", family:" + Bytes.toString(family) + " into "
        + storeArchiveDir + ". Something is probably awry on the filesystem.");
    }
  }
  /**
   * Resolve any conflict with an existing archive file via timestamp-append renaming of the
   * existing file and then archive the passed in files.
   * @param conf configuration passed through to the per-directory and per-file helpers
   * @param fs {@link FileSystem} on which to archive the files
   * @param baseArchiveDir base archive directory to store the files. If any of the files to archive
   *   are directories, will append the name of the directory to the base
   *   archive directory name, creating a parallel structure.
   * @param toArchive files/directories that need to be archived
   * @param start time the archiving started - used for resolving archive conflicts.
   * @return the list of failed to archive files.
   * @throws IOException if an unexpected file operation exception occurred
   */
  private static List<File> resolveAndArchive(Configuration conf, FileSystem fs,
    Path baseArchiveDir, Collection<File> toArchive, long start) throws IOException {
    // Early exit if no files to archive
    if (toArchive.isEmpty()) {
      LOG.trace("No files to archive, returning an empty list.");
      return Collections.emptyList();
    }
    LOG.trace("Preparing to archive files into directory: {}", baseArchiveDir);
    // Ensure the archive directory exists
    ensureArchiveDirectoryExists(fs, baseArchiveDir);
    // Thread-safe collection for storing failures
    Queue<File> failures = new ConcurrentLinkedQueue<>();
    String startTime = Long.toString(start);
    // Separate files and directories for processing
    List<File> filesOnly = new ArrayList<>();
    for (File file : toArchive) {
      if (file.isFile()) {
        filesOnly.add(file);
      } else {
        // directories are recursed into synchronously; their failures land in `failures`
        handleDirectory(conf, fs, baseArchiveDir, failures, file, start);
      }
    }
    // Archive files concurrently
    archiveFilesConcurrently(conf, baseArchiveDir, filesOnly, failures, startTime);
    return new ArrayList<>(failures); // Convert to a List for the return value
  }
private static void ensureArchiveDirectoryExists(FileSystem fs, Path baseArchiveDir)
throws IOException {
if (!fs.exists(baseArchiveDir) && !fs.mkdirs(baseArchiveDir)) {
throw new IOException("Failed to create the archive directory: " + baseArchiveDir);
}
LOG.trace("Archive directory ready: {}", baseArchiveDir);
}
  /**
   * Recursively archives the children of {@code directory} into the parallel subdirectory of
   * {@code baseArchiveDir}. Failures (including a failure to list children) are appended to
   * {@code failures} rather than thrown.
   */
  private static void handleDirectory(Configuration conf, FileSystem fs, Path baseArchiveDir,
    Queue<File> failures, File directory, long start) {
    LOG.trace("Processing directory: {}, archiving its children.", directory);
    Path subArchiveDir = new Path(baseArchiveDir, directory.getName());
    try {
      Collection<File> children = directory.getChildren();
      failures.addAll(resolveAndArchive(conf, fs, subArchiveDir, children, start));
    } catch (IOException e) {
      // best-effort: record the directory itself as failed and keep going
      LOG.warn("Failed to archive directory: {}", directory, e);
      failures.add(directory);
    }
  }
  /**
   * Archives each file in {@code files} into {@code baseArchiveDir} on a short-lived bounded
   * thread pool. Files whose task fails, throws, or is interrupted are added to {@code failures};
   * the pool is always shut down before returning.
   */
  private static void archiveFilesConcurrently(Configuration conf, Path baseArchiveDir,
    List<File> files, Queue<File> failures, String startTime) {
    LOG.trace("Archiving {} files concurrently into directory: {}", files.size(), baseArchiveDir);
    Map<File, Future<Boolean>> futureMap = new HashMap<>();
    // Submit file archiving tasks
    // default is 16 which comes equal hbase.hstore.blockingStoreFiles default value
    int maxThreads = conf.getInt("hbase.hfilearchiver.per.region.thread.pool.max", 16);
    ThreadPoolExecutor hfilesArchiveExecutor = Threads.getBoundedCachedThreadPool(maxThreads, 30L,
      TimeUnit.SECONDS, getThreadFactory("HFileArchiverPerRegion-"));
    try {
      for (File file : files) {
        Future<Boolean> future = hfilesArchiveExecutor
          .submit(() -> resolveAndArchiveFile(baseArchiveDir, file, startTime));
        futureMap.put(file, future);
      }
      // Process results of each task
      for (Map.Entry<File, Future<Boolean>> entry : futureMap.entrySet()) {
        File file = entry.getKey();
        try {
          if (!entry.getValue().get()) {
            LOG.warn("Failed to archive file: {} into directory: {}", file, baseArchiveDir);
            failures.add(file);
          }
        } catch (InterruptedException e) {
          LOG.error("Archiving interrupted for file: {}", file, e);
          Thread.currentThread().interrupt(); // Restore interrupt status
          failures.add(file);
        } catch (ExecutionException e) {
          LOG.error("Archiving failed for file: {}", file, e);
          failures.add(file);
        }
      }
    } finally {
      // the per-call pool must not outlive this method
      hfilesArchiveExecutor.shutdown();
    }
  }
  /**
   * Attempt to archive the passed in file to the archive directory.
   * <p>
   * If the same file already exists in the archive, it is moved to a timestamped directory under
   * the archive directory and the new file is put in its place.
   * @param archiveDir {@link Path} to the directory that stores the archives of the hfiles
   * @param currentFile {@link Path} to the original HFile that will be archived
   * @param archiveStartTime time the archiving started, to resolve naming conflicts
   * @return <tt>true</tt> if the file is successfully archived. <tt>false</tt> if there was a
   *   problem, but the operation still completed.
   * @throws IOException on failure to complete {@link FileSystem} operations.
   */
  private static boolean resolveAndArchiveFile(Path archiveDir, File currentFile,
    String archiveStartTime) throws IOException {
    // build path as it should be in the archive
    String filename = currentFile.getName();
    Path archiveFile = new Path(archiveDir, filename);
    // the archive lives on the same FileSystem as the source (we rename into it below)
    FileSystem fs = currentFile.getFileSystem();
    // An existing destination file in the archive is unexpected, but we handle it here.
    if (fs.exists(archiveFile)) {
      if (!fs.exists(currentFile.getPath())) {
        // If the file already exists in the archive, and there is no current file to archive, then
        // assume that the file in archive is correct. This is an unexpected situation, suggesting a
        // race condition or split brain.
        // In HBASE-26718 this was found when compaction incorrectly happened during warmupRegion.
        LOG.warn("{} exists in archive. Attempted to archive nonexistent file {}.", archiveFile,
          currentFile);
        // We return success to match existing behavior in this method, where FileNotFoundException
        // in moveAndClose is ignored.
        return true;
      }
      // There is a conflict between the current file and the already existing archived file.
      // Move the archived file to a timestamped backup. This is a really, really unlikely
      // situation, where we get the same name for the existing file, but is included just for that
      // 1 in trillion chance. We are potentially incurring data loss in the archive directory if
      // the files are not identical. The timestamped backup will be cleaned by HFileCleaner as it
      // has no references.
      FileStatus curStatus = fs.getFileStatus(currentFile.getPath());
      FileStatus archiveStatus = fs.getFileStatus(archiveFile);
      long curLen = curStatus.getLen();
      long archiveLen = archiveStatus.getLen();
      long curMtime = curStatus.getModificationTime();
      long archiveMtime = archiveStatus.getModificationTime();
      // a length mismatch means the two same-named files genuinely differ — refuse to overwrite
      if (curLen != archiveLen) {
        LOG.error(
          "{} already exists in archive with different size than current {}."
            + " archiveLen: {} currentLen: {} archiveMtime: {} currentMtime: {}",
          archiveFile, currentFile, archiveLen, curLen, archiveMtime, curMtime);
        throw new IOException(
          archiveFile + " already exists in archive with different size" + " than " + currentFile);
      }
      LOG.error(
        "{} already exists in archive, moving to timestamped backup and overwriting"
          + " current {}. archiveLen: {} currentLen: {} archiveMtime: {} currentMtime: {}",
        archiveFile, currentFile, archiveLen, curLen, archiveMtime, curMtime);
      // move the archive file to the stamped backup
      Path backedupArchiveFile = new Path(archiveDir, filename + SEPARATOR + archiveStartTime);
      if (!fs.rename(archiveFile, backedupArchiveFile)) {
        LOG.error("Could not rename archive file to backup: " + backedupArchiveFile
          + ", deleting existing file in favor of newer.");
        // try to delete the existing file, if we can't rename it
        if (!fs.delete(archiveFile, false)) {
          throw new IOException("Couldn't delete existing archive file (" + archiveFile
            + ") or rename it to the backup file (" + backedupArchiveFile
            + ") to make room for similarly named file.");
        }
      } else {
        LOG.info("Backed up archive file from {} to {}.", archiveFile, backedupArchiveFile);
      }
    }
    LOG.trace("No existing file in archive for {}, free to archive original file.", archiveFile);
    // at this point, we should have a free spot for the archive file
    boolean success = false;
    for (int i = 0; !success && i < DEFAULT_RETRIES_NUMBER; ++i) {
      if (i > 0) {
        // Ensure that the archive directory exists.
        // The previous "move to archive" operation has failed probably because
        // the cleaner has removed our archive directory (HBASE-7643).
        // (we're in a retry loop, so don't worry too much about the exception)
        try {
          if (!fs.exists(archiveDir)) {
            if (fs.mkdirs(archiveDir)) {
              LOG.debug("Created archive directory {}", archiveDir);
            }
          }
        } catch (IOException e) {
          LOG.warn("Failed to create directory {}", archiveDir, e);
        }
      }
      try {
        success = currentFile.moveAndClose(archiveFile);
      } catch (FileNotFoundException fnfe) {
        // source vanished — treat as archived (same rationale as the split-brain case above)
        LOG.warn("Failed to archive " + currentFile
          + " because it does not exist! Skipping and continuing on.", fnfe);
        success = true;
      } catch (IOException e) {
        success = false;
        // When HFiles are placed on a filesystem other than HDFS a rename operation can be a
        // non-atomic file copy operation. It can take a long time to copy a large hfile and if
        // interrupted there may be a partially copied file present at the destination. We must
        // remove the partially copied file, if any, or otherwise the archive operation will fail
        // indefinitely from this point.
        LOG.warn("Failed to archive " + currentFile + " on try #" + i, e);
        try {
          fs.delete(archiveFile, false);
        } catch (FileNotFoundException fnfe) {
          // This case is fine.
        } catch (IOException ee) {
          // Complain about other IO exceptions
          LOG.warn("Failed to clean up from failure to archive " + currentFile + " on try #" + i,
            ee);
        }
      }
    }
    if (!success) {
      LOG.error("Failed to archive " + currentFile);
      return false;
    }
    LOG.debug("Archived from {} to {}", currentFile, archiveFile);
    return true;
  }
/**
* Without regard for backup, delete a region. Should be used with caution.
* @param regionDir {@link Path} to the region to be deleted.
* @param fs FileSystem from which to delete the region
* @return <tt>true</tt> on successful deletion, <tt>false</tt> otherwise
* @throws IOException on filesystem operation failure
*/
private static boolean deleteRegionWithoutArchiving(FileSystem fs, Path regionDir)
throws IOException {
if (fs.delete(regionDir, true)) {
LOG.debug("Deleted {}", regionDir);
return true;
}
LOG.debug("Failed to delete directory {}", regionDir);
return false;
}
/**
* Just do a simple delete of the given store files
* <p>
* A best effort is made to delete each of the files, rather than bailing on the first failure.
* <p>
* @param compactedFiles store files to delete from the file system.
* @throws IOException if a file cannot be deleted. All files will be attempted to deleted before
* throwing the exception, rather than failing at the first file.
*/
private static void deleteStoreFilesWithoutArchiving(Collection<HStoreFile> compactedFiles)
throws IOException {
LOG.debug("Deleting files without archiving.");
List<IOException> errors = new ArrayList<>(0);
for (HStoreFile hsf : compactedFiles) {
try {
hsf.deleteStoreFile();
} catch (IOException e) {
LOG.error("Failed to delete {}", hsf.getPath());
errors.add(e);
}
}
if (errors.size() > 0) {
throw MultipleIOException.createIOException(errors);
}
}
  /**
   * Adapt a type to match the {@link File} interface, which is used internally for handling
   * archival/removal of files
   * @param <T> type to adapt to the {@link File} interface
   */
  private static abstract class FileConverter<T> implements Function<T, File> {
    // filesystem the converted files live on; shared with the produced File wrappers
    protected final FileSystem fs;
    public FileConverter(FileSystem fs) {
      this.fs = fs;
    }
  }
  /**
   * Convert a FileStatus to something we can manage in the archiving
   */
  private static class FileStatusConverter extends FileConverter<FileStatus> {
    public FileStatusConverter(FileSystem fs) {
      super(fs);
    }
    @Override
    public File apply(FileStatus input) {
      // wrap the status' path; FileablePath handles delete/list/close uniformly
      return new FileablePath(fs, input.getPath());
    }
  }
  /**
   * Convert the {@link HStoreFile} into something we can manage in the archive methods
   */
  private static class StoreToFile extends FileConverter<HStoreFile> {
    public StoreToFile(FileSystem fs) {
      super(fs);
    }
    @Override
    public File apply(HStoreFile input) {
      // wrap the store file so close() can release its readers before the move
      return new FileableStoreFile(fs, input);
    }
  }
  /**
   * Wrapper to handle file operations uniformly
   */
  private static abstract class File {
    protected final FileSystem fs;
    public File(FileSystem fs) {
      this.fs = fs;
    }
    /**
     * Delete the file
     * @throws IOException on failure
     */
    abstract void delete() throws IOException;
    /**
     * Check to see if this is a file or a directory
     * @return <tt>true</tt> if it is a file, <tt>false</tt> otherwise
     * @throws IOException on {@link FileSystem} connection error
     */
    abstract boolean isFile() throws IOException;
    /**
     * @return if this is a directory, returns all the children in the directory, otherwise returns
     *   an empty list
     */
    abstract Collection<File> getChildren() throws IOException;
    /**
     * close any outside readers of the file
     */
    abstract void close() throws IOException;
    /** Returns the name of the file (not the full fs path, just the individual file name) */
    abstract String getName();
    /** Returns the path to this file */
    abstract Path getPath();
    /**
     * Move the file to the given destination
     * @return <tt>true</tt> on success
     */
    public boolean moveAndClose(Path dest) throws IOException {
      // close readers first so the rename isn't fighting open handles
      this.close();
      Path p = this.getPath();
      return CommonFSUtils.renameAndSetModifyTime(fs, p, dest);
    }
    /** Returns the {@link FileSystem} on which this file resides */
    public FileSystem getFileSystem() {
      return this.fs;
    }
    @Override
    public String toString() {
      return this.getClass().getSimpleName() + ", " + getPath().toString();
    }
  }
/**
* A {@link File} that wraps a simple {@link Path} on a {@link FileSystem}.
*/
private static class FileablePath extends File {
private final Path file;
private final FileStatusConverter getAsFile;
public FileablePath(FileSystem fs, Path file) {
super(fs);
this.file = file;
this.getAsFile = new FileStatusConverter(fs);
}
@Override
public void delete() throws IOException {
if (!fs.delete(file, true)) throw new IOException("Failed to delete:" + this.file);
}
@Override
public String getName() {
return file.getName();
}
@Override
public Collection<File> getChildren() throws IOException {
if (fs.isFile(file)) {
return Collections.emptyList();
}
return Stream.of(fs.listStatus(file)).map(getAsFile).collect(Collectors.toList());
}
@Override
public boolean isFile() throws IOException {
return fs.isFile(file);
}
@Override
public void close() throws IOException {
// NOOP - files are implicitly closed on removal
}
@Override
Path getPath() {
return file;
}
}
  /**
   * {@link File} adapter for a {@link HStoreFile} living on a {@link FileSystem} .
   */
  private static class FileableStoreFile extends File {
    HStoreFile file;
    public FileableStoreFile(FileSystem fs, HStoreFile store) {
      super(fs);
      this.file = store;
    }
    @Override
    public void delete() throws IOException {
      file.deleteStoreFile();
    }
    @Override
    public String getName() {
      return file.getPath().getName();
    }
    @Override
    public boolean isFile() {
      // a store file is always a regular file, never a directory
      return true;
    }
    @Override
    public Collection<File> getChildren() throws IOException {
      // storefiles don't have children
      return Collections.emptyList();
    }
    @Override
    public void close() throws IOException {
      // close the store file's reader; the boolean arg presumably requests cache
      // eviction — TODO confirm against HStoreFile.closeStoreFile
      file.closeStoreFile(true);
    }
    @Override
    Path getPath() {
      return file.getPath();
    }
  }
}
|
google/guava | 37,717 | android/guava/src/com/google/common/collect/ImmutableSortedSet.java | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.ImmutableList.asImmutableList;
import static com.google.common.collect.ObjectArrays.checkElementsNotNull;
import static java.lang.System.arraycopy;
import static java.util.Arrays.sort;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.DoNotCall;
import com.google.errorprone.annotations.concurrent.LazyInit;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.NavigableSet;
import java.util.SortedSet;
import java.util.stream.Collector;
import org.jspecify.annotations.Nullable;
/**
* A {@link NavigableSet} whose contents will never change, with many other important properties
* detailed at {@link ImmutableCollection}.
*
* <p><b>Warning:</b> as with any sorted collection, you are strongly advised not to use a {@link
* Comparator} or {@link Comparable} type whose comparison behavior is <i>inconsistent with
* equals</i>. That is, {@code a.compareTo(b)} or {@code comparator.compare(a, b)} should equal zero
* <i>if and only if</i> {@code a.equals(b)}. If this advice is not followed, the resulting
* collection will not correctly obey its specification.
*
* <p>See the Guava User Guide article on <a href=
* "https://github.com/google/guava/wiki/ImmutableCollectionsExplained">immutable collections</a>.
*
* @author Jared Levy
* @author Louis Wasserman
* @since 2.0 (implements {@code NavigableSet} since 12.0)
*/
// TODO(benyu): benchmark and optimize all creation paths, which are a mess now
@GwtCompatible
@SuppressWarnings("serial") // we're overriding default serialization
public abstract class ImmutableSortedSet<E> extends ImmutableSet<E>
implements NavigableSet<E>, SortedIterable<E> {
/**
* Returns a {@code Collector} that accumulates the input elements into a new {@code
* ImmutableSortedSet}, ordered by the specified comparator.
*
* <p>If the elements contain duplicates (according to the comparator), only the first duplicate
* in encounter order will appear in the result.
*
* @since 33.2.0 (available since 21.0 in guava-jre)
*/
@IgnoreJRERequirement // Users will use this only if they're already using streams.
public static <E> Collector<E, ?, ImmutableSortedSet<E>> toImmutableSortedSet(
Comparator<? super E> comparator) {
return CollectCollectors.toImmutableSortedSet(comparator);
}
static <E> RegularImmutableSortedSet<E> emptySet(Comparator<? super E> comparator) {
if (Ordering.natural().equals(comparator)) {
@SuppressWarnings("unchecked") // The natural-ordered empty set supports all types.
RegularImmutableSortedSet<E> result =
(RegularImmutableSortedSet<E>) RegularImmutableSortedSet.NATURAL_EMPTY_SET;
return result;
} else {
return new RegularImmutableSortedSet<>(ImmutableList.of(), comparator);
}
}
/**
* Returns the empty immutable sorted set.
*
* <p><b>Performance note:</b> the instance returned is a singleton.
*/
@SuppressWarnings("unchecked") // The natural-ordered empty set supports all types.
public static <E> ImmutableSortedSet<E> of() {
return (ImmutableSortedSet<E>) RegularImmutableSortedSet.NATURAL_EMPTY_SET;
}
/** Returns an immutable sorted set containing a single element. */
public static <E extends Comparable<? super E>> ImmutableSortedSet<E> of(E e1) {
return new RegularImmutableSortedSet<>(ImmutableList.of(e1), Ordering.natural());
}
/**
* Returns an immutable sorted set containing the given elements sorted by their natural ordering.
* When multiple elements are equivalent according to {@link Comparable#compareTo}, only the first
* one specified is included.
*
* @throws NullPointerException if any element is null
*/
public static <E extends Comparable<? super E>> ImmutableSortedSet<E> of(E e1, E e2) {
return construct(Ordering.natural(), 2, e1, e2);
}
/**
* Returns an immutable sorted set containing the given elements sorted by their natural ordering.
* When multiple elements are equivalent according to {@link Comparable#compareTo}, only the first
* one specified is included.
*
* @throws NullPointerException if any element is null
*/
public static <E extends Comparable<? super E>> ImmutableSortedSet<E> of(E e1, E e2, E e3) {
return construct(Ordering.natural(), 3, e1, e2, e3);
}
/**
* Returns an immutable sorted set containing the given elements sorted by their natural ordering.
* When multiple elements are equivalent according to {@link Comparable#compareTo}, only the first
* one specified is included.
*
* @throws NullPointerException if any element is null
*/
public static <E extends Comparable<? super E>> ImmutableSortedSet<E> of(E e1, E e2, E e3, E e4) {
return construct(Ordering.natural(), 4, e1, e2, e3, e4);
}
/**
* Returns an immutable sorted set containing the given elements sorted by their natural ordering.
* When multiple elements are equivalent according to {@link Comparable#compareTo}, only the first
* one specified is included.
*
* @throws NullPointerException if any element is null
*/
public static <E extends Comparable<? super E>> ImmutableSortedSet<E> of(
E e1, E e2, E e3, E e4, E e5) {
return construct(Ordering.natural(), 5, e1, e2, e3, e4, e5);
}
/**
* Returns an immutable sorted set containing the given elements sorted by their natural ordering.
* When multiple elements are equivalent according to {@link Comparable#compareTo}, only the first
* one specified is included.
*
* @throws NullPointerException if any element is null
* @since 3.0 (source-compatible since 2.0)
*/
@SuppressWarnings("unchecked")
public static <E extends Comparable<? super E>> ImmutableSortedSet<E> of(
E e1, E e2, E e3, E e4, E e5, E e6, E... remaining) {
Comparable<?>[] contents = new Comparable<?>[6 + remaining.length];
contents[0] = e1;
contents[1] = e2;
contents[2] = e3;
contents[3] = e4;
contents[4] = e5;
contents[5] = e6;
arraycopy(remaining, 0, contents, 6, remaining.length);
return construct(Ordering.natural(), contents.length, (E[]) contents);
}
// TODO(kevinb): Consider factory methods that reject duplicates
/**
* Returns an immutable sorted set containing the given elements sorted by their natural ordering.
* When multiple elements are equivalent according to {@link Comparable#compareTo}, only the first
* one specified is included.
*
* @throws NullPointerException if any of {@code elements} is null
* @since 3.0
*/
public static <E extends Comparable<? super E>> ImmutableSortedSet<E> copyOf(E[] elements) {
return construct(Ordering.natural(), elements.length, elements.clone());
}
/**
* Returns an immutable sorted set containing the given elements sorted by their natural ordering.
* When multiple elements are equivalent according to {@code compareTo()}, only the first one
* specified is included. To create a copy of a {@code SortedSet} that preserves the comparator,
* call {@link #copyOfSorted} instead. This method iterates over {@code elements} at most once.
*
* <p>Note that if {@code s} is a {@code Set<String>}, then {@code ImmutableSortedSet.copyOf(s)}
* returns an {@code ImmutableSortedSet<String>} containing each of the strings in {@code s},
* while {@code ImmutableSortedSet.of(s)} returns an {@code ImmutableSortedSet<Set<String>>}
* containing one element (the given set itself).
*
* <p>Despite the method name, this method attempts to avoid actually copying the data when it is
* safe to do so. The exact circumstances under which a copy will or will not be performed are
* undocumented and subject to change.
*
* <p>This method is not type-safe, as it may be called on elements that are not mutually
* comparable.
*
* @throws ClassCastException if the elements are not mutually comparable
* @throws NullPointerException if any of {@code elements} is null
*/
public static <E> ImmutableSortedSet<E> copyOf(Iterable<? extends E> elements) {
// Hack around E not being a subtype of Comparable.
// Unsafe, see ImmutableSortedSetFauxverideShim.
@SuppressWarnings("unchecked")
Ordering<E> naturalOrder = (Ordering<E>) Ordering.<Comparable<?>>natural();
return copyOf(naturalOrder, elements);
}
/**
* Returns an immutable sorted set containing the given elements sorted by their natural ordering.
* When multiple elements are equivalent according to {@code compareTo()}, only the first one
* specified is included. To create a copy of a {@code SortedSet} that preserves the comparator,
* call {@link #copyOfSorted} instead. This method iterates over {@code elements} at most once.
*
* <p>Note that if {@code s} is a {@code Set<String>}, then {@code ImmutableSortedSet.copyOf(s)}
* returns an {@code ImmutableSortedSet<String>} containing each of the strings in {@code s},
* while {@code ImmutableSortedSet.of(s)} returns an {@code ImmutableSortedSet<Set<String>>}
* containing one element (the given set itself).
*
* <p><b>Note:</b> Despite what the method name suggests, if {@code elements} is an {@code
* ImmutableSortedSet}, it may be returned instead of a copy.
*
* <p>This method is not type-safe, as it may be called on elements that are not mutually
* comparable.
*
* <p>This method is safe to use even when {@code elements} is a synchronized or concurrent
* collection that is currently being modified by another thread.
*
* @throws ClassCastException if the elements are not mutually comparable
* @throws NullPointerException if any of {@code elements} is null
* @since 7.0 (source-compatible since 2.0)
*/
public static <E> ImmutableSortedSet<E> copyOf(Collection<? extends E> elements) {
// Hack around E not being a subtype of Comparable.
// Unsafe, see ImmutableSortedSetFauxverideShim.
@SuppressWarnings("unchecked")
Ordering<E> naturalOrder = (Ordering<E>) Ordering.<Comparable<?>>natural();
return copyOf(naturalOrder, elements);
}
/**
* Returns an immutable sorted set containing the given elements sorted by their natural ordering.
* When multiple elements are equivalent according to {@code compareTo()}, only the first one
* specified is included.
*
* <p>This method is not type-safe, as it may be called on elements that are not mutually
* comparable.
*
* @throws ClassCastException if the elements are not mutually comparable
* @throws NullPointerException if any of {@code elements} is null
*/
public static <E> ImmutableSortedSet<E> copyOf(Iterator<? extends E> elements) {
// Hack around E not being a subtype of Comparable.
// Unsafe, see ImmutableSortedSetFauxverideShim.
@SuppressWarnings("unchecked")
Ordering<E> naturalOrder = (Ordering<E>) Ordering.<Comparable<?>>natural();
return copyOf(naturalOrder, elements);
}
/**
* Returns an immutable sorted set containing the given elements sorted by the given {@code
* Comparator}. When multiple elements are equivalent according to {@code compareTo()}, only the
* first one specified is included.
*
* @throws NullPointerException if {@code comparator} or any of {@code elements} is null
*/
public static <E> ImmutableSortedSet<E> copyOf(
Comparator<? super E> comparator, Iterator<? extends E> elements) {
return new Builder<E>(comparator).addAll(elements).build();
}
/**
* Returns an immutable sorted set containing the given elements sorted by the given {@code
* Comparator}. When multiple elements are equivalent according to {@code compare()}, only the
* first one specified is included. This method iterates over {@code elements} at most once.
*
* <p>Despite the method name, this method attempts to avoid actually copying the data when it is
* safe to do so. The exact circumstances under which a copy will or will not be performed are
* undocumented and subject to change.
*
* @throws NullPointerException if {@code comparator} or any of {@code elements} is null
*/
public static <E> ImmutableSortedSet<E> copyOf(
Comparator<? super E> comparator, Iterable<? extends E> elements) {
checkNotNull(comparator);
boolean hasSameComparator = SortedIterables.hasSameComparator(comparator, elements);
if (hasSameComparator && (elements instanceof ImmutableSortedSet)) {
@SuppressWarnings("unchecked")
ImmutableSortedSet<E> original = (ImmutableSortedSet<E>) elements;
if (!original.isPartialView()) {
return original;
}
}
@SuppressWarnings("unchecked") // elements only contains E's; it's safe.
E[] array = (E[]) Iterables.toArray(elements);
return construct(comparator, array.length, array);
}
/**
* Returns an immutable sorted set containing the given elements sorted by the given {@code
* Comparator}. When multiple elements are equivalent according to {@code compareTo()}, only the
* first one specified is included.
*
* <p>Despite the method name, this method attempts to avoid actually copying the data when it is
* safe to do so. The exact circumstances under which a copy will or will not be performed are
* undocumented and subject to change.
*
* <p>This method is safe to use even when {@code elements} is a synchronized or concurrent
* collection that is currently being modified by another thread.
*
* @throws NullPointerException if {@code comparator} or any of {@code elements} is null
* @since 7.0 (source-compatible since 2.0)
*/
public static <E> ImmutableSortedSet<E> copyOf(
Comparator<? super E> comparator, Collection<? extends E> elements) {
return copyOf(comparator, (Iterable<? extends E>) elements);
}
/**
* Returns an immutable sorted set containing the elements of a sorted set, sorted by the same
* {@code Comparator}. That behavior differs from {@link #copyOf(Iterable)}, which always uses the
* natural ordering of the elements.
*
* <p>Despite the method name, this method attempts to avoid actually copying the data when it is
* safe to do so. The exact circumstances under which a copy will or will not be performed are
* undocumented and subject to change.
*
* <p>This method is safe to use even when {@code sortedSet} is a synchronized or concurrent
* collection that is currently being modified by another thread.
*
* @throws NullPointerException if {@code sortedSet} or any of its elements is null
*/
public static <E> ImmutableSortedSet<E> copyOfSorted(SortedSet<E> sortedSet) {
Comparator<? super E> comparator = SortedIterables.comparator(sortedSet);
ImmutableList<E> list = ImmutableList.copyOf(sortedSet);
if (list.isEmpty()) {
return emptySet(comparator);
} else {
return new RegularImmutableSortedSet<>(list, comparator);
}
}
/**
* Constructs an {@code ImmutableSortedSet} from the first {@code n} elements of {@code contents}.
* If {@code k} is the size of the returned {@code ImmutableSortedSet}, then the sorted unique
* elements are in the first {@code k} positions of {@code contents}, and {@code contents[i] ==
* null} for {@code k <= i < n}.
*
* <p>This method takes ownership of {@code contents}; do not modify {@code contents} after this
* returns.
*
* @throws NullPointerException if any of the first {@code n} elements of {@code contents} is null
*/
static <E> ImmutableSortedSet<E> construct(
Comparator<? super E> comparator, int n, E... contents) {
if (n == 0) {
return emptySet(comparator);
}
checkElementsNotNull(contents, n);
sort(contents, 0, n, comparator);
int uniques = 1;
for (int i = 1; i < n; i++) {
E cur = contents[i];
E prev = contents[uniques - 1];
if (comparator.compare(cur, prev) != 0) {
contents[uniques++] = cur;
}
}
Arrays.fill(contents, uniques, n, null);
if (uniques < contents.length / 2) {
// Deduplication eliminated many of the elements. We don't want to retain an arbitrarily
// large array relative to the number of elements, so we cap the ratio.
contents = Arrays.copyOf(contents, uniques);
}
return new RegularImmutableSortedSet<>(asImmutableList(contents, uniques), comparator);
}
/**
* Returns a builder that creates immutable sorted sets with an explicit comparator. If the
* comparator has a more general type than the set being generated, such as creating a {@code
* SortedSet<Integer>} with a {@code Comparator<Number>}, use the {@link Builder} constructor
* instead.
*
* @throws NullPointerException if {@code comparator} is null
*/
public static <E> Builder<E> orderedBy(Comparator<E> comparator) {
return new Builder<>(comparator);
}
/**
* Returns a builder that creates immutable sorted sets whose elements are ordered by the reverse
* of their natural ordering.
*/
public static <E extends Comparable<?>> Builder<E> reverseOrder() {
return new Builder<>(Collections.reverseOrder());
}
/**
* Returns a builder that creates immutable sorted sets whose elements are ordered by their
* natural ordering. The sorted sets use {@link Ordering#natural()} as the comparator. This method
* provides more type-safety than {@link #builder}, as it can be called only for classes that
* implement {@link Comparable}.
*/
public static <E extends Comparable<?>> Builder<E> naturalOrder() {
return new Builder<>(Ordering.natural());
}
/**
* A builder for creating immutable sorted set instances, especially {@code public static final}
* sets ("constant sets"), with a given comparator. Example:
*
* {@snippet :
* public static final ImmutableSortedSet<Number> LUCKY_NUMBERS =
* new ImmutableSortedSet.Builder<Number>(ODDS_FIRST_COMPARATOR)
* .addAll(SINGLE_DIGIT_PRIMES)
* .add(42)
* .build();
* }
*
* <p>Builder instances can be reused; it is safe to call {@link #build} multiple times to build
* multiple sets in series. Each set is a superset of the set created before it.
*
* @since 2.0
*/
public static final class Builder<E> extends ImmutableSet.Builder<E> {
private final Comparator<? super E> comparator;
/**
* Creates a new builder. The returned builder is equivalent to the builder generated by {@link
* ImmutableSortedSet#orderedBy}.
*/
/*
* TODO(cpovirk): use Object[] instead of E[] in the mainline? (The backport is different and
* doesn't need this suppression, but we keep it to minimize diffs.) Generally be more clear
* about when we have an Object[] vs. a Comparable[] or other array type in internalArray? If we
* used Object[], we might be able to optimize toArray() to use clone() sometimes. (See
* cl/592273615 and cl/592273683.)
*/
public Builder(Comparator<? super E> comparator) {
this.comparator = checkNotNull(comparator);
}
Builder(Comparator<? super E> comparator, int expectedKeys) {
super(expectedKeys, false);
this.comparator = checkNotNull(comparator);
}
/**
* Adds {@code element} to the {@code ImmutableSortedSet}. If the {@code ImmutableSortedSet}
* already contains {@code element}, then {@code add} has no effect. (only the previously added
* element is retained).
*
* @param element the element to add
* @return this {@code Builder} object
* @throws NullPointerException if {@code element} is null
*/
@CanIgnoreReturnValue
@Override
public Builder<E> add(E element) {
super.add(element);
return this;
}
/**
* Adds each element of {@code elements} to the {@code ImmutableSortedSet}, ignoring duplicate
* elements (only the first duplicate element is added).
*
* @param elements the elements to add
* @return this {@code Builder} object
* @throws NullPointerException if {@code elements} contains a null element
*/
@CanIgnoreReturnValue
@Override
public Builder<E> add(E... elements) {
super.add(elements);
return this;
}
/**
* Adds each element of {@code elements} to the {@code ImmutableSortedSet}, ignoring duplicate
* elements (only the first duplicate element is added).
*
* @param elements the elements to add to the {@code ImmutableSortedSet}
* @return this {@code Builder} object
* @throws NullPointerException if {@code elements} contains a null element
*/
@CanIgnoreReturnValue
@Override
public Builder<E> addAll(Iterable<? extends E> elements) {
super.addAll(elements);
return this;
}
/**
* Adds each element of {@code elements} to the {@code ImmutableSortedSet}, ignoring duplicate
* elements (only the first duplicate element is added).
*
* @param elements the elements to add to the {@code ImmutableSortedSet}
* @return this {@code Builder} object
* @throws NullPointerException if {@code elements} contains a null element
*/
@CanIgnoreReturnValue
@Override
public Builder<E> addAll(Iterator<? extends E> elements) {
super.addAll(elements);
return this;
}
@CanIgnoreReturnValue
@Override
Builder<E> combine(ImmutableSet.Builder<E> builder) {
super.combine(builder);
return this;
}
/**
* Returns a newly-created {@code ImmutableSortedSet} based on the contents of the {@code
* Builder} and its comparator.
*/
@Override
public ImmutableSortedSet<E> build() {
@SuppressWarnings("unchecked") // we're careful to put only E's in here
E[] contentsArray = (E[]) contents;
ImmutableSortedSet<E> result = construct(comparator, size, contentsArray);
this.size = result.size(); // we eliminated duplicates in-place in contentsArray
this.forceCopy = true;
return result;
}
}
int unsafeCompare(Object a, @Nullable Object b) {
return unsafeCompare(comparator, a, b);
}
static int unsafeCompare(Comparator<?> comparator, Object a, @Nullable Object b) {
// Pretend the comparator can compare anything. If it turns out it can't
// compare a and b, we should get a CCE or NPE on the subsequent line. Only methods
// that are spec'd to throw CCE and NPE should call this.
@SuppressWarnings({"unchecked", "nullness"})
Comparator<@Nullable Object> unsafeComparator = (Comparator<@Nullable Object>) comparator;
return unsafeComparator.compare(a, b);
}
final transient Comparator<? super E> comparator;
ImmutableSortedSet(Comparator<? super E> comparator) {
this.comparator = comparator;
}
/**
* Returns the comparator that orders the elements, which is {@link Ordering#natural()} when the
* natural ordering of the elements is used. Note that its behavior is not consistent with {@link
* SortedSet#comparator()}, which returns {@code null} to indicate natural ordering.
*/
@Override
public Comparator<? super E> comparator() {
return comparator;
}
@Override // needed to unify the iterator() methods in Collection and SortedIterable
public abstract UnmodifiableIterator<E> iterator();
/**
* {@inheritDoc}
*
* <p>This method returns a serializable {@code ImmutableSortedSet}.
*
* <p>The {@link SortedSet#headSet} documentation states that a subset of a subset throws an
* {@link IllegalArgumentException} if passed a {@code toElement} greater than an earlier {@code
* toElement}. However, this method doesn't throw an exception in that situation, but instead
* keeps the original {@code toElement}.
*/
@Override
public ImmutableSortedSet<E> headSet(E toElement) {
return headSet(toElement, false);
}
/**
* @since 12.0
*/
@Override
public ImmutableSortedSet<E> headSet(E toElement, boolean inclusive) {
return headSetImpl(checkNotNull(toElement), inclusive);
}
/**
* {@inheritDoc}
*
* <p>This method returns a serializable {@code ImmutableSortedSet}.
*
* <p>The {@link SortedSet#subSet} documentation states that a subset of a subset throws an {@link
* IllegalArgumentException} if passed a {@code fromElement} smaller than an earlier {@code
* fromElement}. However, this method doesn't throw an exception in that situation, but instead
* keeps the original {@code fromElement}. Similarly, this method keeps the original {@code
* toElement}, instead of throwing an exception, if passed a {@code toElement} greater than an
* earlier {@code toElement}.
*/
@Override
public ImmutableSortedSet<E> subSet(E fromElement, E toElement) {
return subSet(fromElement, true, toElement, false);
}
/**
* @since 12.0
*/
@GwtIncompatible // NavigableSet
@Override
public ImmutableSortedSet<E> subSet(
E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) {
checkNotNull(fromElement);
checkNotNull(toElement);
checkArgument(comparator.compare(fromElement, toElement) <= 0);
return subSetImpl(fromElement, fromInclusive, toElement, toInclusive);
}
/**
* {@inheritDoc}
*
* <p>This method returns a serializable {@code ImmutableSortedSet}.
*
* <p>The {@link SortedSet#tailSet} documentation states that a subset of a subset throws an
* {@link IllegalArgumentException} if passed a {@code fromElement} smaller than an earlier {@code
* fromElement}. However, this method doesn't throw an exception in that situation, but instead
* keeps the original {@code fromElement}.
*/
@Override
public ImmutableSortedSet<E> tailSet(E fromElement) {
return tailSet(fromElement, true);
}
/**
* @since 12.0
*/
@Override
public ImmutableSortedSet<E> tailSet(E fromElement, boolean inclusive) {
return tailSetImpl(checkNotNull(fromElement), inclusive);
}
/*
* These methods perform most headSet, subSet, and tailSet logic, besides
* parameter validation.
*/
abstract ImmutableSortedSet<E> headSetImpl(E toElement, boolean inclusive);
abstract ImmutableSortedSet<E> subSetImpl(
E fromElement, boolean fromInclusive, E toElement, boolean toInclusive);
abstract ImmutableSortedSet<E> tailSetImpl(E fromElement, boolean inclusive);
/**
* @since 12.0
*/
@GwtIncompatible // NavigableSet
@Override
public @Nullable E lower(E e) {
return Iterators.<@Nullable E>getNext(headSet(e, false).descendingIterator(), null);
}
/**
* @since 12.0
*/
@Override
public @Nullable E floor(E e) {
return Iterators.<@Nullable E>getNext(headSet(e, true).descendingIterator(), null);
}
/**
* @since 12.0
*/
@Override
public @Nullable E ceiling(E e) {
return Iterables.<@Nullable E>getFirst(tailSet(e, true), null);
}
/**
* @since 12.0
*/
@GwtIncompatible // NavigableSet
@Override
public @Nullable E higher(E e) {
return Iterables.<@Nullable E>getFirst(tailSet(e, false), null);
}
@Override
public E first() {
return iterator().next();
}
@Override
public E last() {
return descendingIterator().next();
}
/**
* Guaranteed to throw an exception and leave the set unmodified.
*
* @since 12.0
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/
@CanIgnoreReturnValue
@Deprecated
@GwtIncompatible // NavigableSet
@Override
@DoNotCall("Always throws UnsupportedOperationException")
public final @Nullable E pollFirst() {
throw new UnsupportedOperationException();
}
/**
* Guaranteed to throw an exception and leave the set unmodified.
*
* @since 12.0
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/
@CanIgnoreReturnValue
@Deprecated
@GwtIncompatible // NavigableSet
@Override
@DoNotCall("Always throws UnsupportedOperationException")
public final @Nullable E pollLast() {
throw new UnsupportedOperationException();
}
@GwtIncompatible // NavigableSet
@LazyInit
transient @Nullable ImmutableSortedSet<E> descendingSet;
/**
* @since 12.0
*/
@GwtIncompatible // NavigableSet
@Override
public ImmutableSortedSet<E> descendingSet() {
// racy single-check idiom
ImmutableSortedSet<E> result = descendingSet;
if (result == null) {
result = descendingSet = createDescendingSet();
result.descendingSet = this;
}
return result;
}
// Most classes should implement this as new DescendingImmutableSortedSet<E>(this),
// but we push down that implementation because ProGuard can't eliminate it even when it's always
// overridden.
@GwtIncompatible // NavigableSet
abstract ImmutableSortedSet<E> createDescendingSet();
/**
* @since 12.0
*/
@GwtIncompatible // NavigableSet
@Override
public abstract UnmodifiableIterator<E> descendingIterator();
/** Returns the position of an element within the set, or -1 if not present. */
abstract int indexOf(@Nullable Object target);
/*
* This class is used to serialize all ImmutableSortedSet instances,
* regardless of implementation type. It captures their "logical contents"
* only. This is necessary to ensure that the existence of a particular
* implementation type is an implementation detail.
*/
@J2ktIncompatible // serialization
private static final class SerializedForm<E> implements Serializable {
final Comparator<? super E> comparator;
final Object[] elements;
SerializedForm(Comparator<? super E> comparator, Object[] elements) {
this.comparator = comparator;
this.elements = elements;
}
@SuppressWarnings("unchecked")
Object readResolve() {
return new Builder<E>(comparator).add((E[]) elements).build();
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
@J2ktIncompatible // serialization
private void readObject(ObjectInputStream unused) throws InvalidObjectException {
throw new InvalidObjectException("Use SerializedForm");
}
@Override
@J2ktIncompatible // serialization
Object writeReplace() {
return new SerializedForm<E>(comparator, toArray());
}
/**
* Not supported. Use {@link #toImmutableSortedSet} instead. This method exists only to hide
* {@link ImmutableSet#toImmutableSet} from consumers of {@code ImmutableSortedSet}.
*
* @throws UnsupportedOperationException always
* @deprecated Use {@link ImmutableSortedSet#toImmutableSortedSet}.
* @since 33.2.0 (available since 21.0 in guava-jre)
*/
@DoNotCall("Use toImmutableSortedSet")
@Deprecated
@IgnoreJRERequirement // Users will use this only if they're already using streams.
public static <E> Collector<E, ?, ImmutableSet<E>> toImmutableSet() {
throw new UnsupportedOperationException();
}
/**
* Not supported. Use {@link #naturalOrder}, which offers better type-safety, instead. This method
* exists only to hide {@link ImmutableSet#builder} from consumers of {@code ImmutableSortedSet}.
*
* @throws UnsupportedOperationException always
* @deprecated Use {@link ImmutableSortedSet#naturalOrder}, which offers better type-safety.
*/
@DoNotCall("Use naturalOrder")
@Deprecated
public static <E> ImmutableSortedSet.Builder<E> builder() {
throw new UnsupportedOperationException();
}
/**
* Not supported. This method exists only to hide {@link ImmutableSet#builderWithExpectedSize}
* from consumers of {@code ImmutableSortedSet}.
*
* @throws UnsupportedOperationException always
* @deprecated Not supported by ImmutableSortedSet.
*/
@DoNotCall("Use naturalOrder (which does not accept an expected size)")
@Deprecated
public static <E> ImmutableSortedSet.Builder<E> builderWithExpectedSize(int expectedSize) {
throw new UnsupportedOperationException();
}
/**
* Not supported. <b>You are attempting to create a set that may contain a non-{@code Comparable}
* element.</b> Proper calls will resolve to the version in {@code ImmutableSortedSet}, not this
* dummy version.
*
* @throws UnsupportedOperationException always
* @deprecated <b>Pass a parameter of type {@code Comparable} to use {@link
* ImmutableSortedSet#of(Comparable)}.</b>
*/
@DoNotCall("Pass a parameter of type Comparable")
@Deprecated
public static <E> ImmutableSortedSet<E> of(E e1) {
throw new UnsupportedOperationException();
}
/**
* Not supported. <b>You are attempting to create a set that may contain a non-{@code Comparable}
* element.</b> Proper calls will resolve to the version in {@code ImmutableSortedSet}, not this
* dummy version.
*
* @throws UnsupportedOperationException always
* @deprecated <b>Pass the parameters of type {@code Comparable} to use {@link
* ImmutableSortedSet#of(Comparable, Comparable)}.</b>
*/
@DoNotCall("Pass parameters of type Comparable")
@Deprecated
public static <E> ImmutableSortedSet<E> of(E e1, E e2) {
throw new UnsupportedOperationException();
}
/**
* Not supported. <b>You are attempting to create a set that may contain a non-{@code Comparable}
* element.</b> Proper calls will resolve to the version in {@code ImmutableSortedSet}, not this
* dummy version.
*
* @throws UnsupportedOperationException always
* @deprecated <b>Pass the parameters of type {@code Comparable} to use {@link
* ImmutableSortedSet#of(Comparable, Comparable, Comparable)}.</b>
*/
@DoNotCall("Pass parameters of type Comparable")
@Deprecated
public static <E> ImmutableSortedSet<E> of(E e1, E e2, E e3) {
throw new UnsupportedOperationException();
}
/**
* Not supported. <b>You are attempting to create a set that may contain a non-{@code Comparable}
* element.</b> Proper calls will resolve to the version in {@code ImmutableSortedSet}, not this
* dummy version.
*
* @throws UnsupportedOperationException always
* @deprecated <b>Pass the parameters of type {@code Comparable} to use {@link
* ImmutableSortedSet#of(Comparable, Comparable, Comparable, Comparable)}. </b>
*/
@DoNotCall("Pass parameters of type Comparable")
@Deprecated
public static <E> ImmutableSortedSet<E> of(E e1, E e2, E e3, E e4) {
throw new UnsupportedOperationException();
}
/**
 * Not supported. <b>You are attempting to create a set that may contain a non-{@code Comparable}
 * element.</b> Proper calls will resolve to the version in {@code ImmutableSortedSet}, not this
 * dummy version.
 *
 * @throws UnsupportedOperationException always
 * @deprecated <b>Pass the parameters of type {@code Comparable} to use {@link
 *     ImmutableSortedSet#of(Comparable, Comparable, Comparable, Comparable, Comparable)}.</b>
 */
@DoNotCall("Pass parameters of type Comparable")
@Deprecated
public static <E> ImmutableSortedSet<E> of(E e1, E e2, E e3, E e4, E e5) {
  // Intentional compile-time trap for non-Comparable elements; never succeeds.
  throw new UnsupportedOperationException();
}
/**
 * Not supported. <b>You are attempting to create a set that may contain a non-{@code Comparable}
 * element.</b> Proper calls will resolve to the version in {@code ImmutableSortedSet}, not this
 * dummy version.
 *
 * @throws UnsupportedOperationException always
 * @deprecated <b>Pass the parameters of type {@code Comparable} to use {@link
 *     ImmutableSortedSet#of(Comparable, Comparable, Comparable, Comparable, Comparable,
 *     Comparable, Comparable...)}.</b>
 */
@DoNotCall("Pass parameters of type Comparable")
@Deprecated
public static <E> ImmutableSortedSet<E> of(E e1, E e2, E e3, E e4, E e5, E e6, E... remaining) {
  // Intentional compile-time trap for non-Comparable elements; never succeeds.
  throw new UnsupportedOperationException();
}
/**
 * Not supported. <b>You are attempting to create a set that may contain non-{@code Comparable}
 * elements.</b> Proper calls will resolve to the version in {@code ImmutableSortedSet}, not this
 * dummy version.
 *
 * @throws UnsupportedOperationException always
 * @deprecated <b>Pass parameters of type {@code Comparable} to use {@link
 *     ImmutableSortedSet#copyOf(Comparable[])}.</b>
 */
@DoNotCall("Pass parameters of type Comparable")
@Deprecated
// The usage of "Z" here works around bugs in Javadoc (JDK-8318093) and JDiff.
public static <Z> ImmutableSortedSet<Z> copyOf(Z[] elements) {
  // Intentional compile-time trap for non-Comparable element arrays; never succeeds.
  throw new UnsupportedOperationException();
}

// Arbitrary fixed serial version; excluded from GWT/J2KT, where Java
// serialization is unsupported.
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0xdecaf;
}
|
googleapis/google-cloud-java | 37,743 | java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/NetworkAttachmentsStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.NetworkAttachmentsClient.AggregatedListPagedResponse;
import static com.google.cloud.compute.v1.NetworkAttachmentsClient.ListPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.httpjson.ProtoOperationTransformers;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.AggregatedListNetworkAttachmentsRequest;
import com.google.cloud.compute.v1.DeleteNetworkAttachmentRequest;
import com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest;
import com.google.cloud.compute.v1.GetNetworkAttachmentRequest;
import com.google.cloud.compute.v1.InsertNetworkAttachmentRequest;
import com.google.cloud.compute.v1.ListNetworkAttachmentsRequest;
import com.google.cloud.compute.v1.NetworkAttachment;
import com.google.cloud.compute.v1.NetworkAttachmentAggregatedList;
import com.google.cloud.compute.v1.NetworkAttachmentList;
import com.google.cloud.compute.v1.NetworkAttachmentsScopedList;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.PatchNetworkAttachmentRequest;
import com.google.cloud.compute.v1.Policy;
import com.google.cloud.compute.v1.SetIamPolicyNetworkAttachmentRequest;
import com.google.cloud.compute.v1.TestIamPermissionsNetworkAttachmentRequest;
import com.google.cloud.compute.v1.TestPermissionsResponse;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link NetworkAttachmentsStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (compute.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of get:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* NetworkAttachmentsStubSettings.Builder networkAttachmentsSettingsBuilder =
* NetworkAttachmentsStubSettings.newBuilder();
* networkAttachmentsSettingsBuilder
* .getSettings()
* .setRetrySettings(
* networkAttachmentsSettingsBuilder
* .getSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* NetworkAttachmentsStubSettings networkAttachmentsSettings =
* networkAttachmentsSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for delete:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* NetworkAttachmentsStubSettings.Builder networkAttachmentsSettingsBuilder =
* NetworkAttachmentsStubSettings.newBuilder();
 * TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
 * networkAttachmentsSettingsBuilder
 *     .deleteOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
 *     .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class NetworkAttachmentsStubSettings extends StubSettings<NetworkAttachmentsStubSettings> {
/** The default scopes of the service. */
private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
ImmutableList.<String>builder()
.add("https://www.googleapis.com/auth/compute")
.add("https://www.googleapis.com/auth/cloud-platform")
.build();
private final PagedCallSettings<
AggregatedListNetworkAttachmentsRequest,
NetworkAttachmentAggregatedList,
AggregatedListPagedResponse>
aggregatedListSettings;
private final UnaryCallSettings<DeleteNetworkAttachmentRequest, Operation> deleteSettings;
private final OperationCallSettings<DeleteNetworkAttachmentRequest, Operation, Operation>
deleteOperationSettings;
private final UnaryCallSettings<GetNetworkAttachmentRequest, NetworkAttachment> getSettings;
private final UnaryCallSettings<GetIamPolicyNetworkAttachmentRequest, Policy>
getIamPolicySettings;
private final UnaryCallSettings<InsertNetworkAttachmentRequest, Operation> insertSettings;
private final OperationCallSettings<InsertNetworkAttachmentRequest, Operation, Operation>
insertOperationSettings;
private final PagedCallSettings<
ListNetworkAttachmentsRequest, NetworkAttachmentList, ListPagedResponse>
listSettings;
private final UnaryCallSettings<PatchNetworkAttachmentRequest, Operation> patchSettings;
private final OperationCallSettings<PatchNetworkAttachmentRequest, Operation, Operation>
patchOperationSettings;
private final UnaryCallSettings<SetIamPolicyNetworkAttachmentRequest, Policy>
setIamPolicySettings;
private final UnaryCallSettings<
TestIamPermissionsNetworkAttachmentRequest, TestPermissionsResponse>
testIamPermissionsSettings;
private static final PagedListDescriptor<
AggregatedListNetworkAttachmentsRequest,
NetworkAttachmentAggregatedList,
Map.Entry<String, NetworkAttachmentsScopedList>>
AGGREGATED_LIST_PAGE_STR_DESC =
new PagedListDescriptor<
AggregatedListNetworkAttachmentsRequest,
NetworkAttachmentAggregatedList,
Map.Entry<String, NetworkAttachmentsScopedList>>() {
@Override
public String emptyToken() {
return "";
}
@Override
public AggregatedListNetworkAttachmentsRequest injectToken(
AggregatedListNetworkAttachmentsRequest payload, String token) {
return AggregatedListNetworkAttachmentsRequest.newBuilder(payload)
.setPageToken(token)
.build();
}
@Override
public AggregatedListNetworkAttachmentsRequest injectPageSize(
AggregatedListNetworkAttachmentsRequest payload, int pageSize) {
return AggregatedListNetworkAttachmentsRequest.newBuilder(payload)
.setMaxResults(pageSize)
.build();
}
@Override
public Integer extractPageSize(AggregatedListNetworkAttachmentsRequest payload) {
return payload.getMaxResults();
}
@Override
public String extractNextToken(NetworkAttachmentAggregatedList payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<Map.Entry<String, NetworkAttachmentsScopedList>> extractResources(
NetworkAttachmentAggregatedList payload) {
return payload.getItemsMap().entrySet();
}
};
private static final PagedListDescriptor<
ListNetworkAttachmentsRequest, NetworkAttachmentList, NetworkAttachment>
LIST_PAGE_STR_DESC =
new PagedListDescriptor<
ListNetworkAttachmentsRequest, NetworkAttachmentList, NetworkAttachment>() {
@Override
public String emptyToken() {
return "";
}
@Override
public ListNetworkAttachmentsRequest injectToken(
ListNetworkAttachmentsRequest payload, String token) {
return ListNetworkAttachmentsRequest.newBuilder(payload).setPageToken(token).build();
}
@Override
public ListNetworkAttachmentsRequest injectPageSize(
ListNetworkAttachmentsRequest payload, int pageSize) {
return ListNetworkAttachmentsRequest.newBuilder(payload)
.setMaxResults(pageSize)
.build();
}
@Override
public Integer extractPageSize(ListNetworkAttachmentsRequest payload) {
return payload.getMaxResults();
}
@Override
public String extractNextToken(NetworkAttachmentList payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<NetworkAttachment> extractResources(NetworkAttachmentList payload) {
return payload.getItemsList();
}
};
private static final PagedListResponseFactory<
AggregatedListNetworkAttachmentsRequest,
NetworkAttachmentAggregatedList,
AggregatedListPagedResponse>
AGGREGATED_LIST_PAGE_STR_FACT =
new PagedListResponseFactory<
AggregatedListNetworkAttachmentsRequest,
NetworkAttachmentAggregatedList,
AggregatedListPagedResponse>() {
@Override
public ApiFuture<AggregatedListPagedResponse> getFuturePagedResponse(
UnaryCallable<
AggregatedListNetworkAttachmentsRequest, NetworkAttachmentAggregatedList>
callable,
AggregatedListNetworkAttachmentsRequest request,
ApiCallContext context,
ApiFuture<NetworkAttachmentAggregatedList> futureResponse) {
PageContext<
AggregatedListNetworkAttachmentsRequest,
NetworkAttachmentAggregatedList,
Map.Entry<String, NetworkAttachmentsScopedList>>
pageContext =
PageContext.create(callable, AGGREGATED_LIST_PAGE_STR_DESC, request, context);
return AggregatedListPagedResponse.createAsync(pageContext, futureResponse);
}
};
private static final PagedListResponseFactory<
ListNetworkAttachmentsRequest, NetworkAttachmentList, ListPagedResponse>
LIST_PAGE_STR_FACT =
new PagedListResponseFactory<
ListNetworkAttachmentsRequest, NetworkAttachmentList, ListPagedResponse>() {
@Override
public ApiFuture<ListPagedResponse> getFuturePagedResponse(
UnaryCallable<ListNetworkAttachmentsRequest, NetworkAttachmentList> callable,
ListNetworkAttachmentsRequest request,
ApiCallContext context,
ApiFuture<NetworkAttachmentList> futureResponse) {
PageContext<ListNetworkAttachmentsRequest, NetworkAttachmentList, NetworkAttachment>
pageContext = PageContext.create(callable, LIST_PAGE_STR_DESC, request, context);
return ListPagedResponse.createAsync(pageContext, futureResponse);
}
};
/** Returns the object with the settings used for calls to aggregatedList. */
public PagedCallSettings<
AggregatedListNetworkAttachmentsRequest,
NetworkAttachmentAggregatedList,
AggregatedListPagedResponse>
aggregatedListSettings() {
return aggregatedListSettings;
}
/** Returns the object with the settings used for calls to delete. */
public UnaryCallSettings<DeleteNetworkAttachmentRequest, Operation> deleteSettings() {
return deleteSettings;
}
/** Returns the object with the settings used for calls to delete. */
public OperationCallSettings<DeleteNetworkAttachmentRequest, Operation, Operation>
deleteOperationSettings() {
return deleteOperationSettings;
}
/** Returns the object with the settings used for calls to get. */
public UnaryCallSettings<GetNetworkAttachmentRequest, NetworkAttachment> getSettings() {
return getSettings;
}
/** Returns the object with the settings used for calls to getIamPolicy. */
public UnaryCallSettings<GetIamPolicyNetworkAttachmentRequest, Policy> getIamPolicySettings() {
return getIamPolicySettings;
}
/** Returns the object with the settings used for calls to insert. */
public UnaryCallSettings<InsertNetworkAttachmentRequest, Operation> insertSettings() {
return insertSettings;
}
/** Returns the object with the settings used for calls to insert. */
public OperationCallSettings<InsertNetworkAttachmentRequest, Operation, Operation>
insertOperationSettings() {
return insertOperationSettings;
}
/** Returns the object with the settings used for calls to list. */
public PagedCallSettings<ListNetworkAttachmentsRequest, NetworkAttachmentList, ListPagedResponse>
listSettings() {
return listSettings;
}
/** Returns the object with the settings used for calls to patch. */
public UnaryCallSettings<PatchNetworkAttachmentRequest, Operation> patchSettings() {
return patchSettings;
}
/** Returns the object with the settings used for calls to patch. */
public OperationCallSettings<PatchNetworkAttachmentRequest, Operation, Operation>
patchOperationSettings() {
return patchOperationSettings;
}
/** Returns the object with the settings used for calls to setIamPolicy. */
public UnaryCallSettings<SetIamPolicyNetworkAttachmentRequest, Policy> setIamPolicySettings() {
return setIamPolicySettings;
}
/** Returns the object with the settings used for calls to testIamPermissions. */
public UnaryCallSettings<TestIamPermissionsNetworkAttachmentRequest, TestPermissionsResponse>
testIamPermissionsSettings() {
return testIamPermissionsSettings;
}
public NetworkAttachmentsStub createStub() throws IOException {
if (getTransportChannelProvider()
.getTransportName()
.equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
return HttpJsonNetworkAttachmentsStub.create(this);
}
throw new UnsupportedOperationException(
String.format(
"Transport not supported: %s", getTransportChannelProvider().getTransportName()));
}
/** Returns the default service name. */
@Override
public String getServiceName() {
return "compute";
}
/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
return InstantiatingExecutorProvider.newBuilder();
}
/** Returns the default service endpoint. */
@ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return "compute.googleapis.com:443";
}
/** Returns the default mTLS service endpoint. */
public static String getDefaultMtlsEndpoint() {
return "compute.mtls.googleapis.com:443";
}
/** Returns the default service scopes. */
public static List<String> getDefaultServiceScopes() {
return DEFAULT_SERVICE_SCOPES;
}
/** Returns a builder for the default credentials for this service. */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
return GoogleCredentialsProvider.newBuilder()
.setScopesToApply(DEFAULT_SERVICE_SCOPES)
.setUseJwtAccessWithScope(true);
}
/** Returns a builder for the default ChannelProvider for this service. */
public static InstantiatingHttpJsonChannelProvider.Builder
defaultHttpJsonTransportProviderBuilder() {
return InstantiatingHttpJsonChannelProvider.newBuilder();
}
public static TransportChannelProvider defaultTransportChannelProvider() {
return defaultHttpJsonTransportProviderBuilder().build();
}
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
return ApiClientHeaderProvider.newBuilder()
.setGeneratedLibToken(
"gapic", GaxProperties.getLibraryVersion(NetworkAttachmentsStubSettings.class))
.setTransportToken(
GaxHttpJsonProperties.getHttpJsonTokenName(),
GaxHttpJsonProperties.getHttpJsonVersion());
}
/** Returns a new builder for this class. */
public static Builder newBuilder() {
return Builder.createDefault();
}
/** Returns a new builder for this class. */
public static Builder newBuilder(ClientContext clientContext) {
return new Builder(clientContext);
}
/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
return new Builder(this);
}
protected NetworkAttachmentsStubSettings(Builder settingsBuilder) throws IOException {
super(settingsBuilder);
aggregatedListSettings = settingsBuilder.aggregatedListSettings().build();
deleteSettings = settingsBuilder.deleteSettings().build();
deleteOperationSettings = settingsBuilder.deleteOperationSettings().build();
getSettings = settingsBuilder.getSettings().build();
getIamPolicySettings = settingsBuilder.getIamPolicySettings().build();
insertSettings = settingsBuilder.insertSettings().build();
insertOperationSettings = settingsBuilder.insertOperationSettings().build();
listSettings = settingsBuilder.listSettings().build();
patchSettings = settingsBuilder.patchSettings().build();
patchOperationSettings = settingsBuilder.patchOperationSettings().build();
setIamPolicySettings = settingsBuilder.setIamPolicySettings().build();
testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build();
}
/** Builder for NetworkAttachmentsStubSettings. */
public static class Builder
extends StubSettings.Builder<NetworkAttachmentsStubSettings, Builder> {
private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
private final PagedCallSettings.Builder<
AggregatedListNetworkAttachmentsRequest,
NetworkAttachmentAggregatedList,
AggregatedListPagedResponse>
aggregatedListSettings;
private final UnaryCallSettings.Builder<DeleteNetworkAttachmentRequest, Operation>
deleteSettings;
private final OperationCallSettings.Builder<
DeleteNetworkAttachmentRequest, Operation, Operation>
deleteOperationSettings;
private final UnaryCallSettings.Builder<GetNetworkAttachmentRequest, NetworkAttachment>
getSettings;
private final UnaryCallSettings.Builder<GetIamPolicyNetworkAttachmentRequest, Policy>
getIamPolicySettings;
private final UnaryCallSettings.Builder<InsertNetworkAttachmentRequest, Operation>
insertSettings;
private final OperationCallSettings.Builder<
InsertNetworkAttachmentRequest, Operation, Operation>
insertOperationSettings;
private final PagedCallSettings.Builder<
ListNetworkAttachmentsRequest, NetworkAttachmentList, ListPagedResponse>
listSettings;
private final UnaryCallSettings.Builder<PatchNetworkAttachmentRequest, Operation> patchSettings;
private final OperationCallSettings.Builder<PatchNetworkAttachmentRequest, Operation, Operation>
patchOperationSettings;
private final UnaryCallSettings.Builder<SetIamPolicyNetworkAttachmentRequest, Policy>
setIamPolicySettings;
private final UnaryCallSettings.Builder<
TestIamPermissionsNetworkAttachmentRequest, TestPermissionsResponse>
testIamPermissionsSettings;
private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
RETRYABLE_CODE_DEFINITIONS;
static {
ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
ImmutableMap.builder();
definitions.put(
"retry_policy_0_codes",
ImmutableSet.copyOf(
Lists.<StatusCode.Code>newArrayList(
StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
definitions.put(
"no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
RETRYABLE_CODE_DEFINITIONS = definitions.build();
}
private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;
static {
ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
RetrySettings settings = null;
settings =
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(100L))
.setRetryDelayMultiplier(1.3)
.setMaxRetryDelayDuration(Duration.ofMillis(60000L))
.setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build();
definitions.put("retry_policy_0_params", settings);
settings =
RetrySettings.newBuilder()
.setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build();
definitions.put("no_retry_1_params", settings);
RETRY_PARAM_DEFINITIONS = definitions.build();
}
protected Builder() {
this(((ClientContext) null));
}
protected Builder(ClientContext clientContext) {
super(clientContext);
aggregatedListSettings = PagedCallSettings.newBuilder(AGGREGATED_LIST_PAGE_STR_FACT);
deleteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
deleteOperationSettings = OperationCallSettings.newBuilder();
getSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
getIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
insertSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
insertOperationSettings = OperationCallSettings.newBuilder();
listSettings = PagedCallSettings.newBuilder(LIST_PAGE_STR_FACT);
patchSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
patchOperationSettings = OperationCallSettings.newBuilder();
setIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
aggregatedListSettings,
deleteSettings,
getSettings,
getIamPolicySettings,
insertSettings,
listSettings,
patchSettings,
setIamPolicySettings,
testIamPermissionsSettings);
initDefaults(this);
}
protected Builder(NetworkAttachmentsStubSettings settings) {
super(settings);
aggregatedListSettings = settings.aggregatedListSettings.toBuilder();
deleteSettings = settings.deleteSettings.toBuilder();
deleteOperationSettings = settings.deleteOperationSettings.toBuilder();
getSettings = settings.getSettings.toBuilder();
getIamPolicySettings = settings.getIamPolicySettings.toBuilder();
insertSettings = settings.insertSettings.toBuilder();
insertOperationSettings = settings.insertOperationSettings.toBuilder();
listSettings = settings.listSettings.toBuilder();
patchSettings = settings.patchSettings.toBuilder();
patchOperationSettings = settings.patchOperationSettings.toBuilder();
setIamPolicySettings = settings.setIamPolicySettings.toBuilder();
testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder();
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
aggregatedListSettings,
deleteSettings,
getSettings,
getIamPolicySettings,
insertSettings,
listSettings,
patchSettings,
setIamPolicySettings,
testIamPermissionsSettings);
}
private static Builder createDefault() {
Builder builder = new Builder(((ClientContext) null));
builder.setTransportChannelProvider(defaultTransportChannelProvider());
builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
builder.setSwitchToMtlsEndpointAllowed(true);
return initDefaults(builder);
}
private static Builder initDefaults(Builder builder) {
builder
.aggregatedListSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.deleteSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.getSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.getIamPolicySettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.insertSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.listSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.patchSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.setIamPolicySettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.testIamPermissionsSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.deleteOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<DeleteNetworkAttachmentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(500L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(20000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build()));
builder
.insertOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<InsertNetworkAttachmentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(500L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(20000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build()));
builder
.patchOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<PatchNetworkAttachmentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(500L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(20000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build()));
return builder;
}
/**
* Applies the given settings updater function to all of the unary API methods in this service.
*
* <p>Note: This method does not support applying settings to streaming methods.
*/
public Builder applyToAllUnaryMethods(
ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
return this;
}
public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
return unaryMethodSettingsBuilders;
}
/** Returns the builder for the settings used for calls to aggregatedList. */
public PagedCallSettings.Builder<
AggregatedListNetworkAttachmentsRequest,
NetworkAttachmentAggregatedList,
AggregatedListPagedResponse>
aggregatedListSettings() {
return aggregatedListSettings;
}
/** Returns the builder for the settings used for calls to delete. */
public UnaryCallSettings.Builder<DeleteNetworkAttachmentRequest, Operation> deleteSettings() {
return deleteSettings;
}
/** Returns the builder for the settings used for calls to delete. */
public OperationCallSettings.Builder<DeleteNetworkAttachmentRequest, Operation, Operation>
deleteOperationSettings() {
return deleteOperationSettings;
}
/** Returns the builder for the settings used for calls to get. */
public UnaryCallSettings.Builder<GetNetworkAttachmentRequest, NetworkAttachment> getSettings() {
return getSettings;
}
    /** Returns the builder for the settings used for unary calls to getIamPolicy. */
    public UnaryCallSettings.Builder<GetIamPolicyNetworkAttachmentRequest, Policy>
        getIamPolicySettings() {
      return getIamPolicySettings;
    }
    /** Returns the builder for the settings used for unary calls to insert. */
    public UnaryCallSettings.Builder<InsertNetworkAttachmentRequest, Operation> insertSettings() {
      return insertSettings;
    }
    /** Returns the builder for the settings used for long-running-operation calls to insert. */
    public OperationCallSettings.Builder<InsertNetworkAttachmentRequest, Operation, Operation>
        insertOperationSettings() {
      return insertOperationSettings;
    }
    /** Returns the builder for the settings used for paged calls to list. */
    public PagedCallSettings.Builder<
            ListNetworkAttachmentsRequest, NetworkAttachmentList, ListPagedResponse>
        listSettings() {
      return listSettings;
    }
    /** Returns the builder for the settings used for unary calls to patch. */
    public UnaryCallSettings.Builder<PatchNetworkAttachmentRequest, Operation> patchSettings() {
      return patchSettings;
    }
    /** Returns the builder for the settings used for long-running-operation calls to patch. */
    public OperationCallSettings.Builder<PatchNetworkAttachmentRequest, Operation, Operation>
        patchOperationSettings() {
      return patchOperationSettings;
    }
    /** Returns the builder for the settings used for unary calls to setIamPolicy. */
    public UnaryCallSettings.Builder<SetIamPolicyNetworkAttachmentRequest, Policy>
        setIamPolicySettings() {
      return setIamPolicySettings;
    }
    /** Returns the builder for the settings used for unary calls to testIamPermissions. */
    public UnaryCallSettings.Builder<
            TestIamPermissionsNetworkAttachmentRequest, TestPermissionsResponse>
        testIamPermissionsSettings() {
      return testIamPermissionsSettings;
    }
    /** Builds the immutable {@link NetworkAttachmentsStubSettings} from this builder's state. */
    @Override
    public NetworkAttachmentsStubSettings build() throws IOException {
      return new NetworkAttachmentsStubSettings(this);
    }
}
}
|
googleapis/google-cloud-java | 37,743 | java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/ServiceAttachmentsStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.ServiceAttachmentsClient.AggregatedListPagedResponse;
import static com.google.cloud.compute.v1.ServiceAttachmentsClient.ListPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.httpjson.ProtoOperationTransformers;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.AggregatedListServiceAttachmentsRequest;
import com.google.cloud.compute.v1.DeleteServiceAttachmentRequest;
import com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest;
import com.google.cloud.compute.v1.GetServiceAttachmentRequest;
import com.google.cloud.compute.v1.InsertServiceAttachmentRequest;
import com.google.cloud.compute.v1.ListServiceAttachmentsRequest;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.PatchServiceAttachmentRequest;
import com.google.cloud.compute.v1.Policy;
import com.google.cloud.compute.v1.ServiceAttachment;
import com.google.cloud.compute.v1.ServiceAttachmentAggregatedList;
import com.google.cloud.compute.v1.ServiceAttachmentList;
import com.google.cloud.compute.v1.ServiceAttachmentsScopedList;
import com.google.cloud.compute.v1.SetIamPolicyServiceAttachmentRequest;
import com.google.cloud.compute.v1.TestIamPermissionsServiceAttachmentRequest;
import com.google.cloud.compute.v1.TestPermissionsResponse;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link ServiceAttachmentsStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (compute.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of get:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* ServiceAttachmentsStubSettings.Builder serviceAttachmentsSettingsBuilder =
* ServiceAttachmentsStubSettings.newBuilder();
* serviceAttachmentsSettingsBuilder
* .getSettings()
* .setRetrySettings(
* serviceAttachmentsSettingsBuilder
* .getSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* ServiceAttachmentsStubSettings serviceAttachmentsSettings =
* serviceAttachmentsSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for delete:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* ServiceAttachmentsStubSettings.Builder serviceAttachmentsSettingsBuilder =
* ServiceAttachmentsStubSettings.newBuilder();
* TimedRetryAlgorithm timedRetryAlgorithm =
 * OperationTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
* serviceAttachmentsSettingsBuilder
 *     .deleteOperationSettings()
* .setPollingAlgorithm(timedRetryAlgorithm)
* .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class ServiceAttachmentsStubSettings extends StubSettings<ServiceAttachmentsStubSettings> {
/** The default scopes of the service. */
private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
ImmutableList.<String>builder()
.add("https://www.googleapis.com/auth/compute")
.add("https://www.googleapis.com/auth/cloud-platform")
.build();
private final PagedCallSettings<
AggregatedListServiceAttachmentsRequest,
ServiceAttachmentAggregatedList,
AggregatedListPagedResponse>
aggregatedListSettings;
private final UnaryCallSettings<DeleteServiceAttachmentRequest, Operation> deleteSettings;
private final OperationCallSettings<DeleteServiceAttachmentRequest, Operation, Operation>
deleteOperationSettings;
private final UnaryCallSettings<GetServiceAttachmentRequest, ServiceAttachment> getSettings;
private final UnaryCallSettings<GetIamPolicyServiceAttachmentRequest, Policy>
getIamPolicySettings;
private final UnaryCallSettings<InsertServiceAttachmentRequest, Operation> insertSettings;
private final OperationCallSettings<InsertServiceAttachmentRequest, Operation, Operation>
insertOperationSettings;
private final PagedCallSettings<
ListServiceAttachmentsRequest, ServiceAttachmentList, ListPagedResponse>
listSettings;
private final UnaryCallSettings<PatchServiceAttachmentRequest, Operation> patchSettings;
private final OperationCallSettings<PatchServiceAttachmentRequest, Operation, Operation>
patchOperationSettings;
private final UnaryCallSettings<SetIamPolicyServiceAttachmentRequest, Policy>
setIamPolicySettings;
private final UnaryCallSettings<
TestIamPermissionsServiceAttachmentRequest, TestPermissionsResponse>
testIamPermissionsSettings;
private static final PagedListDescriptor<
AggregatedListServiceAttachmentsRequest,
ServiceAttachmentAggregatedList,
Map.Entry<String, ServiceAttachmentsScopedList>>
AGGREGATED_LIST_PAGE_STR_DESC =
new PagedListDescriptor<
AggregatedListServiceAttachmentsRequest,
ServiceAttachmentAggregatedList,
Map.Entry<String, ServiceAttachmentsScopedList>>() {
@Override
public String emptyToken() {
return "";
}
@Override
public AggregatedListServiceAttachmentsRequest injectToken(
AggregatedListServiceAttachmentsRequest payload, String token) {
return AggregatedListServiceAttachmentsRequest.newBuilder(payload)
.setPageToken(token)
.build();
}
@Override
public AggregatedListServiceAttachmentsRequest injectPageSize(
AggregatedListServiceAttachmentsRequest payload, int pageSize) {
return AggregatedListServiceAttachmentsRequest.newBuilder(payload)
.setMaxResults(pageSize)
.build();
}
@Override
public Integer extractPageSize(AggregatedListServiceAttachmentsRequest payload) {
return payload.getMaxResults();
}
@Override
public String extractNextToken(ServiceAttachmentAggregatedList payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<Map.Entry<String, ServiceAttachmentsScopedList>> extractResources(
ServiceAttachmentAggregatedList payload) {
return payload.getItemsMap().entrySet();
}
};
private static final PagedListDescriptor<
ListServiceAttachmentsRequest, ServiceAttachmentList, ServiceAttachment>
LIST_PAGE_STR_DESC =
new PagedListDescriptor<
ListServiceAttachmentsRequest, ServiceAttachmentList, ServiceAttachment>() {
@Override
public String emptyToken() {
return "";
}
@Override
public ListServiceAttachmentsRequest injectToken(
ListServiceAttachmentsRequest payload, String token) {
return ListServiceAttachmentsRequest.newBuilder(payload).setPageToken(token).build();
}
@Override
public ListServiceAttachmentsRequest injectPageSize(
ListServiceAttachmentsRequest payload, int pageSize) {
return ListServiceAttachmentsRequest.newBuilder(payload)
.setMaxResults(pageSize)
.build();
}
@Override
public Integer extractPageSize(ListServiceAttachmentsRequest payload) {
return payload.getMaxResults();
}
@Override
public String extractNextToken(ServiceAttachmentList payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<ServiceAttachment> extractResources(ServiceAttachmentList payload) {
return payload.getItemsList();
}
};
private static final PagedListResponseFactory<
AggregatedListServiceAttachmentsRequest,
ServiceAttachmentAggregatedList,
AggregatedListPagedResponse>
AGGREGATED_LIST_PAGE_STR_FACT =
new PagedListResponseFactory<
AggregatedListServiceAttachmentsRequest,
ServiceAttachmentAggregatedList,
AggregatedListPagedResponse>() {
@Override
public ApiFuture<AggregatedListPagedResponse> getFuturePagedResponse(
UnaryCallable<
AggregatedListServiceAttachmentsRequest, ServiceAttachmentAggregatedList>
callable,
AggregatedListServiceAttachmentsRequest request,
ApiCallContext context,
ApiFuture<ServiceAttachmentAggregatedList> futureResponse) {
PageContext<
AggregatedListServiceAttachmentsRequest,
ServiceAttachmentAggregatedList,
Map.Entry<String, ServiceAttachmentsScopedList>>
pageContext =
PageContext.create(callable, AGGREGATED_LIST_PAGE_STR_DESC, request, context);
return AggregatedListPagedResponse.createAsync(pageContext, futureResponse);
}
};
private static final PagedListResponseFactory<
ListServiceAttachmentsRequest, ServiceAttachmentList, ListPagedResponse>
LIST_PAGE_STR_FACT =
new PagedListResponseFactory<
ListServiceAttachmentsRequest, ServiceAttachmentList, ListPagedResponse>() {
@Override
public ApiFuture<ListPagedResponse> getFuturePagedResponse(
UnaryCallable<ListServiceAttachmentsRequest, ServiceAttachmentList> callable,
ListServiceAttachmentsRequest request,
ApiCallContext context,
ApiFuture<ServiceAttachmentList> futureResponse) {
PageContext<ListServiceAttachmentsRequest, ServiceAttachmentList, ServiceAttachment>
pageContext = PageContext.create(callable, LIST_PAGE_STR_DESC, request, context);
return ListPagedResponse.createAsync(pageContext, futureResponse);
}
};
/** Returns the object with the settings used for calls to aggregatedList. */
public PagedCallSettings<
AggregatedListServiceAttachmentsRequest,
ServiceAttachmentAggregatedList,
AggregatedListPagedResponse>
aggregatedListSettings() {
return aggregatedListSettings;
}
/** Returns the object with the settings used for calls to delete. */
public UnaryCallSettings<DeleteServiceAttachmentRequest, Operation> deleteSettings() {
return deleteSettings;
}
/** Returns the object with the settings used for calls to delete. */
public OperationCallSettings<DeleteServiceAttachmentRequest, Operation, Operation>
deleteOperationSettings() {
return deleteOperationSettings;
}
/** Returns the object with the settings used for calls to get. */
public UnaryCallSettings<GetServiceAttachmentRequest, ServiceAttachment> getSettings() {
return getSettings;
}
/** Returns the object with the settings used for calls to getIamPolicy. */
public UnaryCallSettings<GetIamPolicyServiceAttachmentRequest, Policy> getIamPolicySettings() {
return getIamPolicySettings;
}
/** Returns the object with the settings used for calls to insert. */
public UnaryCallSettings<InsertServiceAttachmentRequest, Operation> insertSettings() {
return insertSettings;
}
/** Returns the object with the settings used for calls to insert. */
public OperationCallSettings<InsertServiceAttachmentRequest, Operation, Operation>
insertOperationSettings() {
return insertOperationSettings;
}
/** Returns the object with the settings used for calls to list. */
public PagedCallSettings<ListServiceAttachmentsRequest, ServiceAttachmentList, ListPagedResponse>
listSettings() {
return listSettings;
}
/** Returns the object with the settings used for calls to patch. */
public UnaryCallSettings<PatchServiceAttachmentRequest, Operation> patchSettings() {
return patchSettings;
}
/** Returns the object with the settings used for calls to patch. */
public OperationCallSettings<PatchServiceAttachmentRequest, Operation, Operation>
patchOperationSettings() {
return patchOperationSettings;
}
/** Returns the object with the settings used for calls to setIamPolicy. */
public UnaryCallSettings<SetIamPolicyServiceAttachmentRequest, Policy> setIamPolicySettings() {
return setIamPolicySettings;
}
/** Returns the object with the settings used for calls to testIamPermissions. */
public UnaryCallSettings<TestIamPermissionsServiceAttachmentRequest, TestPermissionsResponse>
testIamPermissionsSettings() {
return testIamPermissionsSettings;
}
public ServiceAttachmentsStub createStub() throws IOException {
if (getTransportChannelProvider()
.getTransportName()
.equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
return HttpJsonServiceAttachmentsStub.create(this);
}
throw new UnsupportedOperationException(
String.format(
"Transport not supported: %s", getTransportChannelProvider().getTransportName()));
}
/** Returns the default service name. */
@Override
public String getServiceName() {
return "compute";
}
/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
return InstantiatingExecutorProvider.newBuilder();
}
/** Returns the default service endpoint. */
@ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return "compute.googleapis.com:443";
}
/** Returns the default mTLS service endpoint. */
public static String getDefaultMtlsEndpoint() {
return "compute.mtls.googleapis.com:443";
}
/** Returns the default service scopes. */
public static List<String> getDefaultServiceScopes() {
return DEFAULT_SERVICE_SCOPES;
}
/** Returns a builder for the default credentials for this service. */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
return GoogleCredentialsProvider.newBuilder()
.setScopesToApply(DEFAULT_SERVICE_SCOPES)
.setUseJwtAccessWithScope(true);
}
/** Returns a builder for the default ChannelProvider for this service. */
public static InstantiatingHttpJsonChannelProvider.Builder
defaultHttpJsonTransportProviderBuilder() {
return InstantiatingHttpJsonChannelProvider.newBuilder();
}
public static TransportChannelProvider defaultTransportChannelProvider() {
return defaultHttpJsonTransportProviderBuilder().build();
}
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
return ApiClientHeaderProvider.newBuilder()
.setGeneratedLibToken(
"gapic", GaxProperties.getLibraryVersion(ServiceAttachmentsStubSettings.class))
.setTransportToken(
GaxHttpJsonProperties.getHttpJsonTokenName(),
GaxHttpJsonProperties.getHttpJsonVersion());
}
/** Returns a new builder for this class. */
public static Builder newBuilder() {
return Builder.createDefault();
}
/** Returns a new builder for this class. */
public static Builder newBuilder(ClientContext clientContext) {
return new Builder(clientContext);
}
/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
return new Builder(this);
}
protected ServiceAttachmentsStubSettings(Builder settingsBuilder) throws IOException {
super(settingsBuilder);
aggregatedListSettings = settingsBuilder.aggregatedListSettings().build();
deleteSettings = settingsBuilder.deleteSettings().build();
deleteOperationSettings = settingsBuilder.deleteOperationSettings().build();
getSettings = settingsBuilder.getSettings().build();
getIamPolicySettings = settingsBuilder.getIamPolicySettings().build();
insertSettings = settingsBuilder.insertSettings().build();
insertOperationSettings = settingsBuilder.insertOperationSettings().build();
listSettings = settingsBuilder.listSettings().build();
patchSettings = settingsBuilder.patchSettings().build();
patchOperationSettings = settingsBuilder.patchOperationSettings().build();
setIamPolicySettings = settingsBuilder.setIamPolicySettings().build();
testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build();
}
/** Builder for ServiceAttachmentsStubSettings. */
public static class Builder
extends StubSettings.Builder<ServiceAttachmentsStubSettings, Builder> {
private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
private final PagedCallSettings.Builder<
AggregatedListServiceAttachmentsRequest,
ServiceAttachmentAggregatedList,
AggregatedListPagedResponse>
aggregatedListSettings;
private final UnaryCallSettings.Builder<DeleteServiceAttachmentRequest, Operation>
deleteSettings;
private final OperationCallSettings.Builder<
DeleteServiceAttachmentRequest, Operation, Operation>
deleteOperationSettings;
private final UnaryCallSettings.Builder<GetServiceAttachmentRequest, ServiceAttachment>
getSettings;
private final UnaryCallSettings.Builder<GetIamPolicyServiceAttachmentRequest, Policy>
getIamPolicySettings;
private final UnaryCallSettings.Builder<InsertServiceAttachmentRequest, Operation>
insertSettings;
private final OperationCallSettings.Builder<
InsertServiceAttachmentRequest, Operation, Operation>
insertOperationSettings;
private final PagedCallSettings.Builder<
ListServiceAttachmentsRequest, ServiceAttachmentList, ListPagedResponse>
listSettings;
private final UnaryCallSettings.Builder<PatchServiceAttachmentRequest, Operation> patchSettings;
private final OperationCallSettings.Builder<PatchServiceAttachmentRequest, Operation, Operation>
patchOperationSettings;
private final UnaryCallSettings.Builder<SetIamPolicyServiceAttachmentRequest, Policy>
setIamPolicySettings;
private final UnaryCallSettings.Builder<
TestIamPermissionsServiceAttachmentRequest, TestPermissionsResponse>
testIamPermissionsSettings;
private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
RETRYABLE_CODE_DEFINITIONS;
static {
ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
ImmutableMap.builder();
definitions.put(
"retry_policy_0_codes",
ImmutableSet.copyOf(
Lists.<StatusCode.Code>newArrayList(
StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
definitions.put(
"no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
RETRYABLE_CODE_DEFINITIONS = definitions.build();
}
private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;
static {
ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
RetrySettings settings = null;
settings =
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(100L))
.setRetryDelayMultiplier(1.3)
.setMaxRetryDelayDuration(Duration.ofMillis(60000L))
.setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build();
definitions.put("retry_policy_0_params", settings);
settings =
RetrySettings.newBuilder()
.setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build();
definitions.put("no_retry_1_params", settings);
RETRY_PARAM_DEFINITIONS = definitions.build();
}
protected Builder() {
this(((ClientContext) null));
}
protected Builder(ClientContext clientContext) {
super(clientContext);
aggregatedListSettings = PagedCallSettings.newBuilder(AGGREGATED_LIST_PAGE_STR_FACT);
deleteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
deleteOperationSettings = OperationCallSettings.newBuilder();
getSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
getIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
insertSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
insertOperationSettings = OperationCallSettings.newBuilder();
listSettings = PagedCallSettings.newBuilder(LIST_PAGE_STR_FACT);
patchSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
patchOperationSettings = OperationCallSettings.newBuilder();
setIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
aggregatedListSettings,
deleteSettings,
getSettings,
getIamPolicySettings,
insertSettings,
listSettings,
patchSettings,
setIamPolicySettings,
testIamPermissionsSettings);
initDefaults(this);
}
protected Builder(ServiceAttachmentsStubSettings settings) {
super(settings);
aggregatedListSettings = settings.aggregatedListSettings.toBuilder();
deleteSettings = settings.deleteSettings.toBuilder();
deleteOperationSettings = settings.deleteOperationSettings.toBuilder();
getSettings = settings.getSettings.toBuilder();
getIamPolicySettings = settings.getIamPolicySettings.toBuilder();
insertSettings = settings.insertSettings.toBuilder();
insertOperationSettings = settings.insertOperationSettings.toBuilder();
listSettings = settings.listSettings.toBuilder();
patchSettings = settings.patchSettings.toBuilder();
patchOperationSettings = settings.patchOperationSettings.toBuilder();
setIamPolicySettings = settings.setIamPolicySettings.toBuilder();
testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder();
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
aggregatedListSettings,
deleteSettings,
getSettings,
getIamPolicySettings,
insertSettings,
listSettings,
patchSettings,
setIamPolicySettings,
testIamPermissionsSettings);
}
private static Builder createDefault() {
Builder builder = new Builder(((ClientContext) null));
builder.setTransportChannelProvider(defaultTransportChannelProvider());
builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
builder.setSwitchToMtlsEndpointAllowed(true);
return initDefaults(builder);
}
private static Builder initDefaults(Builder builder) {
builder
.aggregatedListSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.deleteSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.getSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.getIamPolicySettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.insertSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.listSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.patchSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.setIamPolicySettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.testIamPermissionsSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.deleteOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<DeleteServiceAttachmentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(500L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(20000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build()));
builder
.insertOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<InsertServiceAttachmentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(500L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(20000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build()));
builder
.patchOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<PatchServiceAttachmentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(500L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(20000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build()));
return builder;
}
/**
* Applies the given settings updater function to all of the unary API methods in this service.
*
* <p>Note: This method does not support applying settings to streaming methods.
*/
public Builder applyToAllUnaryMethods(
ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
return this;
}
public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
return unaryMethodSettingsBuilders;
}
/** Returns the builder for the settings used for calls to aggregatedList. */
public PagedCallSettings.Builder<
AggregatedListServiceAttachmentsRequest,
ServiceAttachmentAggregatedList,
AggregatedListPagedResponse>
aggregatedListSettings() {
return aggregatedListSettings;
}
/** Returns the builder for the settings used for calls to delete. */
public UnaryCallSettings.Builder<DeleteServiceAttachmentRequest, Operation> deleteSettings() {
return deleteSettings;
}
/** Returns the builder for the settings used for calls to delete. */
public OperationCallSettings.Builder<DeleteServiceAttachmentRequest, Operation, Operation>
deleteOperationSettings() {
return deleteOperationSettings;
}
/** Returns the builder for the unary-call settings used for calls to get. */
public UnaryCallSettings.Builder<GetServiceAttachmentRequest, ServiceAttachment> getSettings() {
  return getSettings;
}
/** Returns the builder for the unary-call settings used for calls to getIamPolicy. */
public UnaryCallSettings.Builder<GetIamPolicyServiceAttachmentRequest, Policy>
    getIamPolicySettings() {
  return getIamPolicySettings;
}
/** Returns the builder for the unary-call settings used for calls to insert. */
public UnaryCallSettings.Builder<InsertServiceAttachmentRequest, Operation> insertSettings() {
  return insertSettings;
}
/** Returns the builder for the long-running-operation settings used for calls to insert. */
public OperationCallSettings.Builder<InsertServiceAttachmentRequest, Operation, Operation>
    insertOperationSettings() {
  return insertOperationSettings;
}
/** Returns the builder for the paged-call settings used for calls to list. */
public PagedCallSettings.Builder<
        ListServiceAttachmentsRequest, ServiceAttachmentList, ListPagedResponse>
    listSettings() {
  return listSettings;
}
/** Returns the builder for the unary-call settings used for calls to patch. */
public UnaryCallSettings.Builder<PatchServiceAttachmentRequest, Operation> patchSettings() {
  return patchSettings;
}
/** Returns the builder for the long-running-operation settings used for calls to patch. */
public OperationCallSettings.Builder<PatchServiceAttachmentRequest, Operation, Operation>
    patchOperationSettings() {
  return patchOperationSettings;
}
/** Returns the builder for the unary-call settings used for calls to setIamPolicy. */
public UnaryCallSettings.Builder<SetIamPolicyServiceAttachmentRequest, Policy>
    setIamPolicySettings() {
  return setIamPolicySettings;
}
/** Returns the builder for the unary-call settings used for calls to testIamPermissions. */
public UnaryCallSettings.Builder<
        TestIamPermissionsServiceAttachmentRequest, TestPermissionsResponse>
    testIamPermissionsSettings() {
  return testIamPermissionsSettings;
}
/** Builds the immutable {@code ServiceAttachmentsStubSettings} from this builder's current state. */
@Override
public ServiceAttachmentsStubSettings build() throws IOException {
  return new ServiceAttachmentsStubSettings(this);
}
}
}
|
apache/datasketches-java | 37,649 | src/main/java/org/apache/datasketches/common/Util.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.common;
import static java.lang.Math.ceil;
import static java.lang.Math.floor;
import static java.lang.Math.log;
import static java.lang.Math.pow;
import static java.lang.Math.round;
import static java.lang.foreign.ValueLayout.JAVA_BYTE;
import static org.apache.datasketches.hash.MurmurHash3.hash;
import java.lang.foreign.MemorySegment;
import java.nio.ByteOrder;
import java.util.Comparator;
/**
* Common utility functions.
*
* @author Lee Rhodes
*/
@SuppressWarnings("unchecked")
public final class Util {
static {
  //The serialization code in this library assumes little-endian byte ordering,
  // so fail fast on big-endian hardware rather than silently corrupting sketch images.
  if (ByteOrder.nativeOrder() != ByteOrder.LITTLE_ENDIAN) {
    throw new SketchesNotSupportedException("Machine Native Endianness must be LITTLE_ENDIAN.");
  }
}
/**
 * The java line separator character as a String.
 */
public static final String LS = System.getProperty("line.separator");
/**
 * The tab character
 */
public static final char TAB = '\t';
/**
 * The natural logarithm of 2.0.
 */
public static final double LOG2 = log(2.0);
/**
 * The inverse golden ratio as an unsigned long.
 */
public static final long INVERSE_GOLDEN_U64 = 0x9e3779b97f4a7c13L;
/**
 * The inverse golden ratio as a fraction.
 * This has more precision than using the formula: (Math.sqrt(5.0) - 1.0) / 2.0.
 */
public static final double INVERSE_GOLDEN = 0.6180339887498949025;
/**
 * Long.MAX_VALUE as a double.
 */
public static final double LONG_MAX_VALUE_AS_DOUBLE = Long.MAX_VALUE;
/**
 * The seed 9001 used in the sketch update methods is a prime number that
 * was chosen very early on in experimental testing. Choosing a seed is somewhat arbitrary, and
 * the author cannot prove that this particular seed is somehow superior to other seeds. There
 * was some early Internet discussion that a seed of 0 did not produce as clean avalanche diagrams
 * as non-zero seeds, but this may have been more related to the MurmurHash2 release, which did
 * have some issues. As far as the author can determine, MurmurHash3 does not have these problems.
 *
 * <p>In order to perform set operations on two sketches it is critical that the same hash
 * function and seed are identical for both sketches, otherwise the assumed 1:1 relationship
 * between the original source key value and the hashed bit string would be violated. Once
 * you have developed a history of stored sketches you are stuck with it.
 *
 * <p><b>WARNING:</b> This seed is used internally by library sketches in different
 * packages and thus must be declared public. However, this seed value must not be used by library
 * users with the MurmurHash3 function. It should be viewed as existing for exclusive, private
 * use by the library.
 *
 * <p><a href="{@docRoot}/resources/dictionary.html#defaultUpdateSeed">See Default Update Seed</a>
 */
public static final long DEFAULT_UPDATE_SEED = 9001L;
//This is a static utility class: suppress instantiation.
private Util() {}
//Byte Conversions
/**
 * Returns an int extracted from a Little-Endian byte array.
 * @param arr the given byte array
 * @return an int extracted from a Little-Endian byte array.
 */
public static int bytesToInt(final byte[] arr) {
  int result = 0;
  //accumulate from the most significant byte (index 3) down to the least (index 0)
  for (int i = 3; i >= 0; i--) {
    result = (result << 8) | (arr[i] & 0xff);
  }
  return result;
}
/**
 * Returns a long extracted from a Little-Endian byte array.
 * @param arr the given byte array
 * @return a long extracted from a Little-Endian byte array.
 */
public static long bytesToLong(final byte[] arr) {
  long result = 0;
  //accumulate from the most significant byte (index 7) down to the least (index 0)
  for (int i = 7; i >= 0; i--) {
    result = (result << 8) | (arr[i] & 0xffL);
  }
  return result;
}
/**
 * Returns a Little-Endian byte array extracted from the given int.
 * @param v the given int
 * @param arr a given array of 4 bytes that will be returned with the data
 * @return a Little-Endian byte array extracted from the given int.
 */
public static byte[] intToBytes(final int v, final byte[] arr) {
  int t = v;
  for (int i = 0; i < 4; i++) { //low byte first: little-endian
    arr[i] = (byte) t;
    t >>>= 8;
  }
  return arr;
}
/**
 * Returns a Little-Endian byte array extracted from the given long.
 * @param v the given long
 * @param arr a given array of 8 bytes that will be returned with the data
 * @return a Little-Endian byte array extracted from the given long.
 */
public static byte[] longToBytes(final long v, final byte[] arr) {
  long t = v;
  for (int i = 0; i < 8; i++) { //low byte first: little-endian
    arr[i] = (byte) t;
    t >>>= 8;
  }
  return arr;
}
//Byte array conversions
//Packs a byte array into longs, 8 bytes per long; a final partial group occupies the low
// end of the last long. NOTE(review): with littleEndian == true the FIRST byte of each
// 8-byte group lands in the MOST significant byte of the long (and vice versa for the BE
// branch) — this is inverted relative to the conventional meaning of the flag names;
// confirm it matches the callers' expectations before relying on the flag semantics.
static long[] convertToLongArray(final byte[] byteArr, final boolean littleEndian) {
  final int len = byteArr.length;
  //round up: one extra long for a trailing partial group of 1..7 bytes
  final long[] longArr = new long[(len / 8) + ((len % 8) != 0 ? 1 : 0)];
  int off = 0;
  int longArrIdx = 0;
  while (off < len) {
    //rem is the index of the last byte of this group, relative to off (0..7)
    final int rem = Math.min(len - 1 - off, 7);
    long tgt = 0;
    if (littleEndian) {
      //last byte of the group goes into bits 0..7, first byte into the highest shift
      for (int j = off + rem, k = 0; j >= off; --j, k++) {
        tgt |= (byteArr[j] & 0XFFL) << (k * 8);
      }
    } else { //BE
      //first byte of the group goes into bits 0..7, last byte into the highest shift
      for (int j = off + rem, k = rem; j >= off; --j, k--) {
        tgt |= (byteArr[j] & 0XFFL) << (k * 8);
      }
    }
    off += 8;
    longArr[longArrIdx++] = tgt;
  }
  return longArr;
}
//String Related
/**
 * Returns a string of spaced hex bytes in Big-Endian order.
 * @param v the given long
 * @return string of spaced hex bytes in Big-Endian order.
 */
public static String longToHexBytes(final long v) {
  final StringBuilder sb = new StringBuilder(24);
  //emit the most significant byte first; each byte as two lowercase hex digits plus a space
  for (int shift = 56; shift >= 0; shift -= 8) {
    sb.append(String.format("%02x ", (v >>> shift) & 0XFFL));
  }
  return sb.toString();
}
/**
 * Returns a string view of a byte array
 * @param arr the given byte array
 * @param signed set true if you want the byte values signed.
 * @param littleEndian set true if you want Little-Endian order
 * @param sep the separator string between bytes
 * @return a string view of a byte array
 */
public static String bytesToString(
    final byte[] arr, final boolean signed, final boolean littleEndian, final String sep) {
  //a full-width mask keeps the sign-extended (signed) value; 0xFF yields the unsigned value
  final int mask = signed ? 0XFFFFFFFF : 0XFF;
  final int n = arr.length;
  final StringBuilder sb = new StringBuilder();
  for (int k = 0; k < n - 1; k++) {
    final int idx = littleEndian ? k : n - 1 - k;
    sb.append(arr[idx] & mask).append(sep);
  }
  //last element carries no trailing separator
  sb.append(arr[littleEndian ? n - 1 : 0] & mask);
  return sb.toString();
}
/**
 * Returns the given time in nanoseconds formatted as Sec.mSec_uSec_nSec
 * @param nS the given nanoseconds
 * @return the given time in nanoseconds formatted as Sec.mSec_uSec_nSec
 */
public static String nanoSecToString(final long nS) {
  final long rem_nS = (long) (nS % 1000.0);
  final long rem_uS = (long) ((nS / 1000.0) % 1000.0);
  final long rem_mS = (long) ((nS / 1000000.0) % 1000.0);
  final long sec = (long) (nS / 1000000000.0);
  return String.format("%d.%3s_%3s_%3s", sec, pad3(rem_mS), pad3(rem_uS), pad3(rem_nS));
}

/** Left-pads the decimal form of v with '0' characters to at least 3 characters. */
private static String pad3(final long v) {
  final StringBuilder sb = new StringBuilder(Long.toString(v));
  while (sb.length() < 3) { sb.insert(0, '0'); }
  return sb.toString();
}
/**
 * Returns the given time in milliseconds formatted as Hours:Min:Sec.mSec
 * @param mS the given milliseconds
 * @return the given time in milliseconds formatted as Hours:Min:Sec.mSec
 */
public static String milliSecToString(final long mS) {
  final long rem_mS = (long) (mS % 1000.0);
  final long rem_sec = (long) ((mS / 1000.0) % 60.0);
  final long rem_min = (long) ((mS / 60000.0) % 60.0);
  final long hr = (long) (mS / 3600000.0);
  return String.format("%d:%2s:%2s.%3s",
      hr, zeroPadded(rem_min, 2), zeroPadded(rem_sec, 2), zeroPadded(rem_mS, 3));
}

/** Left-pads the decimal form of v with '0' characters to at least the given width. */
private static String zeroPadded(final long v, final int width) {
  final StringBuilder sb = new StringBuilder(Long.toString(v));
  while (sb.length() < width) { sb.insert(0, '0'); }
  return sb.toString();
}
/**
* Prepend the given string with zeros. If the given string is equal or greater than the given
* field length, it will be returned without modification.
* @param s the given string
* @param fieldLength desired total field length including the given string
* @return the given string prepended with zeros.
*/
public static String zeroPad(final String s, final int fieldLength) {
  //delegate to the general pad routine: prepend '0' characters (postpend == false)
  return characterPad(s, fieldLength, '0', false);
}
/**
 * Prepend or postpend the given string with the given character to fill the given field length.
 * If the given string is equal to or greater than the given field length, it will be returned
 * without modification.
 * @param s the given string
 * @param fieldLength the desired field length
 * @param padChar the desired pad character
 * @param postpend if true append the padChar characters to the end of the string.
 * @return prepended or postpended given string with the given character to fill the given field length.
 */
public static String characterPad(final String s, final int fieldLength, final char padChar, final boolean postpend) {
  final int deficit = fieldLength - s.length();
  if (deficit <= 0) { return s; } //already at or beyond the field length
  final String pad = String.valueOf(padChar).repeat(deficit);
  return postpend ? s + pad : pad + s;
}
//Memory byte alignment
/**
 * Checks if parameter v is a multiple of 8 and greater than zero.
 * @param v The parameter to check
 * @param argName This name will be part of the error message if the check fails.
 */
public static void checkIfMultipleOf8AndGT0(final long v, final String argName) {
  //guard-clause form: throw on any violation, otherwise fall through
  if (((v & 0X7L) != 0L) || (v <= 0L)) {
    throw new SketchesArgumentException("The value of the parameter \"" + argName
        + "\" must be a positive multiple of 8 and greater than zero: " + v);
  }
}
/**
 * Returns true if v is a multiple of 8 and greater than zero
 * @param v The parameter to check
 * @return true if v is a multiple of 8 and greater than zero
 */
public static boolean isMultipleOf8AndGT0(final long v) {
  return ((v % 8L) == 0L) && (v > 0L);
}
//Powers of 2 or powers of base related
/**
 * Returns true if given long argument is exactly a positive power of 2.
 *
 * @param n The input argument.
 * @return true if argument is exactly a positive power of 2.
 */
public static boolean isPowerOf2(final long n) {
  //a positive power of 2 equals its own lowest set bit (n & -n)
  return (n > 0) && ((n & -n) == n);
}
/**
 * Checks the given long argument if it is a positive integer power of 2.
 * If not, it throws an exception with the user supplied local argument name, if not null.
 * @param n The input long argument must be a positive integer power of 2.
 * @param argName Used in the thrown exception. It may be null.
 * @throws SketchesArgumentException if not a positive integer power of 2.
 */
public static void checkIfPowerOf2(final long n, String argName) {
  if (!isPowerOf2(n)) {
    final String name = (argName == null) ? "" : argName; //null-safe message
    throw new SketchesArgumentException("The value of the argument \"" + name + "\""
        + " must be a positive integer power of 2: " + n);
  }
}
/**
 * Computes the int ceiling power of 2 within the range [1, 2^30]. This is the smallest positive power
 * of 2 that is equal to or greater than the given n and a positive integer.
 *
 * <p>For:
 * <ul>
 * <li>n &le; 1: returns 1</li>
 * <li>2^30 &le; n &le; 2^31 -1 : returns 2^30</li>
 * <li>n == an exact power of 2 : returns n</li>
 * <li>otherwise returns the smallest power of 2 &ge; n and equal to a positive integer</li>
 * </ul>
 *
 * @param n The input int argument.
 * @return the ceiling power of 2.
 */
public static int ceilingPowerOf2(final int n) {
  if (n <= 1) { return 1; }
  final int cap = 1 << 30; //largest positive int power of 2
  if (n >= cap) { return cap; }
  //(n - 1) << 1 has its highest bit at the desired power for any non-power-of-2 n,
  // and exactly at n itself when n is already a power of 2
  return Integer.highestOneBit((n - 1) << 1);
}
/**
 * Computes the long ceiling power of 2 within the range [1, 2^62]. This is the smallest positive power
 * of 2 that is equal to or greater than the given n and a positive long.
 *
 * <p>For:
 * <ul>
 * <li>n &le; 1: returns 1</li>
 * <li>2^62 &le; n &le; 2^63 -1 : returns 2^62</li>
 * <li>n == an exact power of 2 : returns n</li>
 * <li>otherwise returns the smallest power of 2 &ge; n and equal to a positive long</li>
 * </ul>
 *
 * @param n The input long argument.
 * @return the ceiling power of 2.
 */
public static long ceilingPowerOf2(final long n) {
  if (n <= 1L) { return 1L; }
  final long cap = 1L << 62; //largest positive long power of 2
  if (n >= cap) { return cap; }
  //see the int overload: the top bit of (n - 1) << 1 is the ceiling power
  return Long.highestOneBit((n - 1L) << 1);
}
/**
 * Computes the floor power of 2 given <i>n</i> is in the range [1, 2^31-1].
 * This is the largest positive power of 2 that equal to or less than the given n and equal
 * to a positive integer.
 *
 * <p>For:
 * <ul>
 * <li>n &le; 1: returns 1</li>
 * <li>2^30 &le; n &le; 2^31 -1 : returns 2^30</li>
 * <li>n == a power of 2 : returns n</li>
 * <li>otherwise returns the largest power of 2 less than n and equal to a mathematical
 * integer.</li>
 * </ul>
 *
 * @param n The given int argument.
 * @return the floor power of 2 as an int.
 */
public static int floorPowerOf2(final int n) {
  //clamping to 1 folds the n <= 1 case into the general highest-bit computation
  return Integer.highestOneBit(Math.max(n, 1));
}
/**
 * Computes the floor power of 2 given <i>n</i> is in the range [1, 2^63-1].
 * This is the largest positive power of 2 that is equal to or less than the given <i>n</i> and
 * equal to a positive integer.
 *
 * <p>For:
 * <ul>
 * <li>n &le; 1: returns 1</li>
 * <li>2^62 &le; n &le; 2^63 -1 : returns 2^62</li>
 * <li>n == a power of 2 : returns n</li>
 * <li>otherwise returns the largest power of 2 less than n and equal to a mathematical
 * integer.</li>
 * </ul>
 *
 * @param n The given long argument.
 * @return the floor power of 2 as a long
 */
public static long floorPowerOf2(final long n) {
  //clamping to 1 folds the n <= 1 case into the general highest-bit computation
  return Long.highestOneBit(Math.max(n, 1L));
}
/**
 * This is a long integer equivalent to <i>Math.ceil(n / (double)(1 &lt;&lt; k))</i>
 * where: <i>0 &lt; k &le; 6</i> and <i>n</i> is a non-negative long.
 * These limits are not checked for speed reasons.
 * @param n the input dividend as a positive long greater than zero.
 * @param k the input divisor exponent of 2 as a positive integer where 0 &lt; k &le; 6.
 * @return the long integer equivalent to <i>Math.ceil(n / 2^k)</i>.
 */
public static long ceilingMultiple2expK(final long n, final int k) {
  final long whole = n >>> k;                 //truncated quotient
  final long frac = n & ((1L << k) - 1L);     //remainder bits below 2^k
  return (frac == 0L) ? whole : whole + 1L;   //round up when any remainder exists
}
/**
 * Computes the inverse integer power of 2: 1/(2^e) = 2^(-e).
 * @param e a positive value between 0 and 1023 inclusive
 * @return the inverse integer power of 2: 1/(2^e) = 2^(-e)
 */
public static double invPow2(final int e) {
  //branch-free range check: the OR is non-negative only when 0 <= e <= 1023
  assert (e | (1024 - e - 1)) >= 0 : "e cannot be negative or greater than 1023: " + e;
  //Constructs the double directly from its IEEE 754 bits: biased exponent 1023 - e, zero mantissa.
  //NOTE(review): for e == 1023 the biased exponent is 0 so this returns 0.0 rather than the
  // subnormal 2^-1023 — confirm callers never pass e == 1023.
  return Double.longBitsToDouble((1023L - e) << 52);
}
/**
* Computes the next larger integer point in the power series
* <i>point = 2<sup>( i / ppo )</sup></i> given the current point in the series.
* For illustration, this can be used in a loop as follows:
*
* <pre>{@code
* int maxP = 1024;
* int minP = 1;
* int ppo = 2;
*
* for (int p = minP; p <= maxP; p = pwr2SeriesNext(ppo, p)) {
* System.out.print(p + " ");
* }
* //generates the following series:
* //1 2 3 4 6 8 11 16 23 32 45 64 91 128 181 256 362 512 724 1024
* }</pre>
*
* @param ppo Points-Per-Octave, or the number of points per integer powers of 2 in the series.
* @param curPoint the current point of the series. Must be ≥ 1.
* @return the next point in the power series.
*/
public static long pwr2SeriesNext(final int ppo, final long curPoint) {
  final long cur = curPoint < 1L ? 1L : curPoint; //clamp to the series minimum of 1
  int gi = (int)round(log2(cur) * ppo); //current generating index
  long next;
  do {
    //advance the generating index until the rounded point strictly exceeds curPoint,
    // guaranteeing the series increases even when rounding collapses adjacent points
    next = round(pow(2.0, (double) ++gi / ppo));
  } while ( next <= curPoint);
  return next;
}
/**
* Computes the previous, smaller integer point in the power series
* <i>point = 2<sup>( i / ppo )</sup></i> given the current point in the series.
* For illustration, this can be used in a loop as follows:
*
* <pre>{@code
* int maxP = 1024;
* int minP = 1;
* int ppo = 2;
*
* for (int p = maxP; p >= minP; p = pwr2SeriesPrev(ppo, p)) {
* System.out.print(p + " ");
* }
* //generates the following series:
* //1024 724 512 362 256 181 128 91 64 45 32 23 16 11 8 6 4 3 2 1
* }</pre>
*
* @param ppo Points-Per-Octave, or the number of points per integer powers of 2 in the series.
* @param curPoint the current point of the series. Must be ≥ 1.
* @return the previous, smaller point in the power series.
* A returned value of zero terminates the series.
*/
public static int pwr2SeriesPrev(final int ppo, final int curPoint) {
  if (curPoint <= 1) { return 0; } //a zero return terminates a descending series loop
  int gi = (int)round(log2(curPoint) * ppo); //current generating index
  int prev;
  do {
    //walk the generating index down until the rounded point is strictly below curPoint
    prev = (int)round(pow(2.0, (double) --gi / ppo));
  } while (prev >= curPoint);
  return prev;
}
/**
* Computes the next larger double in the power series
* <i>point = logBase<sup>( i / ppb )</sup></i> given the current point in the series.
* For illustration, this can be used in a loop as follows:
*
* <pre>{@code
* double maxP = 1024.0;
* double minP = 1.0;
* int ppb = 2;
* double logBase = 2.0;
*
* for (double p = minP; p <= maxP; p = powerSeriesNextDouble(ppb, p, true, logBase)) {
* System.out.print(p + " ");
* }
* //generates the following series:
* //1 2 3 4 6 8 11 16 23 32 45 64 91 128 181 256 362 512 724 1024
* }</pre>
*
* @param ppb Points-Per-Base, or the number of points per integer powers of base in the series.
* @param curPoint the current point of the series. Must be ≥ 1.0.
* @param roundToLong if true the output will be rounded to the nearest long.
* @param logBase the desired base of the logarithms
* @return the next point in the power series.
*/
public static double powerSeriesNextDouble(final int ppb, final double curPoint,
    final boolean roundToLong, final double logBase) {
  final double cur = curPoint < 1.0 ? 1.0 : curPoint; //clamp to the series minimum of 1.0
  double gi = round(logBaseOfX(logBase, cur) * ppb ); //current generating index
  double next;
  do {
    final double n = pow(logBase, ++gi / ppb);
    next = roundToLong ? round(n) : n; //optionally snap each point to the nearest long
  } while (next <= cur); //loop until the series strictly increases past the clamped point
  return next;
}
/**
* Returns the ceiling of a given <i>n</i> given a <i>base</i>, where the ceiling is an integral power of the base.
* This is the smallest positive power of <i>base</i> that is equal to or greater than the given <i>n</i>
* and equal to a mathematical integer.
* The result of this function is consistent with {@link #ceilingPowerOf2(int)} for values
* less than one. I.e., if <i>n < 1,</i> the result is 1.
*
* <p>The formula is: <i>base<sup>ceiling(log<sub>base</sub>(x))</sup></i></p>
*
* @param base The number in the expression ⌈base<sup>n</sup>⌉.
* @param n The input argument.
* @return the ceiling power of <i>base</i> as a double and equal to a mathematical integer.
*/
public static double ceilingPowerBaseOfDouble(final double base, final double n) {
  final double x = n < 1.0 ? 1.0 : n; //values below 1 clamp to 1, matching ceilingPowerOf2
  //outer round snaps the floating-point power back onto an exact mathematical integer
  return Math.round(pow(base, ceil(logBaseOfX(base, x))));
}
/**
* Computes the floor of a given <i>n</i> given <i>base</i>, where the floor is an integral power of the base.
* This is the largest positive power of <i>base</i> that is equal to or less than the given <i>n</i>
* and equal to a mathematical integer.
* The result of this function is consistent with {@link #floorPowerOf2(int)} for values
* less than one. I.e., if <i>n < 1,</i> the result is 1.
*
* <p>The formula is: <i>base<sup>floor(log<sub>base</sub>(x))</sup></i></p>
*
* @param base The number in the expression ⌊base<sup>n</sup>⌋.
* @param n The input argument.
* @return the floor power of 2 and equal to a mathematical integer.
*/
public static double floorPowerBaseOfDouble(final double base, final double n) {
  final double x = n < 1.0 ? 1.0 : n; //values below 1 clamp to 1, matching floorPowerOf2
  //outer round snaps the floating-point power back onto an exact mathematical integer
  return Math.round(pow(base, floor(logBaseOfX(base, x))));
}
// Logarithm related
/**
 * The log<sub>2</sub>(value)
 * @param value the given value
 * @return log<sub>2</sub>(value)
 */
public static double log2(final double value) {
  return log(value) / log(2.0); //change of base from the natural log
}
/**
 * Returns the log<sub>base</sub>(x). Example, if base = 2.0: logB(2.0, x) = log(x) / log(2.0).
 * @param base The number in the expression log(x) / log(base).
 * @param x the given value
 * @return the log<sub>base</sub>(x)
 */
public static double logBaseOfX(final double base, final double x) {
  final double numerator = log(x);
  final double denominator = log(base);
  return numerator / denominator;
}
/**
* Returns the number of one bits following the lowest-order ("rightmost") zero-bit in the
* two's complement binary representation of the specified long value, or 64 if the value is equal
* to minus one.
* @param v the value whose number of trailing ones is to be computed.
* @return the number of one bits following the lowest-order ("rightmost") zero-bit in the
* two's complement binary representation of the specified long value, or 64 if the value is equal
* to minus one.
*/
public static int numberOfTrailingOnes(final long v) {
  //trailing ones of v == trailing zeros of ~v; ~(-1L) == 0, whose NTZ is 64
  return Long.numberOfTrailingZeros(~v);
}
/**
 * Returns the number of one bits preceding the highest-order ("leftmost") zero-bit in the
 * two's complement binary representation of the specified long value, or 64 if the value is equal
 * to minus one.
 * @param v the value whose number of leading ones is to be computed.
 * @return the number of one bits preceding the highest-order ("leftmost") zero-bit in the
 * two's complement binary representation of the specified long value, or 64 if the value is equal
 * to minus one.
 */
public static int numberOfLeadingOnes(final long v) {
  //leading ones of v == leading zeros of ~v; ~(-1L) == 0, whose NLZ is 64
  return Long.numberOfLeadingZeros(~v);
}
/**
* Returns the log2 of the given int value if it is an exact power of 2 and greater than zero.
* If not, it throws an exception with the user supplied local argument name.
* @param powerOf2 must be a power of 2 and greater than zero.
* @param argName the argument name used in the exception if thrown.
* @return the log2 of the given value if it is an exact power of 2 and greater than zero.
* @throws SketchesArgumentException if not a power of 2 nor greater than zero.
*/
public static int exactLog2OfInt(final int powerOf2, final String argName) {
  checkIfPowerOf2(powerOf2, argName); //throws SketchesArgumentException when invalid
  //for an exact power of 2 the number of trailing zeros is its log2
  return Integer.numberOfTrailingZeros(powerOf2);
}
/**
* Returns the log2 of the given long value if it is an exact power of 2 and greater than zero.
* If not, it throws an exception with the user supplied local argument name.
* @param powerOf2 must be a power of 2 and greater than zero.
* @param argName the argument name used in the exception if thrown.
* @return the log2 of the given value if it is an exact power of 2 and greater than zero.
* @throws SketchesArgumentException if not a power of 2 nor greater than zero.
*/
public static int exactLog2OfLong(final long powerOf2, final String argName) {
  checkIfPowerOf2(powerOf2, argName); //throws SketchesArgumentException when invalid
  //for an exact power of 2 the number of trailing zeros is its log2
  return Long.numberOfTrailingZeros(powerOf2);
}
/**
 * Returns the log2 of the given int value if it is an exact power of 2 and greater than zero.
 * If not, it throws an exception.
 * @param powerOf2 must be a power of 2 and greater than zero.
 * @return the log2 of the given int value if it is an exact power of 2 and greater than zero.
 * @throws SketchesArgumentException if not a power of 2 nor greater than zero.
 */
public static int exactLog2OfInt(final int powerOf2) {
  if (!isPowerOf2(powerOf2)) {
    throw new SketchesArgumentException("Argument 'powerOf2' must be a positive power of 2.");
  }
  //use the Integer variant for an int argument (was Long.numberOfTrailingZeros; identical
  // result after widening, but now consistent with exactLog2OfInt(int, String))
  return Integer.numberOfTrailingZeros(powerOf2);
}
/**
 * Returns the log2 of the given long value if it is an exact power of 2 and greater than zero.
 * If not, it throws an exception.
 * @param powerOf2 must be a power of 2 and greater than zero.
 * @return the log2 of the given long value if it is an exact power of 2 and greater than zero.
 */
public static int exactLog2OfLong(final long powerOf2) {
  if (isPowerOf2(powerOf2)) {
    //for an exact power of 2 the number of trailing zeros is its log2
    return Long.numberOfTrailingZeros(powerOf2);
  }
  throw new SketchesArgumentException("Argument 'powerOf2' must be a positive power of 2.");
}
//Checks that throw
/**
* Check the requested offset and length against the allocated size.
* The invariants equation is: {@code 0 <= reqOff <= reqLen <= reqOff + reqLen <= allocSize}.
* If this equation is violated an {@link SketchesArgumentException} will be thrown.
* @param reqOff the requested offset
* @param reqLen the requested length
* @param allocSize the allocated size.
*/
public static void checkBounds(final long reqOff, final long reqLen, final long allocSize) {
  //Single-branch range check (JDK style): if any invariant of
  // 0 <= reqOff <= reqOff + reqLen <= allocSize is violated — including overflow of
  // reqOff + reqLen — one of the OR'ed terms is negative, setting the sign bit.
  if ((reqOff | reqLen | (reqOff + reqLen) | (allocSize - (reqOff + reqLen))) < 0) {
    throw new SketchesArgumentException("Bounds Violation: "
        + "reqOffset: " + reqOff + ", reqLength: " + reqLen
        + ", (reqOff + reqLen): " + (reqOff + reqLen) + ", allocSize: " + allocSize);
  }
}
/**
 * Checks the given parameter to make sure it is positive and between 0.0 inclusive and 1.0
 * inclusive.
 *
 * @param p
 * <a href="{@docRoot}/resources/dictionary.html#p">See Sampling Probability, <i>p</i></a>
 * @param argName Used in the thrown exception.
 */
public static void checkProbability(final double p, final String argName) {
  //negated conjunction (not De Morgan'd) so that NaN, which fails both comparisons, also throws
  if (!((p >= 0.0) && (p <= 1.0))) {
    throw new SketchesArgumentException("The value of the parameter \"" + argName
        + "\" must be between 0.0 inclusive and 1.0 inclusive: " + p);
  }
}
//Boolean Checks
/**
 * Unsigned compare with longs.
 * @param n1 A long to be treated as if unsigned.
 * @param n2 A long to be treated as if unsigned.
 * @return true if n1 &lt; n2 when both are treated as unsigned.
 */
public static boolean isLessThanUnsigned(final long n1, final long n2) {
  //Replaces the sign-flip XOR trick with the standard library call; also fixes the javadoc,
  // which previously (and incorrectly) claimed "true if n1 > n2".
  return Long.compareUnsigned(n1, n2) < 0;
}
/**
 * Returns true if given n is even.
 * @param n the given n
 * @return true if given n is even.
 */
public static boolean isEven(final long n) {
  return (n % 2L) == 0L; //holds for negative n as well: (-4 % 2) == 0, (-3 % 2) == -1
}
/**
 * Returns true if given n is odd.
 * @param n the given n
 * @return true if given n is odd.
 */
public static boolean isOdd(final long n) {
  return (n & 1L) != 0L; //the low bit is 1 for any odd value, negative or positive
}
//Other
/**
 * Returns a one if the bit at bitPos is a one, otherwise zero.
 * @param number the number to examine
 * @param bitPos the given zero-based bit position, where the least significant
 * bit is at position zero.
 * @return a one if the bit at bitPos is a one, otherwise zero.
 */
public static int bitAt(final long number, final int bitPos) {
  //BUG FIX: the previous form ((number & (1L << bitPos)) > 0 ? 1 : 0) always returned 0
  // for bitPos == 63, because the masked result is negative when the sign bit is set.
  //Shifting the target bit down to position 0 works for all 64 bit positions.
  return (int) ((number >>> bitPos) & 1L);
}
/**
 * Computes the number of decimal digits of the number n.
 * @param n the given number; assumed non-negative (negative values are not meaningful here —
 * TODO confirm callers never pass negatives)
 * @return the number of decimal digits of the number n
 */
public static int numDigits(long n) {
  //BUG FIX: the log-based formula returned 0 for n == 0; zero has one decimal digit.
  if (n == 0) { return 1; }
  //nudge exact multiples of 10 up so ceil(log10) lands on the correct digit count
  if ((n % 10) == 0) { n++; }
  return (int) ceil(log(n) / log(10));
}
/**
* Converts the given number to a string prepended with spaces, if necessary, to
* match the given length.
*
* <p>For example, assume a sequence of integers from 1 to 1000. The largest value has
* four decimal digits. Convert the entire sequence of strings to the form " 1" to "1000".
* When these strings are sorted they will be in numerical sequence: " 1", " 2", ... "1000".</p>
*
* @param number the given number
* @param length the desired string length.
* @return the given number to a string prepended with spaces
*/
public static String longToFixedLengthString(final long number, final int length) {
  final String num = Long.toString(number);
  //left-pad with spaces so equal-length strings sort in numeric order
  return characterPad(num, length, ' ', false);
}
//Generic tests
/**
 * Finds the minimum of two generic items
 * @param <T> the type
 * @param item1 item one
 * @param item2 item two
 * @param c the given comparator
 * @return the minimum value
 */
public static <T> Object minT(final Object item1, final Object item2, final Comparator<? super T> c) {
  //item1 wins ties, matching the original (compare <= 0 -> item1)
  return (c.compare((T) item1, (T) item2) > 0) ? item2 : item1;
}
/**
 * Finds the maximum of two generic items
 * @param <T> the type
 * @param item1 item one
 * @param item2 item two
 * @param c the given comparator
 * @return the maximum value
 */
public static <T> Object maxT(final Object item1, final Object item2, final Comparator<? super T> c) {
  //item1 wins ties, matching the original (compare >= 0 -> item1)
  return (c.compare((T) item1, (T) item2) < 0) ? item2 : item1;
}
/**
 * Is item1 Less-Than item2?
 * @param <T> the type
 * @param item1 item one
 * @param item2 item two
 * @param c the given comparator
 * @return true if item1 Less-Than item2
 */
public static <T> boolean lt(final Object item1, final Object item2, final Comparator<? super T> c) {
  final int cmp = c.compare((T) item1, (T) item2);
  return cmp < 0;
}
/**
 * Is item1 Less-Than-Or-Equal-To item2?
 * @param <T> the type
 * @param item1 item one
 * @param item2 item two
 * @param c the given comparator
 * @return true if item1 Less-Than-Or-Equal-To item2
 */
public static <T> boolean le(final Object item1, final Object item2, final Comparator<? super T> c) {
  final int cmp = c.compare((T) item1, (T) item2);
  return cmp <= 0;
}
//MemorySegment related
/**
* Clears all bytes of this MemorySegment to zero.
* @param seg the given MemorySegment
*/
public static void clear(final MemorySegment seg) {
  seg.fill((byte)0); //zero every byte of the segment
}
/**
 * Clears a portion of this MemorySegment to zero.
 * @param seg the given MemorySegment
 * @param offsetBytes offset bytes relative to this MemorySegment start
 * @param lengthBytes the length in bytes
 */
public static void clear(final MemorySegment seg, final long offsetBytes, final long lengthBytes) {
  //a slice is a zero-copy view, so filling it zeros the underlying region
  seg.asSlice(offsetBytes, lengthBytes).fill((byte) 0);
}
/**
 * Clears the bits defined by the bitMask
 * @param seg the given MemorySegment
 * @param offsetBytes offset bytes relative to this Memory start.
 * @param bitMask the bits set to one will be cleared
 */
public static void clearBits(final MemorySegment seg, final long offsetBytes, final byte bitMask) {
  final byte current = seg.get(JAVA_BYTE, offsetBytes);
  final byte cleared = (byte) (current & ~bitMask); //AND with the inverted mask drops those bits
  seg.set(JAVA_BYTE, offsetBytes, cleared);
}
/**
 * Returns true if both segments have the same contents and the same length.
 * @param seg1 the given MemorySegment #1
 * @param seg2 the given MemorySegment #2
 * @return true if both segments have the same contents and the same length.
 */
public static boolean equalContents(final MemorySegment seg1, final MemorySegment seg2) {
  final long size = seg1.byteSize();
  //unequal lengths short-circuit before any content comparison
  return (size == seg2.byteSize()) && equalContents(seg1, 0, seg2, 0, size);
}
/**
* Returns true if both segments have the same content for the specified region.
* @param seg1 the given MemorySegment #1
* @param seg1offsetBytes the starting offset for MemorySegment #1 in bytes.
* @param seg2 the given MemorySegment #2
* @param seg2offsetBytes the starting offset for MemorySegment #2 in bytes.
* @param lengthBytes the length of the region to be compared, in bytes.
* @return true, if both segments have the content for the specified region.
*/
public static boolean equalContents(
final MemorySegment seg1,
final long seg1offsetBytes,
final MemorySegment seg2,
final long seg2offsetBytes,
final long lengthBytes) {
if (seg1.equals(seg2) && (seg1.byteSize() == seg2.byteSize())) { return true; } //identical segments
final long seg1EndOff = seg1offsetBytes + lengthBytes;
final long seg2EndOff = seg2offsetBytes + lengthBytes;
return MemorySegment.mismatch(seg1, seg1offsetBytes, seg1EndOff, seg2, seg2offsetBytes, seg2EndOff) == -1;
}
/**
* Fills a portion of this Memory region to the given byte value.
* @param seg the given MemorySegment
* @param offsetBytes offset bytes relative to this Memory start
* @param lengthBytes the length in bytes
* @param value the given byte value
*/
public static void fill(final MemorySegment seg, final long offsetBytes, final long lengthBytes, final byte value) {
final MemorySegment slice = seg.asSlice(offsetBytes, lengthBytes);
slice.fill(value);
}
/**
* Request a new heap MemorySegment with the given capacityBytes and either 8-byte aligned or one byte aligned.
*
* <p>If <i>aligned</i> is true, the returned MemorySegment will be constructed from a <i>long[]</i> array,
* and, as a result, it will have a memory alignment of 8 bytes.
* If the requested capacity is not exactly divisible by eight, the returned size
* will be rolled up to the next multiple of eight bytes.</p>
*
* <p>If <i>aligned</i> is false, the returned MemorySegment will be constructed from a <i>byte[]</i> array,
* and have a memory alignment of 1 byte.
*
* @param capacityBytes The new capacity being requested. It must not be negative and cannot exceed Integer.MAX_VALUE.
* @param aligned if true, the new heap segment will have an alignment of 8 bytes, otherwise the alignment will be 1 byte.
* @return a new MemorySegment with the requested capacity and alignment.
*/
public static MemorySegment alignedHeapSegment(final int capacityBytes, final boolean aligned) {
if (aligned) {
final int lenLongs = capacityBytes >>> 3;
final long[] array = ((capacityBytes & 0x7) == 0)
? new long[lenLongs]
: new long[lenLongs + 1];
return MemorySegment.ofArray(array);
}
return MemorySegment.ofArray(new byte[capacityBytes]);
}
/**
* Sets the bits defined by the bitMask
* @param seg the given MemorySegment
* @param offsetBytes offset bytes relative to this MemorySegment start
* @param bitMask the bits set to one will be set
*/
public static void setBits(final MemorySegment seg, final long offsetBytes, final byte bitMask) {
final byte b = seg.get(JAVA_BYTE, offsetBytes);
seg.set(JAVA_BYTE, offsetBytes, (byte)(b | bitMask));
}
/**
* Computes and checks the 16-bit seed hash from the given long seed.
* The seed hash may not be zero in order to maintain compatibility with older serialized
* versions that did not have this concept.
* @param seed <a href="{@docRoot}/resources/dictionary.html#seed">See Update Hash Seed</a>
* @return the seed hash.
*/
public static short computeSeedHash(final long seed) {
final long[] seedArr = {seed};
final short seedHash = (short)(hash(seedArr, 0L)[0] & 0xFFFFL);
if (seedHash == 0) {
throw new SketchesArgumentException(
"The given seed: " + seed + " produced a seedHash of zero. "
+ "You must choose a different seed.");
}
return seedHash;
}
/**
* Check if the two seed hashes are equal. If not, throw an SketchesArgumentException.
* @param seedHashA the seedHash A
* @param seedHashB the seedHash B
* @return seedHashA if they are equal
*/
public static short checkSeedHashes(final short seedHashA, final short seedHashB) {
if (seedHashA != seedHashB) {
throw new SketchesArgumentException(
"Incompatible Seed Hashes. " + Integer.toHexString(seedHashA & 0XFFFF)
+ ", " + Integer.toHexString(seedHashB & 0XFFFF));
}
return seedHashA;
}
}
|
googleapis/google-cloud-java | 37,455 | java-run/proto-google-cloud-run-v2/src/main/java/com/google/cloud/run/v2/UpdateServiceRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/run/v2/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.run.v2;
/**
*
*
* <pre>
* Request message for updating a service.
* </pre>
*
* Protobuf type {@code google.cloud.run.v2.UpdateServiceRequest}
*/
public final class UpdateServiceRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.run.v2.UpdateServiceRequest)
UpdateServiceRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateServiceRequest.newBuilder() to construct.
private UpdateServiceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateServiceRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateServiceRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.run.v2.ServiceProto
.internal_static_google_cloud_run_v2_UpdateServiceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.run.v2.ServiceProto
.internal_static_google_cloud_run_v2_UpdateServiceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.run.v2.UpdateServiceRequest.class,
com.google.cloud.run.v2.UpdateServiceRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int SERVICE_FIELD_NUMBER = 1;
private com.google.cloud.run.v2.Service service_;
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the service field is set.
*/
@java.lang.Override
public boolean hasService() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The service.
*/
@java.lang.Override
public com.google.cloud.run.v2.Service getService() {
return service_ == null ? com.google.cloud.run.v2.Service.getDefaultInstance() : service_;
}
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.run.v2.ServiceOrBuilder getServiceOrBuilder() {
return service_ == null ? com.google.cloud.run.v2.Service.getDefaultInstance() : service_;
}
public static final int VALIDATE_ONLY_FIELD_NUMBER = 3;
private boolean validateOnly_ = false;
/**
*
*
* <pre>
* Indicates that the request should be validated and default values
* populated, without persisting the request or updating any resources.
* </pre>
*
* <code>bool validate_only = 3;</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
public static final int ALLOW_MISSING_FIELD_NUMBER = 4;
private boolean allowMissing_ = false;
/**
*
*
* <pre>
* Optional. If set to true, and if the Service does not exist, it will create
* a new one. The caller must have 'run.services.create' permissions if this
* is set to true and the Service does not exist.
* </pre>
*
* <code>bool allow_missing = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The allowMissing.
*/
@java.lang.Override
public boolean getAllowMissing() {
return allowMissing_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(1, getService());
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getUpdateMask());
}
if (validateOnly_ != false) {
output.writeBool(3, validateOnly_);
}
if (allowMissing_ != false) {
output.writeBool(4, allowMissing_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getService());
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
if (validateOnly_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_);
}
if (allowMissing_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, allowMissing_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.run.v2.UpdateServiceRequest)) {
return super.equals(obj);
}
com.google.cloud.run.v2.UpdateServiceRequest other =
(com.google.cloud.run.v2.UpdateServiceRequest) obj;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (hasService() != other.hasService()) return false;
if (hasService()) {
if (!getService().equals(other.getService())) return false;
}
if (getValidateOnly() != other.getValidateOnly()) return false;
if (getAllowMissing() != other.getAllowMissing()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
if (hasService()) {
hash = (37 * hash) + SERVICE_FIELD_NUMBER;
hash = (53 * hash) + getService().hashCode();
}
hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly());
hash = (37 * hash) + ALLOW_MISSING_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getAllowMissing());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.run.v2.UpdateServiceRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.run.v2.UpdateServiceRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for updating a service.
* </pre>
*
* Protobuf type {@code google.cloud.run.v2.UpdateServiceRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.run.v2.UpdateServiceRequest)
com.google.cloud.run.v2.UpdateServiceRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.run.v2.ServiceProto
.internal_static_google_cloud_run_v2_UpdateServiceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.run.v2.ServiceProto
.internal_static_google_cloud_run_v2_UpdateServiceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.run.v2.UpdateServiceRequest.class,
com.google.cloud.run.v2.UpdateServiceRequest.Builder.class);
}
// Construct using com.google.cloud.run.v2.UpdateServiceRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getServiceFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
service_ = null;
if (serviceBuilder_ != null) {
serviceBuilder_.dispose();
serviceBuilder_ = null;
}
validateOnly_ = false;
allowMissing_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.run.v2.ServiceProto
.internal_static_google_cloud_run_v2_UpdateServiceRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.run.v2.UpdateServiceRequest getDefaultInstanceForType() {
return com.google.cloud.run.v2.UpdateServiceRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.run.v2.UpdateServiceRequest build() {
com.google.cloud.run.v2.UpdateServiceRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.run.v2.UpdateServiceRequest buildPartial() {
com.google.cloud.run.v2.UpdateServiceRequest result =
new com.google.cloud.run.v2.UpdateServiceRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.run.v2.UpdateServiceRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.service_ = serviceBuilder_ == null ? service_ : serviceBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.validateOnly_ = validateOnly_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.allowMissing_ = allowMissing_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.run.v2.UpdateServiceRequest) {
return mergeFrom((com.google.cloud.run.v2.UpdateServiceRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.run.v2.UpdateServiceRequest other) {
if (other == com.google.cloud.run.v2.UpdateServiceRequest.getDefaultInstance()) return this;
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.hasService()) {
mergeService(other.getService());
}
if (other.getValidateOnly() != false) {
setValidateOnly(other.getValidateOnly());
}
if (other.getAllowMissing() != false) {
setAllowMissing(other.getAllowMissing());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getServiceFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 18
case 24:
{
validateOnly_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
case 32:
{
allowMissing_ = input.readBool();
bitField0_ |= 0x00000008;
break;
} // case 32
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000001);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Optional. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private com.google.cloud.run.v2.Service service_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.run.v2.Service,
com.google.cloud.run.v2.Service.Builder,
com.google.cloud.run.v2.ServiceOrBuilder>
serviceBuilder_;
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the service field is set.
*/
public boolean hasService() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The service.
*/
public com.google.cloud.run.v2.Service getService() {
if (serviceBuilder_ == null) {
return service_ == null ? com.google.cloud.run.v2.Service.getDefaultInstance() : service_;
} else {
return serviceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setService(com.google.cloud.run.v2.Service value) {
if (serviceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
service_ = value;
} else {
serviceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setService(com.google.cloud.run.v2.Service.Builder builderForValue) {
if (serviceBuilder_ == null) {
service_ = builderForValue.build();
} else {
serviceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeService(com.google.cloud.run.v2.Service value) {
if (serviceBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& service_ != null
&& service_ != com.google.cloud.run.v2.Service.getDefaultInstance()) {
getServiceBuilder().mergeFrom(value);
} else {
service_ = value;
}
} else {
serviceBuilder_.mergeFrom(value);
}
if (service_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearService() {
bitField0_ = (bitField0_ & ~0x00000002);
service_ = null;
if (serviceBuilder_ != null) {
serviceBuilder_.dispose();
serviceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.run.v2.Service.Builder getServiceBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getServiceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.run.v2.ServiceOrBuilder getServiceOrBuilder() {
if (serviceBuilder_ != null) {
return serviceBuilder_.getMessageOrBuilder();
} else {
return service_ == null ? com.google.cloud.run.v2.Service.getDefaultInstance() : service_;
}
}
/**
*
*
* <pre>
* Required. The Service to be updated.
* </pre>
*
* <code>.google.cloud.run.v2.Service service = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.run.v2.Service,
com.google.cloud.run.v2.Service.Builder,
com.google.cloud.run.v2.ServiceOrBuilder>
getServiceFieldBuilder() {
if (serviceBuilder_ == null) {
serviceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.run.v2.Service,
com.google.cloud.run.v2.Service.Builder,
com.google.cloud.run.v2.ServiceOrBuilder>(
getService(), getParentForChildren(), isClean());
service_ = null;
}
return serviceBuilder_;
}
// Backing storage for proto field `bool validate_only = 3`.
private boolean validateOnly_;
/**
 * <pre>
 * Indicates that the request should be validated and default values
 * populated, without persisting the request or updating any resources.
 * </pre>
 *
 * <code>bool validate_only = 3;</code>
 *
 * @return The validateOnly.
 */
@java.lang.Override
public boolean getValidateOnly() {
  return validateOnly_;
}
/**
 * Sets `validate_only` and marks it present via presence bit 0x4.
 *
 * <code>bool validate_only = 3;</code>
 *
 * @param value The validateOnly to set.
 * @return This builder for chaining.
 */
public Builder setValidateOnly(boolean value) {
  validateOnly_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 * Clears `validate_only` back to its default (false) and clears the
 * presence bit.
 *
 * <code>bool validate_only = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearValidateOnly() {
  bitField0_ = (bitField0_ & ~0x00000004);
  validateOnly_ = false;
  onChanged();
  return this;
}
// Backing storage for proto field `bool allow_missing = 4`.
private boolean allowMissing_;
/**
 * <pre>
 * Optional. If set to true, and if the Service does not exist, it will create
 * a new one. The caller must have 'run.services.create' permissions if this
 * is set to true and the Service does not exist.
 * </pre>
 *
 * <code>bool allow_missing = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The allowMissing.
 */
@java.lang.Override
public boolean getAllowMissing() {
  return allowMissing_;
}
/**
 * Sets `allow_missing` and marks it present via presence bit 0x8.
 *
 * <code>bool allow_missing = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The allowMissing to set.
 * @return This builder for chaining.
 */
public Builder setAllowMissing(boolean value) {
  allowMissing_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/**
 * Clears `allow_missing` back to its default (false) and clears the
 * presence bit.
 *
 * <code>bool allow_missing = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearAllowMissing() {
  bitField0_ = (bitField0_ & ~0x00000008);
  allowMissing_ = false;
  onChanged();
  return this;
}
// Unknown-field handling is delegated unchanged to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.run.v2.UpdateServiceRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.run.v2.UpdateServiceRequest)
// Singleton default (all-fields-unset) instance, created eagerly at class load.
private static final com.google.cloud.run.v2.UpdateServiceRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.run.v2.UpdateServiceRequest();
}
/** Returns the shared immutable default instance of this message type. */
public static com.google.cloud.run.v2.UpdateServiceRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. On any failure it attaches the partially-parsed message
// to the thrown InvalidProtocolBufferException so callers can inspect it.
private static final com.google.protobuf.Parser<UpdateServiceRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateServiceRequest>() {
      @java.lang.Override
      public UpdateServiceRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Missing required data: convert to the checked protobuf exception.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Underlying stream error: wrap, still exposing the partial message.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the static parser for this message type. */
public static com.google.protobuf.Parser<UpdateServiceRequest> parser() {
  return PARSER;
}
// Instance-level accessors required by the protobuf runtime; both simply
// expose the static singletons above.
@java.lang.Override
public com.google.protobuf.Parser<UpdateServiceRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.run.v2.UpdateServiceRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/hadoop-common | 37,306 | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import org.junit.Before;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringUtils;
import org.apache.tools.tar.TarEntry;
import org.apache.tools.tar.TarOutputStream;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestFileUtil {
private static final Log LOG = LogFactory.getLog(TestFileUtil.class);
// Root for all test artifacts, under test.build.data (defaults to /tmp).
private static final String TEST_ROOT_DIR = System.getProperty(
    "test.build.data", "/tmp") + "/fu";
private static final File TEST_DIR = new File(TEST_ROOT_DIR);
private static final String FILE = "x";   // name of the plain file placed in each fixture dir
private static final String LINK = "y";   // name of the symlink pointing at tmp/x
private static final String DIR = "dir";  // prefix for dir1/dir2 under del
// Directory fixtures; see setupDirs() for the tree built inside them.
private final File del = new File(TEST_DIR, "del");
private final File tmp = new File(TEST_DIR, "tmp");
private final File dir1 = new File(del, DIR + "1");
private final File dir2 = new File(del, DIR + "2");
private final File partitioned = new File(TEST_DIR, "partitioned");
/**
 * Creates multiple directories for testing.
 *
 * Contents of them are
 * dir:tmp:
 *   file: x
 * dir:del:
 *   file: x
 *   dir: dir1 : file:x
 *   dir: dir2 : file:x
 *   link: y to tmp/x
 *   link: tmpDir to tmp
 * dir:partitioned:
 *   file: part-r-00000, contents: "foo"
 *   file: part-r-00001, contents: "bar"
 */
private void setupDirs() throws IOException {
  // Guard against leftovers from a previous run (cleanup happens in @Before/@After).
  Assert.assertFalse(del.exists());
  Assert.assertFalse(tmp.exists());
  Assert.assertFalse(partitioned.exists());
  del.mkdirs();
  tmp.mkdirs();
  partitioned.mkdirs();
  new File(del, FILE).createNewFile();
  File tmpFile = new File(tmp, FILE);
  tmpFile.createNewFile();
  // create directories
  dir1.mkdirs();
  dir2.mkdirs();
  new File(dir1, FILE).createNewFile();
  new File(dir2, FILE).createNewFile();
  // create a symlink to file
  File link = new File(del, LINK);
  FileUtil.symLink(tmpFile.toString(), link.toString());
  // create a symlink to dir
  File linkDir = new File(del, "tmpDir");
  FileUtil.symLink(tmp.toString(), linkDir.toString());
  // del now contains: x, dir1, dir2, y, tmpDir
  Assert.assertEquals(5, del.listFiles().length);
  // create files in partitioned directories
  createFile(partitioned, "part-r-00000", "foo");
  createFile(partitioned, "part-r-00001", "bar");
  // create a cycle using symlinks. Cycles should be handled
  FileUtil.symLink(del.toString(), dir1.toString() + "/cycle");
}
/**
 * Creates a new file in the specified directory, with the specified name and
 * the specified file contents. This method will add a newline terminator to
 * the end of the contents string in the destination file.
 * @param directory File non-null destination directory.
 * @param name String non-null file name.
 * @param contents String non-null file contents.
 * @return File handle to the newly created file.
 * @throws IOException if an I/O error occurs.
 */
private File createFile(File directory, String name, String contents)
    throws IOException {
  File newFile = new File(directory, name);
  PrintWriter pw = new PrintWriter(newFile);
  try {
    // println appends the platform line separator after the contents.
    pw.println(contents);
  }
  finally {
    pw.close();
  }
  return newFile;
}
/**
 * FileUtil.listFiles must return existing entries, an empty array for an
 * empty directory, and throw IOException for a non-existent directory
 * (unlike File.listFiles, which returns null).
 */
@Test (timeout = 30000)
public void testListFiles() throws IOException {
  setupDirs();
  //Test existing files case
  File[] files = FileUtil.listFiles(partitioned);
  Assert.assertEquals(2, files.length);
  //Test existing directory with no files case
  File newDir = new File(tmp.getPath(),"test");
  newDir.mkdir();
  Assert.assertTrue("Failed to create test dir", newDir.exists());
  files = FileUtil.listFiles(newDir);
  Assert.assertEquals(0, files.length);
  newDir.delete();
  Assert.assertFalse("Failed to delete test dir", newDir.exists());
  //Test non-existing directory case, this throws
  //IOException
  try {
    files = FileUtil.listFiles(newDir);
    Assert.fail("IOException expected on listFiles() for non-existent dir "
        + newDir.toString());
  } catch(IOException ioe) {
    //Expected an IOException
  }
}
/**
 * Same contract as {@code testListFiles}, but for FileUtil.list, which
 * returns file names (String[]) rather than File objects.
 */
@Test (timeout = 30000)
public void testListAPI() throws IOException {
  setupDirs();
  //Test existing files case
  String[] files = FileUtil.list(partitioned);
  Assert.assertEquals("Unexpected number of pre-existing files", 2, files.length);
  //Test existing directory with no files case
  File newDir = new File(tmp.getPath(),"test");
  newDir.mkdir();
  Assert.assertTrue("Failed to create test dir", newDir.exists());
  files = FileUtil.list(newDir);
  Assert.assertEquals("New directory unexpectedly contains files", 0, files.length);
  newDir.delete();
  Assert.assertFalse("Failed to delete test dir", newDir.exists());
  //Test non-existing directory case, this throws
  //IOException
  try {
    files = FileUtil.list(newDir);
    Assert.fail("IOException expected on list() for non-existent dir "
        + newDir.toString());
  } catch(IOException ioe) {
    //Expected an IOException
  }
}
// Cleanup runs both before and after each test so a crashed test cannot
// poison the next run with leftover fixture directories.
@Before
public void before() throws IOException {
  cleanupImpl();
}
@After
public void tearDown() throws IOException {
  cleanupImpl();
}
// Force-deletes all fixture trees (the `true` flag grants permissions first,
// undoing any revokePermissions() done by a test) and verifies they are gone.
private void cleanupImpl() throws IOException {
  FileUtil.fullyDelete(del, true);
  Assert.assertTrue(!del.exists());
  FileUtil.fullyDelete(tmp, true);
  Assert.assertTrue(!tmp.exists());
  FileUtil.fullyDelete(partitioned, true);
  Assert.assertTrue(!partitioned.exists());
}
/**
 * fullyDelete must remove the whole `del` tree while leaving the unrelated
 * `tmp` fixture (target of symlinks inside del) untouched.
 */
@Test (timeout = 30000)
public void testFullyDelete() throws IOException {
  setupDirs();
  final boolean deleted = FileUtil.fullyDelete(del);
  assertTrue(deleted);
  assertFalse(del.exists());
  validateTmpDir();
}
/**
 * Tests if fullyDelete deletes
 * (a) symlink to file only and not the file pointed to by symlink.
 * (b) symlink to dir only and not the dir pointed to by symlink.
 * @throws IOException
 */
@Test (timeout = 30000)
public void testFullyDeleteSymlinks() throws IOException {
  setupDirs();
  File link = new File(del, LINK);
  Assert.assertEquals(5, del.list().length);
  // Since 'y' is a symlink to the file tmp/x, fullyDelete(y) should remove
  // only the link itself, not the target file. See setupDirs for details.
  boolean ret = FileUtil.fullyDelete(link);
  Assert.assertTrue(ret);
  Assert.assertFalse(link.exists());
  Assert.assertEquals(4, del.list().length);
  validateTmpDir();
  File linkDir = new File(del, "tmpDir");
  // Since tmpDir is symlink to tmp, fullyDelete(tmpDir) should not
  // delete contents of tmp. See setupDirs for details.
  ret = FileUtil.fullyDelete(linkDir);
  Assert.assertTrue(ret);
  Assert.assertFalse(linkDir.exists());
  Assert.assertEquals(3, del.list().length);
  validateTmpDir();
}
/**
 * Tests if fullyDelete deletes
 * (a) dangling symlink to file properly
 * (b) dangling symlink to directory properly
 * @throws IOException
 */
@Test (timeout = 30000)
public void testFullyDeleteDanglingSymlinks() throws IOException {
  setupDirs();
  // delete the directory tmp to make tmpDir a dangling link to dir tmp and
  // to make y as a dangling link to file tmp/x
  boolean ret = FileUtil.fullyDelete(tmp);
  Assert.assertTrue(ret);
  Assert.assertFalse(tmp.exists());
  // dangling symlink to file
  File link = new File(del, LINK);
  Assert.assertEquals(5, del.list().length);
  // Even though 'y' is dangling symlink to file tmp/x, fullyDelete(y)
  // should delete 'y' properly.
  ret = FileUtil.fullyDelete(link);
  Assert.assertTrue(ret);
  Assert.assertEquals(4, del.list().length);
  // dangling symlink to directory
  File linkDir = new File(del, "tmpDir");
  // Even though tmpDir is dangling symlink to tmp, fullyDelete(tmpDir) should
  // delete tmpDir properly.
  ret = FileUtil.fullyDelete(linkDir);
  Assert.assertTrue(ret);
  Assert.assertEquals(3, del.list().length);
}
/**
 * fullyDeleteContents must empty the directory but keep the directory
 * itself, and must not follow symlinks into the tmp fixture.
 */
@Test (timeout = 30000)
public void testFullyDeleteContents() throws IOException {
  setupDirs();
  final boolean emptied = FileUtil.fullyDeleteContents(del);
  assertTrue(emptied);
  // The directory survives, but it must now be empty.
  assertTrue(del.exists());
  assertEquals(0, del.listFiles().length);
  validateTmpDir();
}
// Asserts the tmp fixture is intact: it exists and holds exactly its single
// file "x" (i.e. deletions elsewhere did not follow symlinks into it).
private void validateTmpDir() {
  assertTrue(tmp.exists());
  assertEquals(1, tmp.listFiles().length);
  assertTrue(new File(tmp, FILE).exists());
}
// Fixture pieces for the revoked-permissions tests; the full tree layout is
// drawn in the javadoc of setupDirsAndNonWritablePermissions().
private final File xSubDir = new File(del, "xSubDir");
private final File xSubSubDir = new File(xSubDir, "xSubSubDir");
private final File ySubDir = new File(del, "ySubDir");
// Special file name that MyFile.delete() refuses to delete (hard-coded).
private static final String file1Name = "file1";
private final File file2 = new File(xSubDir, "file2");
private final File file22 = new File(xSubSubDir, "file22");
private final File file3 = new File(ySubDir, "file3");
private final File zlink = new File(del, "zlink");
/**
 * Creates a directory which can not be deleted completely.
 *
 * Directory structure. The naming is important in that {@link MyFile}
 * is used to return them in alphabetical order when listed.
 *
 *                  del(+w)
 *                     |
 *    .---------------------------------------,
 *    |            |              |           |
 *  file1(!w)  xSubDir(-rwx)   ySubDir(+w)   zlink
 *                 |  |
 *           file2(-rwx)   xSubSubDir(-rwx)
 *                                |
 *                         file22(-rwx)
 *
 * @throws IOException
 */
private void setupDirsAndNonWritablePermissions() throws IOException {
  Assert.assertFalse("The directory del should not have existed!",
      del.exists());
  del.mkdirs();
  new MyFile(del, file1Name).createNewFile();
  // "file1" is non-deletable by default, see MyFile.delete().
  xSubDir.mkdirs();
  file2.createNewFile();
  xSubSubDir.mkdirs();
  file22.createNewFile();
  // Revoke deepest entries first so the containing dirs are still
  // traversable while their children are being restricted.
  revokePermissions(file22);
  revokePermissions(xSubSubDir);
  revokePermissions(file2);
  revokePermissions(xSubDir);
  ySubDir.mkdirs();
  file3.createNewFile();
  Assert.assertFalse("The directory tmp should not have existed!",
      tmp.exists());
  tmp.mkdirs();
  File tmpFile = new File(tmp, FILE);
  tmpFile.createNewFile();
  // zlink points at tmp/x so deletion tests also cover symlinked entries.
  FileUtil.symLink(tmpFile.toString(), zlink.toString());
}
// Restores read/write/execute on f so revoked fixtures can be cleaned up.
private static void grantPermissions(final File f) {
  FileUtil.setReadable(f, true);
  FileUtil.setWritable(f, true);
  FileUtil.setExecutable(f, true);
}
// Revokes write, execute and read permissions on f.
private static void revokePermissions(final File f) {
  FileUtil.setWritable(f, false);
  FileUtil.setExecutable(f, false);
  FileUtil.setReadable(f, false);
}
// Validates the outcome of a fullyDelete/fullyDeleteContents attempt on the
// partially-undeletable tree, then restores permissions so cleanup can run.
// expectedRevokedPermissionDirsExist: true when deletion was attempted
// WITHOUT granting permissions (the -rwx subtree must survive), false when
// permissions were granted first (the subtree must be gone).
// ret: the deletion call's return value; always expected to be false because
// "file1" can never be deleted (see MyFile.delete()).
private void validateAndSetWritablePermissions(
    final boolean expectedRevokedPermissionDirsExist, final boolean ret) {
  grantPermissions(xSubDir);
  grantPermissions(xSubSubDir);
  Assert.assertFalse("The return value should have been false.", ret);
  Assert.assertTrue("The file file1 should not have been deleted.",
      new File(del, file1Name).exists());
  Assert.assertEquals(
      "The directory xSubDir *should* not have been deleted.",
      expectedRevokedPermissionDirsExist, xSubDir.exists());
  Assert.assertEquals("The file file2 *should* not have been deleted.",
      expectedRevokedPermissionDirsExist, file2.exists());
  Assert.assertEquals(
      "The directory xSubSubDir *should* not have been deleted.",
      expectedRevokedPermissionDirsExist, xSubSubDir.exists());
  Assert.assertEquals("The file file22 *should* not have been deleted.",
      expectedRevokedPermissionDirsExist, file22.exists());
  // The writable subtree and the symlink are always deletable.
  Assert.assertFalse("The directory ySubDir should have been deleted.",
      ySubDir.exists());
  Assert.assertFalse("The link zlink should have been deleted.",
      zlink.exists());
}
/**
 * Without granting permissions, fullyDelete must fail and leave the
 * permission-revoked subtree intact.
 */
@Test (timeout = 30000)
public void testFailFullyDelete() throws IOException {
  if(Shell.WINDOWS) {
    // windows Dir.setWritable(false) does not work for directories
    return;
  }
  LOG.info("Running test to verify failure of fullyDelete()");
  setupDirsAndNonWritablePermissions();
  boolean ret = FileUtil.fullyDelete(new MyFile(del));
  validateAndSetWritablePermissions(true, ret);
}
/**
 * With tryGrantPermissions=true, fullyDelete must remove even the
 * permission-revoked subtree; only the hard-coded "file1" survives.
 */
// NOTE(review): unlike testFailFullyDelete, there is no Shell.WINDOWS
// early-return here — presumably grant-then-delete works on Windows; confirm.
@Test (timeout = 30000)
public void testFailFullyDeleteGrantPermissions() throws IOException {
  setupDirsAndNonWritablePermissions();
  boolean ret = FileUtil.fullyDelete(new MyFile(del), true);
  // this time the directories with revoked permissions *should* be deleted:
  validateAndSetWritablePermissions(false, ret);
}
/**
 * Extend {@link File}. Same as {@link File} except for two things: (1) This
 * treats file1Name as a very special file which is not delete-able
 * irrespective of it's parent-dir's permissions, a peculiar file instance for
 * testing. (2) It returns the files in alphabetically sorted order when
 * listed.
 *
 */
public static class MyFile extends File {
  private static final long serialVersionUID = 1L;
  public MyFile(File f) {
    super(f.getAbsolutePath());
  }
  public MyFile(File parent, String child) {
    super(parent, child);
  }
  /**
   * Same as {@link File#delete()} except for file1Name which will never be
   * deleted (hard-coded)
   */
  @Override
  public boolean delete() {
    LOG.info("Trying to delete myFile " + getAbsolutePath());
    boolean bool = false;
    if (getName().equals(file1Name)) {
      // Simulate an undeletable file regardless of permissions.
      bool = false;
    } else {
      bool = super.delete();
    }
    if (bool) {
      LOG.info("Deleted " + getAbsolutePath() + " successfully");
    } else {
      LOG.info("Cannot delete " + getAbsolutePath());
    }
    return bool;
  }
  /**
   * Return the list of files in an alphabetically sorted order
   */
  @Override
  public File[] listFiles() {
    final File[] files = super.listFiles();
    if (files == null) {
      return null;
    }
    // Arrays.asList returns a fixed-size view; sorting it sorts `files` too.
    List<File> filesList = Arrays.asList(files);
    Collections.sort(filesList);
    // Re-wrap every entry so recursive operations keep MyFile semantics.
    File[] myFiles = new MyFile[files.length];
    int i=0;
    for(File f : filesList) {
      myFiles[i++] = new MyFile(f);
    }
    return myFiles;
  }
}
/**
 * Without granting permissions, fullyDeleteContents must fail and leave the
 * permission-revoked subtree intact.
 */
@Test (timeout = 30000)
public void testFailFullyDeleteContents() throws IOException {
  if(Shell.WINDOWS) {
    // windows Dir.setWritable(false) does not work for directories
    return;
  }
  LOG.info("Running test to verify failure of fullyDeleteContents()");
  setupDirsAndNonWritablePermissions();
  boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));
  validateAndSetWritablePermissions(true, ret);
}
/**
 * With tryGrantPermissions=true, fullyDeleteContents must remove the
 * permission-revoked subtree; only the hard-coded "file1" survives.
 */
@Test (timeout = 30000)
public void testFailFullyDeleteContentsGrantPermissions() throws IOException {
  setupDirsAndNonWritablePermissions();
  boolean ret = FileUtil.fullyDeleteContents(new MyFile(del), true);
  // this time the directories with revoked permissions *should* be deleted:
  validateAndSetWritablePermissions(false, ret);
}
/**
 * copyMerge over the `partitioned` directory must concatenate its two part
 * files ("foo", "bar") into a single destination file, in sorted part order.
 */
@Test (timeout = 30000)
public void testCopyMergeSingleDirectory() throws IOException {
  setupDirs();
  boolean copyMergeResult = copyMerge("partitioned", "tmp/merged");
  Assert.assertTrue("Expected successful copyMerge result.", copyMergeResult);
  File merged = new File(TEST_DIR, "tmp/merged");
  Assert.assertTrue("File tmp/merged must exist after copyMerge.",
      merged.exists());
  BufferedReader rdr = new BufferedReader(new FileReader(merged));
  try {
    Assert.assertEquals("Line 1 of merged file must contain \"foo\".",
        "foo", rdr.readLine());
    Assert.assertEquals("Line 2 of merged file must contain \"bar\".",
        "bar", rdr.readLine());
    Assert.assertNull("Expected end of file reading merged file.",
        rdr.readLine());
  }
  finally {
    rdr.close();
  }
}
/**
 * Calls FileUtil.copyMerge using the specified source and destination paths.
 * Both source and destination are assumed to be on the local file system.
 * The call will not delete source on completion and will not add an
 * additional string between files.
 * @param src String non-null source path.
 * @param dst String non-null destination path.
 * @return boolean true if the call to FileUtil.copyMerge was successful.
 * @throws IOException if an I/O error occurs.
 */
private boolean copyMerge(String src, String dst)
    throws IOException {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.getLocal(conf);
  final boolean result;
  try {
    Path srcPath = new Path(TEST_ROOT_DIR, src);
    Path dstPath = new Path(TEST_ROOT_DIR, dst);
    boolean deleteSource = false;
    // null => no separator string inserted between merged files.
    String addString = null;
    result = FileUtil.copyMerge(fs, srcPath, fs, dstPath, deleteSource, conf,
        addString);
  }
  finally {
    fs.close();
  }
  return result;
}
/**
 * Test that getDU is able to handle cycles caused due to symbolic links
 * and that directory sizes are not added to the final calculated size
 * @throws IOException
 */
@Test (timeout = 30000)
public void testGetDU() throws Exception {
  setupDirs();
  long du = FileUtil.getDU(TEST_DIR);
  // Only two files (in partitioned). Each has 3 characters + system-specific
  // line separator.
  final long expected = 2 * (3 + System.getProperty("line.separator").length());
  Assert.assertEquals(expected, du);
  // target file does not exist:
  final File doesNotExist = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog");
  long duDoesNotExist = FileUtil.getDU(doesNotExist);
  assertEquals(0, duDoesNotExist);
  // target file is not a directory:
  File notADirectory = new File(partitioned, "part-r-00000");
  long duNotADirectoryActual = FileUtil.getDU(notADirectory);
  long duNotADirectoryExpected = 3 + System.getProperty("line.separator").length();
  assertEquals(duNotADirectoryExpected, duNotADirectoryActual);
  try {
    // one of target files is not accessible, but the containing directory
    // is accessible:
    try {
      FileUtil.chmod(notADirectory.getAbsolutePath(), "0000");
    } catch (InterruptedException ie) {
      // should never happen since that method never throws InterruptedException.
      assertNull(ie);
    }
    assertFalse(FileUtil.canRead(notADirectory));
    // An unreadable file still contributes its size to the total.
    final long du3 = FileUtil.getDU(partitioned);
    assertEquals(expected, du3);
    // some target files and containing directory are not accessible:
    try {
      FileUtil.chmod(partitioned.getAbsolutePath(), "0000");
    } catch (InterruptedException ie) {
      // should never happen since that method never throws InterruptedException.
      assertNull(ie);
    }
    assertFalse(FileUtil.canRead(partitioned));
    // An unreadable directory cannot be traversed, so its usage reads as 0.
    final long du4 = FileUtil.getDU(partitioned);
    assertEquals(0, du4);
  } finally {
    // Restore the permissions so that we can delete the folder
    // in @After method:
    FileUtil.chmod(partitioned.getAbsolutePath(), "0777", true/*recursive*/);
  }
}
/**
 * unTar must extract a simple tar into an existing directory, and must
 * throw IOException when the destination is a regular file.
 */
@Test (timeout = 30000)
public void testUnTar() throws IOException {
  setupDirs();
  // make a simple tar:
  final File simpleTar = new File(del, FILE);
  OutputStream os = new FileOutputStream(simpleTar);
  TarOutputStream tos = new TarOutputStream(os);
  try {
    TarEntry te = new TarEntry("foo");
    byte[] data = "some-content".getBytes("UTF-8");
    te.setSize(data.length);
    tos.putNextEntry(te);
    tos.write(data);
    tos.closeEntry();
    tos.flush();
    tos.finish();
  } finally {
    tos.close();
  }
  // successfully untar it into an existing dir:
  FileUtil.unTar(simpleTar, tmp);
  // check result:
  assertTrue(new File(tmp, "foo").exists());
  // 12 == "some-content".length()
  assertEquals(12, new File(tmp, "foo").length());
  final File regularFile = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog");
  regularFile.createNewFile();
  assertTrue(regularFile.exists());
  try {
    // destination is a regular file, not a directory: must fail
    FileUtil.unTar(simpleTar, regularFile);
    assertTrue("An IOException expected.", false);
  } catch (IOException ioe) {
    // okay
  }
}
/**
 * replaceFile must move src over a missing or regular-file target, but must
 * fail without deleting anything when the target is a non-empty directory.
 */
@Test (timeout = 30000)
public void testReplaceFile() throws IOException {
  setupDirs();
  final File srcFile = new File(tmp, "src");
  // src exists, and target does not exist:
  srcFile.createNewFile();
  assertTrue(srcFile.exists());
  final File targetFile = new File(tmp, "target");
  assertTrue(!targetFile.exists());
  FileUtil.replaceFile(srcFile, targetFile);
  assertTrue(!srcFile.exists());
  assertTrue(targetFile.exists());
  // src exists and target is a regular file:
  srcFile.createNewFile();
  assertTrue(srcFile.exists());
  FileUtil.replaceFile(srcFile, targetFile);
  assertTrue(!srcFile.exists());
  assertTrue(targetFile.exists());
  // src exists, and target is a non-empty directory:
  srcFile.createNewFile();
  assertTrue(srcFile.exists());
  targetFile.delete();
  targetFile.mkdirs();
  File obstacle = new File(targetFile, "obstacle");
  obstacle.createNewFile();
  assertTrue(obstacle.exists());
  assertTrue(targetFile.exists() && targetFile.isDirectory());
  try {
    FileUtil.replaceFile(srcFile, targetFile);
    assertTrue(false);
  } catch (IOException ioe) {
    // okay
  }
  // check up the post-condition: nothing is deleted:
  assertTrue(srcFile.exists());
  assertTrue(targetFile.exists() && targetFile.isDirectory());
  assertTrue(obstacle.exists());
}
/**
 * createLocalTempFile must produce fresh readable/writable files distinct
 * from the base path, for both isDeleteOnExit values.
 */
@Test (timeout = 30000)
public void testCreateLocalTempFile() throws IOException {
  setupDirs();
  final File baseFile = new File(tmp, "base");
  File tmp1 = FileUtil.createLocalTempFile(baseFile, "foo", false);
  File tmp2 = FileUtil.createLocalTempFile(baseFile, "foo", true);
  assertFalse(tmp1.getAbsolutePath().equals(baseFile.getAbsolutePath()));
  assertFalse(tmp2.getAbsolutePath().equals(baseFile.getAbsolutePath()));
  assertTrue(tmp1.exists() && tmp2.exists());
  assertTrue(tmp1.canWrite() && tmp2.canWrite());
  assertTrue(tmp1.canRead() && tmp2.canRead());
  tmp1.delete();
  tmp2.delete();
  assertTrue(!tmp1.exists() && !tmp2.exists());
}
/**
 * unZip must extract a simple zip into an existing directory, and must
 * throw IOException when the destination is a regular file.
 */
@Test (timeout = 30000)
public void testUnZip() throws IOException {
  setupDirs();
  // make a simple zip:
  final File simpleZip = new File(del, FILE);
  OutputStream os = new FileOutputStream(simpleZip);
  ZipOutputStream tos = new ZipOutputStream(os);
  try {
    ZipEntry ze = new ZipEntry("foo");
    byte[] data = "some-content".getBytes("UTF-8");
    ze.setSize(data.length);
    tos.putNextEntry(ze);
    tos.write(data);
    tos.closeEntry();
    tos.flush();
    tos.finish();
  } finally {
    tos.close();
  }
  // successfully unzip it into an existing dir:
  FileUtil.unZip(simpleZip, tmp);
  // check result:
  assertTrue(new File(tmp, "foo").exists());
  // 12 == "some-content".length()
  assertEquals(12, new File(tmp, "foo").length());
  final File regularFile = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog");
  regularFile.createNewFile();
  assertTrue(regularFile.exists());
  try {
    // destination is a regular file, not a directory: must fail
    FileUtil.unZip(simpleZip, regularFile);
    assertTrue("An IOException expected.", false);
  } catch (IOException ioe) {
    // okay
  }
}
@Test (timeout = 30000)
/*
 * Test method copy(FileSystem srcFS, Path src, File dst, boolean deleteSource, Configuration conf)
 * Covers: regular file keep-source, regular file delete-source, and directory copy.
 */
public void testCopy5() throws IOException {
  setupDirs();
  URI uri = tmp.toURI();
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.newInstance(uri, conf);
  // BUGFIX: FileSystem.newInstance returns a non-cached instance that the
  // original test leaked; close it when done so the test doesn't accumulate
  // open filesystems across runs.
  try {
    final String content = "some-content";
    File srcFile = createFile(tmp, "src", content);
    Path srcPath = new Path(srcFile.toURI());
    // copy regular file:
    final File dest = new File(del, "dest");
    boolean result = FileUtil.copy(fs, srcPath, dest, false, conf);
    assertTrue(result);
    assertTrue(dest.exists());
    // createFile appends a line separator after the content.
    assertEquals(content.getBytes().length
        + System.getProperty("line.separator").getBytes().length, dest.length());
    assertTrue(srcFile.exists()); // should not be deleted
    // copy regular file, delete src:
    dest.delete();
    assertTrue(!dest.exists());
    result = FileUtil.copy(fs, srcPath, dest, true, conf);
    assertTrue(result);
    assertTrue(dest.exists());
    assertEquals(content.getBytes().length
        + System.getProperty("line.separator").getBytes().length, dest.length());
    assertTrue(!srcFile.exists()); // should be deleted
    // copy a dir:
    dest.delete();
    assertTrue(!dest.exists());
    srcPath = new Path(partitioned.toURI());
    result = FileUtil.copy(fs, srcPath, dest, true, conf);
    assertTrue(result);
    assertTrue(dest.exists() && dest.isDirectory());
    File[] files = dest.listFiles();
    assertTrue(files != null);
    assertEquals(2, files.length);
    for (File f: files) {
      // each part file holds 3 chars ("foo"/"bar") plus a line separator
      assertEquals(3
          + System.getProperty("line.separator").getBytes().length, f.length());
    }
    assertTrue(!partitioned.exists()); // should be deleted
  } finally {
    fs.close();
  }
}
/**
 * stat2Paths must map a FileStatus array to the corresponding Path array in
 * order, returning null for null input and an empty array for empty input.
 */
@Test (timeout = 30000)
public void testStat2Paths1() {
  // Null input maps to null output.
  assertNull(FileUtil.stat2Paths(null));
  // Empty input maps to an empty array.
  Path[] converted = FileUtil.stat2Paths(new FileStatus[0]);
  assertEquals(0, converted.length);
  // Each status contributes its path, preserving order.
  final Path first = new Path("file://foo");
  final Path second = new Path("file://moo");
  final FileStatus[] statuses = new FileStatus[] {
      new FileStatus(3, false, 0, 0, 0, first),
      new FileStatus(3, false, 0, 0, 0, second)
  };
  converted = FileUtil.stat2Paths(statuses);
  assertEquals(2, converted.length);
  assertEquals(first, converted[0]);
  assertEquals(second, converted[1]);
}
/**
 * stat2Paths(statuses, defaultPath) must fall back to {defaultPath} when
 * statuses is null, and otherwise behave like the single-argument overload.
 */
@Test (timeout = 30000)
public void testStat2Paths2() {
  Path defaultPath = new Path("file://default");
  Path[] paths = FileUtil.stat2Paths(null, defaultPath);
  assertEquals(1, paths.length);
  assertEquals(defaultPath, paths[0]);
  // A null default yields a single-element array holding null.
  paths = FileUtil.stat2Paths(null, null);
  assertTrue(paths != null);
  assertEquals(1, paths.length);
  assertEquals(null, paths[0]);
  Path path1 = new Path("file://foo");
  Path path2 = new Path("file://moo");
  FileStatus[] fileStatuses = new FileStatus[] {
      new FileStatus(3, false, 0, 0, 0, path1),
      new FileStatus(3, false, 0, 0, 0, path2)
  };
  // Non-null statuses: the default path is ignored.
  paths = FileUtil.stat2Paths(fileStatuses, defaultPath);
  assertEquals(2, paths.length);
  assertEquals(paths[0], path1);
  assertEquals(paths[1], path2);
}
/**
 * A symlink must report the target file's length and be readable, yielding
 * exactly the bytes written to the target.
 */
@Test (timeout = 30000)
public void testSymlink() throws Exception {
  Assert.assertFalse(del.exists());
  del.mkdirs();
  byte[] data = "testSymLink".getBytes();
  File file = new File(del, FILE);
  File link = new File(del, "_link");
  //write some data to the file
  FileOutputStream os = new FileOutputStream(file);
  os.write(data);
  os.close();
  //create the symlink
  FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
  //ensure that symlink length is correctly reported by Java
  Assert.assertEquals(data.length, file.length());
  Assert.assertEquals(data.length, link.length());
  //ensure that we can read from link.
  FileInputStream in = new FileInputStream(link);
  long len = 0;
  // BUGFIX: the original loop used `in.read() > 0`, which stops early on any
  // zero byte in the stream; -1 is the only end-of-stream marker. The data
  // here happens to contain no zero bytes, but the correct condition is != -1.
  while (in.read() != -1) {
    len++;
  }
  in.close();
  Assert.assertEquals(data.length, len);
}
/**
 * Test that rename on a symlink works as expected.
 */
@Test (timeout = 30000)
public void testSymlinkRenameTo() throws Exception {
  Assert.assertFalse(del.exists());
  del.mkdirs();
  File file = new File(del, FILE);
  file.createNewFile();
  File link = new File(del, "_link");
  // create the symlink
  FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
  Assert.assertTrue(file.exists());
  Assert.assertTrue(link.exists());
  File link2 = new File(del, "_link2");
  // Rename the symlink
  Assert.assertTrue(link.renameTo(link2));
  // Make sure the file still exists
  // (NOTE: this would fail on Java6 on Windows if we didn't
  // copy the file in FileUtil#symlink)
  Assert.assertTrue(file.exists());
  Assert.assertTrue(link2.exists());
  Assert.assertFalse(link.exists());
}
/**
 * Test that deletion of a symlink works as expected: the link goes away,
 * the target file survives.
 */
@Test (timeout = 30000)
public void testSymlinkDelete() throws Exception {
  Assert.assertFalse(del.exists());
  del.mkdirs();
  File file = new File(del, FILE);
  file.createNewFile();
  File link = new File(del, "_link");
  // create the symlink
  FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
  Assert.assertTrue(file.exists());
  Assert.assertTrue(link.exists());
  // make sure that deleting a symlink works properly
  Assert.assertTrue(link.delete());
  Assert.assertFalse(link.exists());
  Assert.assertTrue(file.exists());
}
/**
 * Test that length on a symlink works as expected: it tracks the target's
 * size while the target exists, and (except on Java6/Windows, where symLink
 * copies the file) reads as 0 once the target is deleted.
 */
@Test (timeout = 30000)
public void testSymlinkLength() throws Exception {
  Assert.assertFalse(del.exists());
  del.mkdirs();
  byte[] data = "testSymLinkData".getBytes();
  File file = new File(del, FILE);
  File link = new File(del, "_link");
  // write some data to the file
  FileOutputStream os = new FileOutputStream(file);
  os.write(data);
  os.close();
  // before the link is created, its length is that of a missing file: 0
  Assert.assertEquals(0, link.length());
  // create the symlink
  FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
  // ensure that File#length returns the target file and link size
  Assert.assertEquals(data.length, file.length());
  Assert.assertEquals(data.length, link.length());
  file.delete();
  Assert.assertFalse(file.exists());
  if (Shell.WINDOWS && !Shell.isJava7OrAbove()) {
    // On Java6 on Windows, we copied the file
    Assert.assertEquals(data.length, link.length());
  } else {
    // Otherwise, the target file size is zero
    Assert.assertEquals(0, link.length());
  }
  link.delete();
  Assert.assertFalse(link.exists());
}
private void doUntarAndVerify(File tarFile, File untarDir)
throws IOException {
if (untarDir.exists() && !FileUtil.fullyDelete(untarDir)) {
throw new IOException("Could not delete directory '" + untarDir + "'");
}
FileUtil.unTar(tarFile, untarDir);
String parentDir = untarDir.getCanonicalPath() + Path.SEPARATOR + "name";
File testFile = new File(parentDir + Path.SEPARATOR + "version");
Assert.assertTrue(testFile.exists());
Assert.assertTrue(testFile.length() == 0);
String imageDir = parentDir + Path.SEPARATOR + "image";
testFile = new File(imageDir + Path.SEPARATOR + "fsimage");
Assert.assertTrue(testFile.exists());
Assert.assertTrue(testFile.length() == 157);
String currentDir = parentDir + Path.SEPARATOR + "current";
testFile = new File(currentDir + Path.SEPARATOR + "fsimage");
Assert.assertTrue(testFile.exists());
Assert.assertTrue(testFile.length() == 4331);
testFile = new File(currentDir + Path.SEPARATOR + "edits");
Assert.assertTrue(testFile.exists());
Assert.assertTrue(testFile.length() == 1033);
testFile = new File(currentDir + Path.SEPARATOR + "fstime");
Assert.assertTrue(testFile.exists());
Assert.assertTrue(testFile.length() == 8);
}
@Test (timeout = 30000)
public void testUntar() throws IOException {
String tarGzFileName = System.getProperty("test.cache.data",
"build/test/cache") + "/test-untar.tgz";
String tarFileName = System.getProperty("test.cache.data",
"build/test/cache") + "/test-untar.tar";
String dataDir = System.getProperty("test.build.data", "build/test/data");
File untarDir = new File(dataDir, "untarDir");
doUntarAndVerify(new File(tarGzFileName), untarDir);
doUntarAndVerify(new File(tarFileName), untarDir);
}
  /**
   * Tests FileUtil#createJarWithClassPath: builds a classpath containing an
   * empty entry, relative jars, a wildcard and a non-existent trailing-slash
   * directory, then verifies the Class-Path attribute of the generated
   * manifest-only jar matches the expanded expectation.
   */
  @Test (timeout = 30000)
  public void testCreateJarWithClassPath() throws Exception {
    // setup test directory for files
    Assert.assertFalse(tmp.exists());
    Assert.assertTrue(tmp.mkdirs());
    // create files expected to match a wildcard
    // (both .jar and .JAR extensions should be picked up)
    List<File> wildcardMatches = Arrays.asList(new File(tmp, "wildcard1.jar"),
      new File(tmp, "wildcard2.jar"), new File(tmp, "wildcard3.JAR"),
      new File(tmp, "wildcard4.JAR"));
    for (File wildcardMatch: wildcardMatches) {
      Assert.assertTrue("failure creating file: " + wildcardMatch,
        wildcardMatch.createNewFile());
    }
    // create non-jar files, which we expect to not be included in the classpath
    Assert.assertTrue(new File(tmp, "text.txt").createNewFile());
    Assert.assertTrue(new File(tmp, "executable.exe").createNewFile());
    Assert.assertTrue(new File(tmp, "README").createNewFile());
    // create classpath jar
    String wildcardPath = tmp.getCanonicalPath() + File.separator + "*";
    // a directory entry that does not exist yet, with a trailing separator
    String nonExistentSubdir = tmp.getCanonicalPath() + Path.SEPARATOR + "subdir"
      + Path.SEPARATOR;
    // include an empty element to verify it is skipped
    List<String> classPaths = Arrays.asList("", "cp1.jar", "cp2.jar", wildcardPath,
      "cp3.jar", nonExistentSubdir);
    String inputClassPath = StringUtils.join(File.pathSeparator, classPaths);
    String classPathJar = FileUtil.createJarWithClassPath(inputClassPath,
      new Path(tmp.getCanonicalPath()), System.getenv());
    // verify classpath by reading manifest from jar file
    JarFile jarFile = null;
    try {
      jarFile = new JarFile(classPathJar);
      Manifest jarManifest = jarFile.getManifest();
      Assert.assertNotNull(jarManifest);
      Attributes mainAttributes = jarManifest.getMainAttributes();
      Assert.assertNotNull(mainAttributes);
      Assert.assertTrue(mainAttributes.containsKey(Attributes.Name.CLASS_PATH));
      String classPathAttr = mainAttributes.getValue(Attributes.Name.CLASS_PATH);
      Assert.assertNotNull(classPathAttr);
      // Build the expected Class-Path entries, expanding the wildcard and
      // resolving relative entries against tmp.
      List<String> expectedClassPaths = new ArrayList<String>();
      for (String classPath: classPaths) {
        // empty classpath elements are dropped
        if (classPath.length() == 0) {
          continue;
        }
        if (wildcardPath.equals(classPath)) {
          // add wildcard matches
          for (File wildcardMatch: wildcardMatches) {
            expectedClassPaths.add(wildcardMatch.toURI().toURL()
              .toExternalForm());
          }
        } else {
          File fileCp = null;
          if(!new Path(classPath).isAbsolute()) {
            fileCp = new File(tmp, classPath);
          }
          else {
            fileCp = new File(classPath);
          }
          if (nonExistentSubdir.equals(classPath)) {
            // expect to maintain trailing path separator if present in input, even
            // if directory doesn't exist yet
            expectedClassPaths.add(fileCp.toURI().toURL()
              .toExternalForm() + Path.SEPARATOR);
          } else {
            expectedClassPaths.add(fileCp.toURI().toURL()
              .toExternalForm());
          }
        }
      }
      // Compare order-insensitively: the attribute is space-separated.
      List<String> actualClassPaths = Arrays.asList(classPathAttr.split(" "));
      Collections.sort(expectedClassPaths);
      Collections.sort(actualClassPaths);
      Assert.assertEquals(expectedClassPaths, actualClassPaths);
    } finally {
      // close the jar explicitly; failure to close is logged, not fatal
      if (jarFile != null) {
        try {
          jarFile.close();
        } catch (IOException e) {
          LOG.warn("exception closing jarFile: " + classPathJar, e);
        }
      }
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.jdbc;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URI;
import java.net.UnknownHostException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.rpc.thrift.TStatus;
import org.apache.hive.service.rpc.thrift.TStatusCode;
import org.apache.http.client.CookieStore;
import org.apache.http.cookie.Cookie;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Utils {
  static final Logger LOG = LoggerFactory.getLogger(Utils.class.getName());
  /**
   * The required prefix for the connection URL.
   */
  public static final String URL_PREFIX = "jdbc:hive2://";
  /**
   * If host is provided, without a port.
   */
  static final String DEFAULT_PORT = "10000";
  // To parse the intermediate URI as a Java URI, we'll give a dummy authority(dummyhost:00000).
  // Later, we'll substitute the dummy authority for a resolved authority.
  static final String dummyAuthorityString = "dummyhost:00000";
  /**
   * Hive's default database name
   */
  static final String DEFAULT_DATABASE = "default";
  // Scheme prefixes used when stripping/rebuilding the URL for java.net.URI parsing.
  private static final String URI_JDBC_PREFIX = "jdbc:";
  private static final String URI_HIVE_PREFIX = "hive2:";
  // This value is set to true by the setServiceUnavailableRetryStrategy() when the server returns 401.
  // This value is used only when cookie is sent for authorization. In case the cookie is expired,
  // client will send the actual credentials in the next connection request.
  // If credentials are sent in the first request it self, then no need to retry.
  static final String HIVE_SERVER2_RETRY_KEY = "hive.server2.retryserver";
  static final String HIVE_SERVER2_SENT_CREDENTIALS = "hive.server2.sentCredentials";
  // String constants for the boolean values of the two keys above.
  static final String HIVE_SERVER2_CONST_TRUE = "true";
  static final String HIVE_SERVER2_CONST_FALSE = "false";
public static class JdbcConnectionParams {
// Note on client side parameter naming convention:
// Prefer using a shorter camelCase param name instead of using the same name as the
// corresponding
// HiveServer2 config.
// For a jdbc url: jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list,
// client side params are specified in sess_var_list
// Client param names:
// Retry setting
public static final String RETRIES = "retries";
public static final String RETRY_INTERVAL = "retryInterval";
public static final String AUTH_TYPE = "auth";
// We're deprecating this variable's name.
public static final String AUTH_QOP_DEPRECATED = "sasl.qop";
public static final String AUTH_QOP = "saslQop";
public static final String AUTH_SIMPLE = "noSasl";
public static final String AUTH_TOKEN = "delegationToken";
public static final String AUTH_USER = "user";
public static final String AUTH_PRINCIPAL = "principal";
public static final String AUTH_PASSWD = "password";
public static final String AUTH_KERBEROS_AUTH_TYPE = "kerberosAuthType";
public static final String AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT = "fromSubject";
public static final String AUTH_KERBEROS_ENABLE_CANONICAL_HOSTNAME_CHECK = "kerberosEnableCanonicalHostnameCheck";
public static final String AUTH_TYPE_JWT = "jwt";
public static final String AUTH_TYPE_JWT_KEY = "jwt";
public static final String AUTH_JWT_ENV = "JWT";
// JdbcConnection param which specifies if we need to use a browser to do
// authentication.
// JdbcConnectionParam which specifies if the authMode is done via a browser
public static final String AUTH_SSO_BROWSER_MODE = "browser";
public static final String AUTH_SSO_TOKEN_MODE = "token";
// connection parameter used to specify a port number to listen on in case of
// browser mode.
public static final String AUTH_BROWSER_RESPONSE_PORT = "browserResponsePort";
// connection parameter used to specify the timeout in seconds for the browser mode
public static final String AUTH_BROWSER_RESPONSE_TIMEOUT_SECS = "browserResponseTimeout";
// connection parameter to optionally disable the SSL validation done when using
// browser based authentication. Useful mostly for testing/dev purposes.
// By default, SSL validation is done unless this parameter is set to true.
public static final String AUTH_BROWSER_DISABLE_SSL_VALIDATION = "browserDisableSslCheck";
public static final String ANONYMOUS_USER = "anonymous";
public static final String ANONYMOUS_PASSWD = "anonymous";
public static final String USE_SSL = "ssl";
public static final String SSL_TRUST_STORE = "sslTrustStore";
public static final String SSL_TRUST_STORE_PASSWORD = "trustStorePassword";
public static final String SSL_TRUST_STORE_TYPE = "trustStoreType";
public static final String SSL_TRUST_MANAGER_FACTORY_ALGORITHM = "trustManagerFactoryAlgorithm";
// We're deprecating the name and placement of this in the parsed map (from hive conf vars to
// hive session vars).
static final String TRANSPORT_MODE_DEPRECATED = "hive.server2.transport.mode";
public static final String TRANSPORT_MODE = "transportMode";
// We're deprecating the name and placement of this in the parsed map (from hive conf vars to
// hive session vars).
static final String HTTP_PATH_DEPRECATED = "hive.server2.thrift.http.path";
public static final String HTTP_PATH = "httpPath";
public static final String SERVICE_DISCOVERY_MODE = "serviceDiscoveryMode";
public static final String PROPERTY_DRIVER = "driver";
public static final String PROPERTY_URL = "url";
// Don't use dynamic service discovery
static final String SERVICE_DISCOVERY_MODE_NONE = "none";
// Use ZooKeeper for indirection while using dynamic service discovery
public static final String SERVICE_DISCOVERY_MODE_ZOOKEEPER = "zooKeeper";
public static final String SERVICE_DISCOVERY_MODE_ZOOKEEPER_HA = "zooKeeperHA";
public static final String ZOOKEEPER_NAMESPACE = "zooKeeperNamespace";
public static final String ZOOKEEPER_SSL_ENABLE = "zooKeeperSSLEnable";
public static final String ZOOKEEPER_KEYSTORE_LOCATION = "zooKeeperKeystoreLocation";
public static final String ZOOKEEPER_KEYSTORE_PASSWORD= "zooKeeperKeystorePassword";
public static final String ZOOKEEPER_KEYSTORE_TYPE= "zooKeeperKeystoreType";
public static final String ZOOKEEPER_TRUSTSTORE_LOCATION = "zooKeeperTruststoreLocation";
public static final String ZOOKEEPER_TRUSTSTORE_PASSWORD = "zooKeeperTruststorePassword";
public static final String ZOOKEEPER_TRUSTSTORE_TYPE = "zooKeeperTruststoreType";
// Default namespace value on ZooKeeper.
// This value is used if the param "zooKeeperNamespace" is not specified in the JDBC Uri.
static final String ZOOKEEPER_DEFAULT_NAMESPACE = "hiveserver2";
static final String ZOOKEEPER_ACTIVE_PASSIVE_HA_DEFAULT_NAMESPACE = "hs2ActivePassiveHA";
static final String COOKIE_AUTH = "cookieAuth";
static final String COOKIE_AUTH_FALSE = "false";
static final String COOKIE_NAME = "cookieName";
// The default value of the cookie name when CookieAuth=true
static final String DEFAULT_COOKIE_NAMES_HS2 = "hive.server2.auth";
// The http header prefix for additional headers which have to be appended to the request
static final String HTTP_HEADER_PREFIX = "http.header.";
// Request tracking
static final String JDBC_PARAM_REQUEST_TRACK = "requestTrack";
// Set the fetchSize
static final String FETCH_SIZE = "fetchSize";
static final String INIT_FILE = "initFile";
static final String FETCH_THREADS = "fetchThreads";
static final String WM_POOL = "wmPool";
// Cookie prefix
static final String HTTP_COOKIE_PREFIX = "http.cookie.";
// Create external purge table by default
static final String CREATE_TABLE_AS_EXTERNAL = "hiveCreateAsExternalLegacy";
public static final String SOCKET_TIMEOUT = "socketTimeout";
static final String THRIFT_CLIENT_MAX_MESSAGE_SIZE = "thrift.client.max.message.size";
// We support ways to specify application name modeled after some existing DBs, since
// there's no standard approach.
// MSSQL: applicationName https://docs.microsoft.com/en-us/sql/connect/jdbc/building-the-connection-url
// Postgres 9~: ApplicationName https://jdbc.postgresql.org/documentation/91/connect.html
// Note: various ODBC names used include "Application Name", "APP", etc. Add those?
static final String[] APPLICATION = new String[] { "applicationName", "ApplicationName" };
// --------------- Begin 2 way ssl options -------------------------
// Use two way ssl. This param will take effect only when ssl=true
static final String USE_TWO_WAY_SSL = "twoWay";
static final String TRUE = "true";
static final String SSL_KEY_STORE = "sslKeyStore";
static final String SSL_KEY_STORE_PASSWORD = "keyStorePassword";
static final String SSL_KEY_STORE_TYPE = "keyStoreType";
static final String SUNX509_ALGORITHM_STRING = "SunX509";
static final String SUNJSSE_ALGORITHM_STRING = "SunJSSE";
// --------------- End 2 way ssl options ----------------------------
static final String SSL_STORE_PASSWORD_PATH = "storePasswordPath";
private static final String HIVE_VAR_PREFIX = "hivevar:";
public static final String HIVE_CONF_PREFIX = "hiveconf:";
private String host = null;
private int port = 0;
private String jdbcUriString;
private String dbName = DEFAULT_DATABASE;
private Map<String,String> hiveConfs = new LinkedHashMap<String,String>();
private Map<String,String> hiveVars = new LinkedHashMap<String,String>();
private Map<String,String> sessionVars = new LinkedHashMap<String,String>();
private boolean isEmbeddedMode = false;
private String suppliedURLAuthority;
private String zooKeeperEnsemble = null;
private boolean zooKeeperSslEnabled = false;
private String zookeeperKeyStoreLocation = "";
private String zookeeperKeyStorePassword = "";
private String zookeeperKeyStoreType;
private String zookeeperTrustStoreLocation = "";
private String zookeeperTrustStorePassword = "";
private String zookeeperTrustStoreType;
private String currentHostZnodePath;
private final List<String> rejectedHostZnodePaths = new ArrayList<String>();
// HiveConf parameters
private static String getNullsLastVarname() {
try {
return HiveConf.ConfVars.HIVE_DEFAULT_NULLS_LAST.varname;
} catch(java.lang.NoSuchFieldError e) {
return "hive.default.nulls.last";
}
}
public static final String HIVE_DEFAULT_NULLS_LAST_KEY =
HIVE_CONF_PREFIX + getNullsLastVarname();
private static String getFetchThreadsVarname() {
try {
return HiveConf.ConfVars.HIVE_JDBC_FETCH_THREADS.varname;
} catch(java.lang.NoSuchFieldError e) {
return "hive.jdbc.fetch.threads";
}
}
public static final String HIVE_HIVE_JDBC_FETCH_THREADS_KEY =
HIVE_CONF_PREFIX + getFetchThreadsVarname();
public JdbcConnectionParams() {
}
public JdbcConnectionParams(JdbcConnectionParams params) {
this.host = params.host;
this.port = params.port;
this.jdbcUriString = params.jdbcUriString;
this.dbName = params.dbName;
this.hiveConfs.putAll(params.hiveConfs);
this.hiveVars.putAll(params.hiveVars);
this.sessionVars.putAll(params.sessionVars);
this.isEmbeddedMode = params.isEmbeddedMode;
this.suppliedURLAuthority = params.suppliedURLAuthority;
this.zooKeeperEnsemble = params.zooKeeperEnsemble;
this.zooKeeperSslEnabled = params.zooKeeperSslEnabled;
this.zookeeperKeyStoreLocation = params.zookeeperKeyStoreLocation;
this.zookeeperKeyStorePassword = params.zookeeperKeyStorePassword;
this.zookeeperKeyStoreType = params.zookeeperKeyStoreType;
this.zookeeperTrustStoreLocation = params.zookeeperTrustStoreLocation;
this.zookeeperTrustStorePassword = params.zookeeperTrustStorePassword;
this.zookeeperTrustStoreType = params.zookeeperTrustStoreType;
this.currentHostZnodePath = params.currentHostZnodePath;
this.rejectedHostZnodePaths.addAll(rejectedHostZnodePaths);
}
public String getHost() {
return host;
}
public int getPort() {
return port;
}
public String getJdbcUriString() {
return jdbcUriString;
}
public String getDbName() {
return dbName;
}
public Map<String, String> getHiveConfs() {
return hiveConfs;
}
public Map<String, String> getHiveVars() {
return hiveVars;
}
public boolean isEmbeddedMode() {
return isEmbeddedMode;
}
public Map<String, String> getSessionVars() {
return sessionVars;
}
public String getSuppliedURLAuthority() {
return suppliedURLAuthority;
}
public String getZooKeeperEnsemble() {
return zooKeeperEnsemble;
}
public boolean isZooKeeperSslEnabled() {
return zooKeeperSslEnabled;
}
public String getZookeeperKeyStoreLocation() {
return zookeeperKeyStoreLocation;
}
public String getZookeeperKeyStorePassword() {
return zookeeperKeyStorePassword;
}
public String getZookeeperKeyStoreType() {
return zookeeperKeyStoreType;
}
public String getZookeeperTrustStoreLocation() {
return zookeeperTrustStoreLocation;
}
public String getZookeeperTrustStorePassword() {
return zookeeperTrustStorePassword;
}
public String getZookeeperTrustStoreType() {
return zookeeperTrustStoreType;
}
public List<String> getRejectedHostZnodePaths() {
return rejectedHostZnodePaths;
}
public String getCurrentHostZnodePath() {
return currentHostZnodePath;
}
public void setHost(String host) {
this.host = host;
}
public void setPort(int port) {
this.port = port;
}
public void setJdbcUriString(String jdbcUriString) {
this.jdbcUriString = jdbcUriString;
}
public void setDbName(String dbName) {
this.dbName = dbName;
}
public void setHiveConfs(Map<String, String> hiveConfs) {
this.hiveConfs = hiveConfs;
}
public void setHiveVars(Map<String, String> hiveVars) {
this.hiveVars = hiveVars;
}
public void setEmbeddedMode(boolean embeddedMode) {
this.isEmbeddedMode = embeddedMode;
}
public void setSessionVars(Map<String, String> sessionVars) {
this.sessionVars = sessionVars;
}
public void setSuppliedURLAuthority(String suppliedURLAuthority) {
this.suppliedURLAuthority = suppliedURLAuthority;
}
public void setZooKeeperEnsemble(String zooKeeperEnsemble) {
this.zooKeeperEnsemble = zooKeeperEnsemble;
}
public void setZooKeeperSslEnabled(boolean zooKeeperSslEnabled) {
this.zooKeeperSslEnabled = zooKeeperSslEnabled;
}
public void setZookeeperKeyStoreLocation(String zookeeperKeyStoreLocation) {
this.zookeeperKeyStoreLocation = zookeeperKeyStoreLocation;
}
public void setZookeeperKeyStorePassword(String zookeeperKeyStorePassword) {
this.zookeeperKeyStorePassword = zookeeperKeyStorePassword;
}
public void setZookeeperKeyStoreType(String zookeeperKeyStoreType) {
this.zookeeperKeyStoreType = zookeeperKeyStoreType;
}
public void setZookeeperTrustStoreLocation(String zookeeperTrustStoreLocation) {
this.zookeeperTrustStoreLocation = zookeeperTrustStoreLocation;
}
public void setZookeeperTrustStorePassword(String zookeeperTrustStorePassword) {
this.zookeeperTrustStorePassword = zookeeperTrustStorePassword;
}
public void setZookeeperTrustStoreType(String zookeeperTrustStoreType) {
this.zookeeperTrustStoreType = zookeeperTrustStoreType;
}
public void setCurrentHostZnodePath(String currentHostZnodePath) {
this.currentHostZnodePath = currentHostZnodePath;
}
}
  /**
   * Verifies that a Thrift call succeeded, accepting both SUCCESS_STATUS
   * and SUCCESS_WITH_INFO_STATUS.
   *
   * @param status the Thrift status to inspect
   * @throws SQLException if the status represents a failure
   */
  static void verifySuccessWithInfo(TStatus status) throws SQLException {
    verifySuccess(status, true);
  }
  /**
   * Verifies that a Thrift call returned SUCCESS_STATUS (with-info does
   * not count as success here).
   *
   * @param status the Thrift status to inspect
   * @throws SQLException if the status represents a failure
   */
  static void verifySuccess(TStatus status) throws SQLException {
    verifySuccess(status, false);
  }
// Verify success and optionally with_info status, else throw SQLException
static void verifySuccess(TStatus status, boolean withInfo) throws SQLException {
if (status.getStatusCode() == TStatusCode.SUCCESS_STATUS ||
(withInfo && status.getStatusCode() == TStatusCode.SUCCESS_WITH_INFO_STATUS)) {
return;
}
throw new HiveSQLException(status);
}
  /**
   * Parses a JDBC connection URL with no extra connection properties;
   * equivalent to {@code parseURL(uri, new Properties())}.
   *
   * @param uri the jdbc:hive2:// connection URL
   * @return the parsed connection parameters
   * @throws JdbcUriParseException if the URL is malformed
   * @throws SQLException on connection-parameter errors
   * @throws ZooKeeperHiveClientException if ZooKeeper discovery fails
   */
  public static JdbcConnectionParams parseURL(String uri) throws JdbcUriParseException,
      SQLException, ZooKeeperHiveClientException {
    return parseURL(uri, new Properties());
  }
/**
* Parse JDBC connection URL
* The new format of the URL is:
* jdbc:hive2://<host1>:<port1>,<host2>:<port2>/dbName;sess_var_list?hive_conf_list#hive_var_list
* where the optional sess, conf and var lists are semicolon separated <key>=<val> pairs.
* For utilizing dynamic service discovery with HiveServer2 multiple comma separated host:port pairs can
* be specified as shown above.
* The JDBC driver resolves the list of uris and picks a specific server instance to connect to.
* Currently, dynamic service discovery using ZooKeeper is supported, in which case the host:port pairs represent a ZooKeeper ensemble.
*
* As before, if the host/port is not specified, it the driver runs an embedded hive.
* examples -
* jdbc:hive2://ubuntu:11000/db2?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
* jdbc:hive2://?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
* jdbc:hive2://ubuntu:11000/db2;user=foo;password=bar
*
* Connect to http://server:10001/hs2, with specified basicAuth credentials and initial database:
* jdbc:hive2://server:10001/db;user=foo;password=bar?hive.server2.transport.mode=http;hive.server2.thrift.http.path=hs2
*
* @param uri
* @return
* @throws SQLException
*/
public static JdbcConnectionParams parseURL(String uri, Properties info)
throws JdbcUriParseException, SQLException, ZooKeeperHiveClientException {
JdbcConnectionParams connParams = extractURLComponents(uri, info);
if (ZooKeeperHiveClientHelper.isZkDynamicDiscoveryMode(connParams.getSessionVars())) {
configureConnParamsFromZooKeeper(connParams);
}
handleAllDeprecations(connParams);
return connParams;
}
  /**
   * This method handles the base parsing of the given jdbc uri. Some of JdbcConnectionParams
   * returned from this method are updated if ZooKeeper is used for service discovery
   *
   * Parsing strategy: the (possibly multi-host) authority is cut out of the
   * URL and replaced with a dummy authority so the rest can be parsed with
   * java.net.URI; the real authority is substituted back at the end.
   *
   * @param uri the jdbc:hive2:// URL
   * @param info extra connection properties (user/password/hivevar:/hiveconf:)
   * @return the parsed connection parameters (host/port unresolved when
   *         ZooKeeper discovery is configured)
   * @throws JdbcUriParseException if the URL is malformed
   */
  public static JdbcConnectionParams extractURLComponents(String uri, Properties info)
      throws JdbcUriParseException {
    JdbcConnectionParams connParams = new JdbcConnectionParams();
    if (!uri.startsWith(URL_PREFIX)) {
      throw new JdbcUriParseException("Bad URL format: Missing prefix " + URL_PREFIX);
    }
    // For URLs with no other configuration
    // Don't parse them, but set embedded mode as true
    if (uri.equalsIgnoreCase(URL_PREFIX)) {
      connParams.setEmbeddedMode(true);
      return connParams;
    }
    // The JDBC URI now supports specifying multiple host:port if dynamic service discovery is
    // configured on HiveServer2 (like: host1:port1,host2:port2,host3:port3)
    // We'll extract the authorities (host:port combo) from the URI, extract session vars, hive
    // confs & hive vars by parsing it as a Java URI.
    String authorityFromClientJdbcURL = getAuthorityFromJdbcURL(uri);
    if ((authorityFromClientJdbcURL == null) || (authorityFromClientJdbcURL.isEmpty())) {
      // Given uri of the form:
      // jdbc:hive2:///dbName;sess_var_list?hive_conf_list#hive_var_list
      connParams.setEmbeddedMode(true);
    } else {
      connParams.setSuppliedURLAuthority(authorityFromClientJdbcURL);
      uri = uri.replace(authorityFromClientJdbcURL, dummyAuthorityString);
    }
    // Now parse the connection uri with dummy authority
    URI jdbcURI = URI.create(uri.substring(URI_JDBC_PREFIX.length()));
    // key=value pattern
    Pattern pattern = Pattern.compile("([^;]*)=([^;]*)[;]?");
    // dbname and session settings
    String sessVars = jdbcURI.getPath();
    if ((sessVars != null) && !sessVars.isEmpty()) {
      String dbName = "";
      // removing leading '/' returned by getPath()
      sessVars = sessVars.substring(1);
      if (!sessVars.contains(";")) {
        // only dbname is provided
        dbName = sessVars;
      } else {
        // we have dbname followed by session parameters
        dbName = sessVars.substring(0, sessVars.indexOf(';'));
        sessVars = sessVars.substring(sessVars.indexOf(';') + 1);
        if (sessVars != null) {
          Matcher sessMatcher = pattern.matcher(sessVars);
          while (sessMatcher.find()) {
            // duplicate session-variable keys are rejected outright
            if (connParams.getSessionVars().put(sessMatcher.group(1),
                sessMatcher.group(2)) != null) {
              throw new JdbcUriParseException(
                  "Bad URL format: Multiple values for property " + sessMatcher.group(1));
            }
          }
        }
      }
      if (!dbName.isEmpty()) {
        connParams.setDbName(dbName);
      }
    }
    // Re-extract the password from the raw uri: the generic key=value pattern
    // above splits on ';', and this case-insensitive pattern re-reads the
    // full value for the "password" key.
    Pattern fullPasswordPattern = Pattern.compile("(?i)(?<=;|^)password=([^;]+)");
    Matcher fullPwdMatcher = fullPasswordPattern.matcher(uri);
    if (fullPwdMatcher.find()) {
      String fullPassword = fullPwdMatcher.group(1);
      connParams.getSessionVars().put(JdbcConnectionParams.AUTH_PASSWD, fullPassword);
    }
    // parse hive conf settings (the '?...' query component)
    String confStr = jdbcURI.getQuery();
    if (confStr != null) {
      Matcher confMatcher = pattern.matcher(confStr);
      while (confMatcher.find()) {
        connParams.getHiveConfs().put(confMatcher.group(1), confMatcher.group(2));
      }
    }
    // parse hive var settings (the '#...' fragment component)
    String varStr = jdbcURI.getFragment();
    if (varStr != null) {
      Matcher varMatcher = pattern.matcher(varStr);
      while (varMatcher.find()) {
        connParams.getHiveVars().put(varMatcher.group(1), varMatcher.group(2));
      }
    }
    // Apply configs supplied in the JDBC connection properties object;
    // "hivevar:"/"hiveconf:" prefixed keys route into the respective maps.
    for (Map.Entry<Object, Object> kv : info.entrySet()) {
      if ((kv.getKey() instanceof String)) {
        String key = (String) kv.getKey();
        if (key.startsWith(JdbcConnectionParams.HIVE_VAR_PREFIX)) {
          connParams.getHiveVars().put(key.substring(JdbcConnectionParams.HIVE_VAR_PREFIX.length()),
              info.getProperty(key));
        } else if (key.startsWith(JdbcConnectionParams.HIVE_CONF_PREFIX)) {
          connParams.getHiveConfs().put(
              key.substring(JdbcConnectionParams.HIVE_CONF_PREFIX.length()), info.getProperty(key));
        }
      }
    }
    // Extract user/password from JDBC connection properties if its not supplied
    // in the connection URL
    if (!connParams.getSessionVars().containsKey(JdbcConnectionParams.AUTH_USER)) {
      if (info.containsKey(JdbcConnectionParams.AUTH_USER)) {
        connParams.getSessionVars().put(JdbcConnectionParams.AUTH_USER,
            info.getProperty(JdbcConnectionParams.AUTH_USER));
      }
      if (info.containsKey(JdbcConnectionParams.AUTH_PASSWD)) {
        connParams.getSessionVars().put(JdbcConnectionParams.AUTH_PASSWD,
            info.getProperty(JdbcConnectionParams.AUTH_PASSWD));
      }
    }
    // NOTE(review): this block duplicates the one above but guards on a
    // missing password instead of a missing user, and also re-copies the
    // user. When the URL supplies a user but no password, the property-file
    // user overwrites the URL user — looks unintentional; confirm before
    // changing.
    if (!connParams.getSessionVars().containsKey(JdbcConnectionParams.AUTH_PASSWD)) {
      if (info.containsKey(JdbcConnectionParams.AUTH_USER)) {
        connParams.getSessionVars().put(JdbcConnectionParams.AUTH_USER,
            info.getProperty(JdbcConnectionParams.AUTH_USER));
      }
      if (info.containsKey(JdbcConnectionParams.AUTH_PASSWD)) {
        connParams.getSessionVars().put(JdbcConnectionParams.AUTH_PASSWD,
            info.getProperty(JdbcConnectionParams.AUTH_PASSWD));
      }
    }
    // auth type from properties always wins over the URL
    if (info.containsKey(JdbcConnectionParams.AUTH_TYPE)) {
      connParams.getSessionVars().put(JdbcConnectionParams.AUTH_TYPE,
          info.getProperty(JdbcConnectionParams.AUTH_TYPE));
    }
    // Extract host, port
    if (connParams.isEmbeddedMode()) {
      // In case of embedded mode we were supplied with an empty authority.
      // So we never substituted the authority with a dummy one.
      connParams.setHost(jdbcURI.getHost());
      connParams.setPort(jdbcURI.getPort());
    } else {
      String authorityStr = connParams.getSuppliedURLAuthority();
      // If we're using ZooKeeper, the final host, port will be read from ZooKeeper
      // (in a different method call). Therefore, we put back the original authority string
      // (which basically is the ZooKeeper ensemble) back in the uri
      if (ZooKeeperHiveClientHelper.isZkDynamicDiscoveryMode(connParams.getSessionVars())) {
        uri = uri.replace(dummyAuthorityString, authorityStr);
        // Set ZooKeeper ensemble in connParams for later use
        connParams.setZooKeeperEnsemble(authorityStr);
        ZooKeeperHiveClientHelper.setZkSSLParams(connParams);
      } else {
        URI jdbcBaseURI = URI.create(URI_HIVE_PREFIX + "//" + authorityStr);
        // Check to prevent unintentional use of embedded mode. A missing "/"
        // to separate the 'path' portion of URI can result in this.
        // The missing "/" common typo while using secure mode, eg of such url -
        // jdbc:hive2://localhost:10000;principal=hive/HiveServer2Host@YOUR-REALM.COM
        if (jdbcBaseURI.getAuthority() != null) {
          String host = jdbcBaseURI.getHost();
          int port = jdbcBaseURI.getPort();
          if (host == null) {
            throw new JdbcUriParseException(
                "Bad URL format. Hostname not found " + " in authority part of the url: "
                    + jdbcBaseURI.getAuthority() + ". Are you missing a '/' after the hostname ?");
          }
          // Set the port to default value; we do support jdbc url like:
          // jdbc:hive2://localhost/db
          if (port <= 0) {
            port = Integer.parseInt(Utils.DEFAULT_PORT);
          }
          connParams.setHost(host);
          connParams.setPort(port);
        }
        // We check for invalid host, port while configuring connParams with configureConnParams()
        authorityStr = IPStackUtils.concatHostPort(connParams.getHost(), connParams.getPort());
        LOG.debug("Resolved authority: " + authorityStr);
        uri = uri.replace(dummyAuthorityString, authorityStr);
      }
    }
    connParams.setJdbcUriString(uri);
    return connParams;
  }
  /**
   * Resolves a concrete HiveServer2 instance via ZooKeeper and rewrites the
   * JDBC URI so its authority component (originally the ZooKeeper ensemble)
   * points at the chosen server's host:port.
   *
   * @param connParams params whose host/port/URI are updated in place
   * @throws ZooKeeperHiveClientException if discovery fails
   * @throws JdbcUriParseException if the stored URI cannot be re-parsed
   */
  static void configureConnParamsFromZooKeeper(JdbcConnectionParams connParams)
      throws ZooKeeperHiveClientException, JdbcUriParseException {
    // Picks a server instance and sets host/port on connParams.
    ZooKeeperHiveClientHelper.configureConnParams(connParams);
    String authorityStr = IPStackUtils.concatHostPort(connParams.getHost(), connParams.getPort());
    LOG.debug("Resolved authority: " + authorityStr);
    String jdbcUriString = connParams.getJdbcUriString();
    // Replace ZooKeeper ensemble from the authority component of the JDBC Uri provided by the
    // client, by the host:port of the resolved server instance we will connect to
    connParams.setJdbcUriString(
        jdbcUriString.replace(getAuthorityFromJdbcURL(jdbcUriString), authorityStr));
  }
private static void handleAllDeprecations(JdbcConnectionParams connParams) {
// Handle all deprecations here:
String newUsage;
String usageUrlBase = "jdbc:hive2://<host>:<port>/dbName;";
// Handle deprecation of AUTH_QOP_DEPRECATED
newUsage = usageUrlBase + JdbcConnectionParams.AUTH_QOP + "=<qop_value>";
handleParamDeprecation(connParams.getSessionVars(), connParams.getSessionVars(),
JdbcConnectionParams.AUTH_QOP_DEPRECATED, JdbcConnectionParams.AUTH_QOP, newUsage);
// Handle deprecation of TRANSPORT_MODE_DEPRECATED
newUsage = usageUrlBase + JdbcConnectionParams.TRANSPORT_MODE + "=<transport_mode_value>";
handleParamDeprecation(connParams.getHiveConfs(), connParams.getSessionVars(),
JdbcConnectionParams.TRANSPORT_MODE_DEPRECATED, JdbcConnectionParams.TRANSPORT_MODE,
newUsage);
// Handle deprecation of HTTP_PATH_DEPRECATED
newUsage = usageUrlBase + JdbcConnectionParams.HTTP_PATH + "=<http_path_value>";
handleParamDeprecation(connParams.getHiveConfs(), connParams.getSessionVars(),
JdbcConnectionParams.HTTP_PATH_DEPRECATED, JdbcConnectionParams.HTTP_PATH, newUsage);
}
/**
* Remove the deprecatedName param from the fromMap and put the key value in the toMap.
* Also log a deprecation message for the client.
* @param fromMap
* @param toMap
* @param deprecatedName
* @param newName
* @param newUsage
*/
private static void handleParamDeprecation(Map<String, String> fromMap, Map<String, String> toMap,
String deprecatedName, String newName, String newUsage) {
if (fromMap.containsKey(deprecatedName)) {
LOG.warn("***** JDBC param deprecation *****");
LOG.warn("The use of " + deprecatedName + " is deprecated.");
LOG.warn("Please use " + newName +" like so: " + newUsage);
String paramValue = fromMap.remove(deprecatedName);
toMap.put(newName, paramValue);
}
}
/**
* Get the authority string from the supplied uri, which could potentially contain multiple
* host:port pairs.
*
* @param uri
* @return
* @throws JdbcUriParseException
*/
private static String getAuthorityFromJdbcURL(String uri) throws JdbcUriParseException {
String authorities;
/**
* For a jdbc uri like:
* jdbc:hive2://<host1>:<port1>,<host2>:<port2>/dbName;sess_var_list?conf_list#var_list Extract
* the uri host:port list starting after "jdbc:hive2://", till the 1st "/" or "?" or "#"
* whichever comes first & in the given order Examples:
* jdbc:hive2://host1:port1,host2:port2,host3:port3/db;k1=v1?k2=v2#k3=v3
* jdbc:hive2://host1:port1,host2:port2,host3:port3/;k1=v1?k2=v2#k3=v3
* jdbc:hive2://host1:port1,host2:port2,host3:port3?k2=v2#k3=v3
* jdbc:hive2://host1:port1,host2:port2,host3:port3#k3=v3
*/
int fromIndex = Utils.URL_PREFIX.length();
int toIndex = -1;
for (String toIndexChar : Arrays.asList("/", "?", "#")) {
toIndex = uri.indexOf(toIndexChar, fromIndex);
if (toIndex > 0) {
break;
}
}
if (toIndex < 0) {
authorities = uri.substring(fromIndex);
} else {
authorities = uri.substring(fromIndex, toIndex);
}
return authorities;
}
/**
* Read the next server coordinates (host:port combo) from ZooKeeper. Ignore the znodes already
* explored. Also update the host, port, jdbcUriString and other configs published by the server.
*
* @param connParams
* @return true if new server info is retrieved successfully
*/
static boolean updateConnParamsFromZooKeeper(JdbcConnectionParams connParams) {
// Add current host to the rejected list
connParams.getRejectedHostZnodePaths().add(connParams.getCurrentHostZnodePath());
String oldServerHost = connParams.getHost();
int oldServerPort = connParams.getPort();
// Update connection params (including host, port) from ZooKeeper
try {
ZooKeeperHiveClientHelper.configureConnParams(connParams);
connParams.setJdbcUriString(connParams.getJdbcUriString().replace(
oldServerHost + ":" + oldServerPort, connParams.getHost() + ":" + connParams.getPort()));
LOG.info("Selected HiveServer2 instance with uri: " + connParams.getJdbcUriString());
} catch(ZooKeeperHiveClientException e) {
LOG.error(e.getMessage());
return false;
}
return true;
}
/**
* Takes a version string delimited by '.' and '-' characters
* and returns a partial version.
*
* @param fullVersion
* version string.
* @param position
* position of version string to get starting at 0. eg, for a X.x.xxx
* string, 0 will return the major version, 1 will return minor
* version.
* @return version part, or -1 if version string was malformed.
*/
static int getVersionPart(String fullVersion, int position) {
int version = -1;
try {
String[] tokens = fullVersion.split("[\\.-]"); //$NON-NLS-1$
if (tokens != null && tokens.length > 1 && tokens[position] != null) {
version = Integer.parseInt(tokens[position]);
}
} catch (Exception e) {
version = -1;
}
return version;
}
/**
* The function iterates through the list of cookies in the cookiestore and tries to
* match them with the cookieName. If there is a match, the cookieStore already
* has a valid cookie and the client need not send Credentials for validation purpose.
* @param cookieStore The cookie Store
* @param cookieName Name of the cookie which needs to be validated
* @param isSSL Whether this is a http/https connection
* @return true or false based on whether the client needs to send the credentials or
* not to the server.
*/
static boolean needToSendCredentials(CookieStore cookieStore, String cookieName, boolean isSSL) {
if (cookieName == null || cookieStore == null) {
return true;
}
List<Cookie> cookies = cookieStore.getCookies();
for (Cookie c : cookies) {
// If this is a secured cookie and the current connection is non-secured,
// then, skip this cookie. We need to skip this cookie because, the cookie
// replay will not be transmitted to the server.
if (c.isSecure() && !isSSL) {
continue;
}
if (c.getName().equals(cookieName)) {
return false;
}
}
return true;
}
public static String parsePropertyFromUrl(final String url, final String key) {
String[] tokens = url.split(";");
for (String token : tokens) {
if (token.trim().startsWith(key.trim() + "=")) {
return token.trim().substring((key.trim() + "=").length());
}
}
return null;
}
/**
* Method to get canonical-ized hostname, given a hostname (possibly a CNAME).
* This should allow for service-principals to use simplified CNAMEs.
* @param hostName The hostname to be canonical-ized.
* @return Given a CNAME, the canonical-ized hostname is returned. If not found, the original hostname is returned.
*/
public static String getCanonicalHostName(String hostName) {
try {
return InetAddress.getByName(hostName).getCanonicalHostName();
}
catch(UnknownHostException exception) {
LOG.warn("Could not retrieve canonical hostname for " + hostName, exception);
return hostName;
}
}
/**
* Method to get the password from the credential provider
* @param providerPath provider path
* @param key alias name
* @return password
*/
private static String getPasswordFromCredentialProvider(String providerPath, String key) {
try {
if (providerPath != null) {
Configuration conf = new Configuration();
conf.set("hadoop.security.credential.provider.path", providerPath);
char[] password = conf.getPassword(key);
if (password != null) {
return new String(password);
}
}
} catch(IOException exception) {
LOG.warn("Could not retrieve password for " + key, exception);
}
return null;
}
/**
* Method to get the password from the configuration map if available. Otherwise, get it from the credential provider
* @param confMap configuration map
* @param key param
* @return password
*/
public static String getPassword(Map<String, String> confMap, String key) {
String password = confMap.get(key);
if (password == null) {
password = getPasswordFromCredentialProvider(confMap.get(JdbcConnectionParams.SSL_STORE_PASSWORD_PATH), key);
}
return password;
}
}
|
googleapis/google-cloud-java | 37,551 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/OutputAudioConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/audio_config.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
* <pre>
* Instructs the speech synthesizer how to generate the output audio content.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig}
*/
public final class OutputAudioConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig)
OutputAudioConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use OutputAudioConfig.newBuilder() to construct.
private OutputAudioConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private OutputAudioConfig() {
audioEncoding_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new OutputAudioConfig();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.AudioConfigProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_OutputAudioConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.AudioConfigProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_OutputAudioConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig.class,
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig.Builder.class);
}
private int bitField0_;
public static final int AUDIO_ENCODING_FIELD_NUMBER = 1;
private int audioEncoding_ = 0;
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The enum numeric value on the wire for audioEncoding.
*/
@java.lang.Override
public int getAudioEncodingValue() {
return audioEncoding_;
}
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The audioEncoding.
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding getAudioEncoding() {
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding result =
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding.forNumber(audioEncoding_);
return result == null
? com.google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding.UNRECOGNIZED
: result;
}
public static final int SAMPLE_RATE_HERTZ_FIELD_NUMBER = 2;
private int sampleRateHertz_ = 0;
/**
*
*
* <pre>
* Optional. The synthesis sample rate (in hertz) for this audio. If not
* provided, then the synthesizer will use the default sample rate based on
* the audio encoding. If this is different from the voice's natural sample
* rate, then the synthesizer will honor this request by converting to the
* desired sample rate (which might result in worse audio quality).
* </pre>
*
* <code>int32 sample_rate_hertz = 2;</code>
*
* @return The sampleRateHertz.
*/
@java.lang.Override
public int getSampleRateHertz() {
return sampleRateHertz_;
}
public static final int SYNTHESIZE_SPEECH_CONFIG_FIELD_NUMBER = 3;
private com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesizeSpeechConfig_;
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*
* @return Whether the synthesizeSpeechConfig field is set.
*/
@java.lang.Override
public boolean hasSynthesizeSpeechConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*
* @return The synthesizeSpeechConfig.
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig getSynthesizeSpeechConfig() {
return synthesizeSpeechConfig_ == null
? com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig.getDefaultInstance()
: synthesizeSpeechConfig_;
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfigOrBuilder
getSynthesizeSpeechConfigOrBuilder() {
return synthesizeSpeechConfig_ == null
? com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig.getDefaultInstance()
: synthesizeSpeechConfig_;
}
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This proto3 message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize only non-default fields, per proto3 wire-format rules.
    if (audioEncoding_
        != com.google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding
            .OUTPUT_AUDIO_ENCODING_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(1, audioEncoding_);
    }
    if (sampleRateHertz_ != 0) {
      output.writeInt32(2, sampleRateHertz_);
    }
    // synthesize_speech_config is a message field; presence is tracked via bitField0_.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getSynthesizeSpeechConfig());
    }
    // Unknown fields are round-tripped so re-serialization preserves them.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    // memoizedSize of -1 means "not yet computed"; otherwise return the cached value.
    if (size != -1) return size;
    size = 0;
    // Only non-default fields contribute to the serialized size, mirroring the
    // conditions in writeTo().
    if (audioEncoding_
        != com.google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding
            .OUTPUT_AUDIO_ENCODING_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, audioEncoding_);
    }
    if (sampleRateHertz_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, sampleRateHertz_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(3, getSynthesizeSpeechConfig());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig other =
        (com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig) obj;
    // Scalar fields compare by value; the message field compares presence first, then value.
    if (audioEncoding_ != other.audioEncoding_) return false;
    if (getSampleRateHertz() != other.getSampleRateHertz()) return false;
    if (hasSynthesizeSpeechConfig() != other.hasSynthesizeSpeechConfig()) return false;
    if (hasSynthesizeSpeechConfig()) {
      if (!getSynthesizeSpeechConfig().equals(other.getSynthesizeSpeechConfig())) return false;
    }
    // Unknown fields participate in equality so round-tripped messages stay equal.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Mix each field number with its value so different fields do not collide.
    hash = (37 * hash) + AUDIO_ENCODING_FIELD_NUMBER;
    hash = (53 * hash) + audioEncoding_;
    hash = (37 * hash) + SAMPLE_RATE_HERTZ_FIELD_NUMBER;
    hash = (53 * hash) + getSampleRateHertz();
    // The message field only contributes when present, matching equals().
    if (hasSynthesizeSpeechConfig()) {
      hash = (37 * hash) + SYNTHESIZE_SPEECH_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getSynthesizeSpeechConfig().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Instructs the speech synthesizer how to generate the output audio content.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig)
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.AudioConfigProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_OutputAudioConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.AudioConfigProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_OutputAudioConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig.class,
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig.Builder.class);
}
// Construct using com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSynthesizeSpeechConfigFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
audioEncoding_ = 0;
sampleRateHertz_ = 0;
synthesizeSpeechConfig_ = null;
if (synthesizeSpeechConfigBuilder_ != null) {
synthesizeSpeechConfigBuilder_.dispose();
synthesizeSpeechConfigBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3beta1.AudioConfigProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_OutputAudioConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig build() {
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig buildPartial() {
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig result =
new com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies each field whose builder has-bit is set into the freshly built message.
    private void buildPartial0(com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig result) {
      int from_bitField0_ = bitField0_;
      // Builder bit 0x1: audio_encoding.
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.audioEncoding_ = audioEncoding_;
      }
      // Builder bit 0x2: sample_rate_hertz.
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.sampleRateHertz_ = sampleRateHertz_;
      }
      int to_bitField0_ = 0;
      // Builder bit 0x4: synthesize_speech_config; maps to the message's own bit 0x1,
      // which tracks presence of this message-typed field.
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.synthesizeSpeechConfig_ =
            synthesizeSpeechConfigBuilder_ == null
                ? synthesizeSpeechConfig_
                : synthesizeSpeechConfigBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig) {
return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another OutputAudioConfig into this builder, field by field.
    public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig other) {
      if (other == com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig.getDefaultInstance())
        return this;
      // proto3 scalar fields are only merged when set to a non-default value.
      if (other.audioEncoding_ != 0) {
        setAudioEncodingValue(other.getAudioEncodingValue());
      }
      if (other.getSampleRateHertz() != 0) {
        setSampleRateHertz(other.getSampleRateHertz());
      }
      // The message-typed field merges recursively when present on the other message.
      if (other.hasSynthesizeSpeechConfig()) {
        mergeSynthesizeSpeechConfig(other.getSynthesizeSpeechConfig());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Parses serialized bytes from the stream into this builder.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire tag = (field_number << 3) | wire_type.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 8:
              {
                // Field 1 (audio_encoding), varint.
                audioEncoding_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 16:
              {
                // Field 2 (sample_rate_hertz), varint.
                sampleRateHertz_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                // Field 3 (synthesize_speech_config), length-delimited message.
                input.readMessage(
                    getSynthesizeSpeechConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                // Unrecognized fields are preserved as unknown fields.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure, since fields may have been set.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private int audioEncoding_ = 0;
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The enum numeric value on the wire for audioEncoding.
*/
@java.lang.Override
public int getAudioEncodingValue() {
return audioEncoding_;
}
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @param value The enum numeric value on the wire for audioEncoding to set.
* @return This builder for chaining.
*/
public Builder setAudioEncodingValue(int value) {
audioEncoding_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The audioEncoding.
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding getAudioEncoding() {
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding result =
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding.forNumber(audioEncoding_);
return result == null
? com.google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @param value The audioEncoding to set.
* @return This builder for chaining.
*/
public Builder setAudioEncoding(
com.google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
audioEncoding_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearAudioEncoding() {
bitField0_ = (bitField0_ & ~0x00000001);
audioEncoding_ = 0;
onChanged();
return this;
}
private int sampleRateHertz_;
/**
*
*
* <pre>
* Optional. The synthesis sample rate (in hertz) for this audio. If not
* provided, then the synthesizer will use the default sample rate based on
* the audio encoding. If this is different from the voice's natural sample
* rate, then the synthesizer will honor this request by converting to the
* desired sample rate (which might result in worse audio quality).
* </pre>
*
* <code>int32 sample_rate_hertz = 2;</code>
*
* @return The sampleRateHertz.
*/
@java.lang.Override
public int getSampleRateHertz() {
return sampleRateHertz_;
}
/**
*
*
* <pre>
* Optional. The synthesis sample rate (in hertz) for this audio. If not
* provided, then the synthesizer will use the default sample rate based on
* the audio encoding. If this is different from the voice's natural sample
* rate, then the synthesizer will honor this request by converting to the
* desired sample rate (which might result in worse audio quality).
* </pre>
*
* <code>int32 sample_rate_hertz = 2;</code>
*
* @param value The sampleRateHertz to set.
* @return This builder for chaining.
*/
public Builder setSampleRateHertz(int value) {
sampleRateHertz_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The synthesis sample rate (in hertz) for this audio. If not
* provided, then the synthesizer will use the default sample rate based on
* the audio encoding. If this is different from the voice's natural sample
* rate, then the synthesizer will honor this request by converting to the
* desired sample rate (which might result in worse audio quality).
* </pre>
*
* <code>int32 sample_rate_hertz = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearSampleRateHertz() {
bitField0_ = (bitField0_ & ~0x00000002);
sampleRateHertz_ = 0;
onChanged();
return this;
}
private com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesizeSpeechConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig,
com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig.Builder,
com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfigOrBuilder>
synthesizeSpeechConfigBuilder_;
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*
* @return Whether the synthesizeSpeechConfig field is set.
*/
public boolean hasSynthesizeSpeechConfig() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*
* @return The synthesizeSpeechConfig.
*/
public com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig
getSynthesizeSpeechConfig() {
if (synthesizeSpeechConfigBuilder_ == null) {
return synthesizeSpeechConfig_ == null
? com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig.getDefaultInstance()
: synthesizeSpeechConfig_;
} else {
return synthesizeSpeechConfigBuilder_.getMessage();
}
}
    /**
     *
     *
     * <pre>
     * Optional. Configuration of how speech should be synthesized.
     * If not specified,
     * [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
     * is applied.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
     * </code>
     *
     * @param value the new field value; must not be null (use
     *     {@link #clearSynthesizeSpeechConfig()} to unset).
     * @throws NullPointerException if {@code value} is null
     */
    public Builder setSynthesizeSpeechConfig(
        com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig value) {
      if (synthesizeSpeechConfigBuilder_ == null) {
        if (value == null) {
          // Message fields reject null; clearSynthesizeSpeechConfig() unsets.
          throw new NullPointerException();
        }
        synthesizeSpeechConfig_ = value;
      } else {
        synthesizeSpeechConfigBuilder_.setMessage(value);
      }
      // Mark field 3 as explicitly set.
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Configuration of how speech should be synthesized.
     * If not specified,
     * [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
     * is applied.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
     * </code>
     *
     * @param builderForValue builder whose {@code build()} result becomes the
     *     new field value
     */
    public Builder setSynthesizeSpeechConfig(
        com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig.Builder builderForValue) {
      if (synthesizeSpeechConfigBuilder_ == null) {
        // Build the message from the provided builder and store it directly.
        synthesizeSpeechConfig_ = builderForValue.build();
      } else {
        synthesizeSpeechConfigBuilder_.setMessage(builderForValue.build());
      }
      // Mark field 3 as explicitly set.
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Configuration of how speech should be synthesized.
     * If not specified,
     * [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
     * is applied.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
     * </code>
     *
     * @param value the message to field-merge into the current value
     */
    public Builder mergeSynthesizeSpeechConfig(
        com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig value) {
      if (synthesizeSpeechConfigBuilder_ == null) {
        if (((bitField0_ & 0x00000004) != 0)
            && synthesizeSpeechConfig_ != null
            && synthesizeSpeechConfig_
                != com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig
                    .getDefaultInstance()) {
          // Field already holds a non-default value: field-merge the incoming
          // value into it through a (newly materialized) sub-builder.
          getSynthesizeSpeechConfigBuilder().mergeFrom(value);
        } else {
          // Unset or default-valued: plain assignment is equivalent to a merge.
          synthesizeSpeechConfig_ = value;
        }
      } else {
        synthesizeSpeechConfigBuilder_.mergeFrom(value);
      }
      // Only the direct-assignment path leaves synthesizeSpeechConfig_ non-null
      // here; the sub-builder paths maintain the presence bit themselves (see
      // getSynthesizeSpeechConfigBuilder()).
      if (synthesizeSpeechConfig_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Configuration of how speech should be synthesized.
     * If not specified,
     * [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
     * is applied.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
     * </code>
     */
    public Builder clearSynthesizeSpeechConfig() {
      // Drop the presence bit and the stored value.
      bitField0_ = (bitField0_ & ~0x00000004);
      synthesizeSpeechConfig_ = null;
      if (synthesizeSpeechConfigBuilder_ != null) {
        // Release the sub-builder so a later set/merge starts from scratch.
        synthesizeSpeechConfigBuilder_.dispose();
        synthesizeSpeechConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Configuration of how speech should be synthesized.
     * If not specified,
     * [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
     * is applied.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
     * </code>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig.Builder
        getSynthesizeSpeechConfigBuilder() {
      // Handing out a mutable sub-builder implies the field will be set, so
      // the presence bit is raised eagerly.
      bitField0_ |= 0x00000004;
      onChanged();
      return getSynthesizeSpeechConfigFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Configuration of how speech should be synthesized.
     * If not specified,
     * [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
     * is applied.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
     * </code>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfigOrBuilder
        getSynthesizeSpeechConfigOrBuilder() {
      if (synthesizeSpeechConfigBuilder_ != null) {
        // Read through the live sub-builder without forcing a message build.
        return synthesizeSpeechConfigBuilder_.getMessageOrBuilder();
      } else {
        // No sub-builder: fall back to the stored value or the default instance.
        return synthesizeSpeechConfig_ == null
            ? com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig.getDefaultInstance()
            : synthesizeSpeechConfig_;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Configuration of how speech should be synthesized.
     * If not specified,
     * [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3beta1.Agent.text_to_speech_settings]
     * is applied.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig synthesize_speech_config = 3;
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig,
            com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig.Builder,
            com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfigOrBuilder>
        getSynthesizeSpeechConfigFieldBuilder() {
      if (synthesizeSpeechConfigBuilder_ == null) {
        // Lazily create the sub-builder, seeding it with the current value
        // (or the default instance when unset).
        synthesizeSpeechConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig,
                com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfig.Builder,
                com.google.cloud.dialogflow.cx.v3beta1.SynthesizeSpeechConfigOrBuilder>(
                getSynthesizeSpeechConfig(), getParentForChildren(), isClean());
        // Ownership transfers to the sub-builder; drop the direct reference.
        synthesizeSpeechConfig_ = null;
      }
      return synthesizeSpeechConfigBuilder_;
    }
    // Standard pass-through to the generated-message superclass builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Standard pass-through to the generated-message superclass builder.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig)
}
  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig)
  // Shared immutable default instance for this message type; also returned by
  // field accessors of unset OutputAudioConfig message fields.
  private static final com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig();
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. Every failure path attaches the partially parsed
  // message via setUnfinishedMessage() so callers can inspect what was read.
  private static final com.google.protobuf.Parser<OutputAudioConfig> PARSER =
      new com.google.protobuf.AbstractParser<OutputAudioConfig>() {
        @java.lang.Override
        public OutputAudioConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Convert missing-required-field failures to the protobuf exception type.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for {@code OutputAudioConfig} messages. */
  public static com.google.protobuf.Parser<OutputAudioConfig> parser() {
    return PARSER;
  }
  // Instance-level accessor required by the MessageLite contract; delegates
  // to the shared static PARSER.
  @java.lang.Override
  public com.google.protobuf.Parser<OutputAudioConfig> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor required by the MessageLite contract; delegates
  // to the shared static DEFAULT_INSTANCE.
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3beta1.OutputAudioConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/resources/ad_group.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.resources;
public interface AdGroupOrBuilder extends
// @@protoc_insertion_point(interface_extends:google.ads.googleads.v19.resources.AdGroup)
com.google.protobuf.MessageOrBuilder {
/**
* <pre>
* Immutable. The resource name of the ad group.
* Ad group resource names have the form:
*
* `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
java.lang.String getResourceName();
/**
* <pre>
* Immutable. The resource name of the ad group.
* Ad group resource names have the form:
*
* `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
com.google.protobuf.ByteString
getResourceNameBytes();
/**
* <pre>
* Output only. The ID of the ad group.
* </pre>
*
* <code>optional int64 id = 34 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the id field is set.
*/
boolean hasId();
/**
* <pre>
* Output only. The ID of the ad group.
* </pre>
*
* <code>optional int64 id = 34 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The id.
*/
long getId();
/**
* <pre>
* The name of the ad group.
*
* This field is required and should not be empty when creating new ad
* groups.
*
* It must contain fewer than 255 UTF-8 full-width characters.
*
* It must not contain any null (code point 0x0), NL line feed
* (code point 0xA) or carriage return (code point 0xD) characters.
* </pre>
*
* <code>optional string name = 35;</code>
* @return Whether the name field is set.
*/
boolean hasName();
/**
* <pre>
* The name of the ad group.
*
* This field is required and should not be empty when creating new ad
* groups.
*
* It must contain fewer than 255 UTF-8 full-width characters.
*
* It must not contain any null (code point 0x0), NL line feed
* (code point 0xA) or carriage return (code point 0xD) characters.
* </pre>
*
* <code>optional string name = 35;</code>
* @return The name.
*/
java.lang.String getName();
/**
* <pre>
* The name of the ad group.
*
* This field is required and should not be empty when creating new ad
* groups.
*
* It must contain fewer than 255 UTF-8 full-width characters.
*
* It must not contain any null (code point 0x0), NL line feed
* (code point 0xA) or carriage return (code point 0xD) characters.
* </pre>
*
* <code>optional string name = 35;</code>
* @return The bytes for name.
*/
com.google.protobuf.ByteString
getNameBytes();
/**
* <pre>
* The status of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.AdGroupStatusEnum.AdGroupStatus status = 5;</code>
* @return The enum numeric value on the wire for status.
*/
int getStatusValue();
/**
* <pre>
* The status of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.AdGroupStatusEnum.AdGroupStatus status = 5;</code>
* @return The status.
*/
com.google.ads.googleads.v19.enums.AdGroupStatusEnum.AdGroupStatus getStatus();
/**
* <pre>
* Immutable. The type of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.AdGroupTypeEnum.AdGroupType type = 12 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The enum numeric value on the wire for type.
*/
int getTypeValue();
/**
* <pre>
* Immutable. The type of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.AdGroupTypeEnum.AdGroupType type = 12 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The type.
*/
com.google.ads.googleads.v19.enums.AdGroupTypeEnum.AdGroupType getType();
/**
* <pre>
* The ad rotation mode of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.AdGroupAdRotationModeEnum.AdGroupAdRotationMode ad_rotation_mode = 22;</code>
* @return The enum numeric value on the wire for adRotationMode.
*/
int getAdRotationModeValue();
/**
* <pre>
* The ad rotation mode of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.AdGroupAdRotationModeEnum.AdGroupAdRotationMode ad_rotation_mode = 22;</code>
* @return The adRotationMode.
*/
com.google.ads.googleads.v19.enums.AdGroupAdRotationModeEnum.AdGroupAdRotationMode getAdRotationMode();
/**
* <pre>
* Output only. For draft or experiment ad groups, this field is the resource
* name of the base ad group from which this ad group was created. If a draft
* or experiment ad group does not have a base ad group, then this field is
* null.
*
* For base ad groups, this field equals the ad group resource name.
*
* This field is read-only.
* </pre>
*
* <code>optional string base_ad_group = 36 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return Whether the baseAdGroup field is set.
*/
boolean hasBaseAdGroup();
/**
* <pre>
* Output only. For draft or experiment ad groups, this field is the resource
* name of the base ad group from which this ad group was created. If a draft
* or experiment ad group does not have a base ad group, then this field is
* null.
*
* For base ad groups, this field equals the ad group resource name.
*
* This field is read-only.
* </pre>
*
* <code>optional string base_ad_group = 36 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The baseAdGroup.
*/
java.lang.String getBaseAdGroup();
/**
* <pre>
* Output only. For draft or experiment ad groups, this field is the resource
* name of the base ad group from which this ad group was created. If a draft
* or experiment ad group does not have a base ad group, then this field is
* null.
*
* For base ad groups, this field equals the ad group resource name.
*
* This field is read-only.
* </pre>
*
* <code>optional string base_ad_group = 36 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for baseAdGroup.
*/
com.google.protobuf.ByteString
getBaseAdGroupBytes();
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 37;</code>
* @return Whether the trackingUrlTemplate field is set.
*/
boolean hasTrackingUrlTemplate();
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 37;</code>
* @return The trackingUrlTemplate.
*/
java.lang.String getTrackingUrlTemplate();
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 37;</code>
* @return The bytes for trackingUrlTemplate.
*/
com.google.protobuf.ByteString
getTrackingUrlTemplateBytes();
/**
* <pre>
* The list of mappings used to substitute custom parameter tags in a
* `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.CustomParameter url_custom_parameters = 6;</code>
*/
java.util.List<com.google.ads.googleads.v19.common.CustomParameter>
getUrlCustomParametersList();
/**
* <pre>
* The list of mappings used to substitute custom parameter tags in a
* `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.CustomParameter url_custom_parameters = 6;</code>
*/
com.google.ads.googleads.v19.common.CustomParameter getUrlCustomParameters(int index);
/**
* <pre>
* The list of mappings used to substitute custom parameter tags in a
* `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.CustomParameter url_custom_parameters = 6;</code>
*/
int getUrlCustomParametersCount();
/**
* <pre>
* The list of mappings used to substitute custom parameter tags in a
* `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.CustomParameter url_custom_parameters = 6;</code>
*/
java.util.List<? extends com.google.ads.googleads.v19.common.CustomParameterOrBuilder>
getUrlCustomParametersOrBuilderList();
/**
* <pre>
* The list of mappings used to substitute custom parameter tags in a
* `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.CustomParameter url_custom_parameters = 6;</code>
*/
com.google.ads.googleads.v19.common.CustomParameterOrBuilder getUrlCustomParametersOrBuilder(
int index);
/**
* <pre>
* Immutable. The campaign to which the ad group belongs.
* </pre>
*
* <code>optional string campaign = 38 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return Whether the campaign field is set.
*/
boolean hasCampaign();
/**
* <pre>
* Immutable. The campaign to which the ad group belongs.
* </pre>
*
* <code>optional string campaign = 38 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The campaign.
*/
java.lang.String getCampaign();
/**
* <pre>
* Immutable. The campaign to which the ad group belongs.
* </pre>
*
* <code>optional string campaign = 38 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for campaign.
*/
com.google.protobuf.ByteString
getCampaignBytes();
/**
* <pre>
* The maximum CPC (cost-per-click) bid.
* </pre>
*
* <code>optional int64 cpc_bid_micros = 39;</code>
* @return Whether the cpcBidMicros field is set.
*/
boolean hasCpcBidMicros();
/**
* <pre>
* The maximum CPC (cost-per-click) bid.
* </pre>
*
* <code>optional int64 cpc_bid_micros = 39;</code>
* @return The cpcBidMicros.
*/
long getCpcBidMicros();
/**
* <pre>
* Output only. Value will be same as that of the CPC (cost-per-click) bid
* value when the bidding strategy is one of manual cpc, enhanced cpc, page
* one promoted or target outrank share, otherwise the value will be null.
* </pre>
*
* <code>optional int64 effective_cpc_bid_micros = 57 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the effectiveCpcBidMicros field is set.
*/
boolean hasEffectiveCpcBidMicros();
/**
* <pre>
* Output only. Value will be same as that of the CPC (cost-per-click) bid
* value when the bidding strategy is one of manual cpc, enhanced cpc, page
* one promoted or target outrank share, otherwise the value will be null.
* </pre>
*
* <code>optional int64 effective_cpc_bid_micros = 57 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The effectiveCpcBidMicros.
*/
long getEffectiveCpcBidMicros();
/**
* <pre>
* The maximum CPM (cost-per-thousand viewable impressions) bid.
* </pre>
*
* <code>optional int64 cpm_bid_micros = 40;</code>
* @return Whether the cpmBidMicros field is set.
*/
boolean hasCpmBidMicros();
/**
* <pre>
* The maximum CPM (cost-per-thousand viewable impressions) bid.
* </pre>
*
* <code>optional int64 cpm_bid_micros = 40;</code>
* @return The cpmBidMicros.
*/
long getCpmBidMicros();
/**
* <pre>
* The target CPA (cost-per-acquisition). If the ad group's campaign
* bidding strategy is TargetCpa or MaximizeConversions (with its target_cpa
* field set), then this field overrides the target CPA specified in the
* campaign's bidding strategy.
* Otherwise, this value is ignored.
* </pre>
*
* <code>optional int64 target_cpa_micros = 41;</code>
* @return Whether the targetCpaMicros field is set.
*/
boolean hasTargetCpaMicros();
/**
* <pre>
* The target CPA (cost-per-acquisition). If the ad group's campaign
* bidding strategy is TargetCpa or MaximizeConversions (with its target_cpa
* field set), then this field overrides the target CPA specified in the
* campaign's bidding strategy.
* Otherwise, this value is ignored.
* </pre>
*
* <code>optional int64 target_cpa_micros = 41;</code>
* @return The targetCpaMicros.
*/
long getTargetCpaMicros();
/**
* <pre>
* The CPV (cost-per-view) bid.
* </pre>
*
* <code>optional int64 cpv_bid_micros = 42;</code>
* @return Whether the cpvBidMicros field is set.
*/
boolean hasCpvBidMicros();
/**
* <pre>
* The CPV (cost-per-view) bid.
* </pre>
*
* <code>optional int64 cpv_bid_micros = 42;</code>
* @return The cpvBidMicros.
*/
long getCpvBidMicros();
/**
* <pre>
* Average amount in micros that the advertiser is willing to pay for every
* thousand times the ad is shown.
* </pre>
*
* <code>optional int64 target_cpm_micros = 43;</code>
* @return Whether the targetCpmMicros field is set.
*/
boolean hasTargetCpmMicros();
/**
* <pre>
* Average amount in micros that the advertiser is willing to pay for every
* thousand times the ad is shown.
* </pre>
*
* <code>optional int64 target_cpm_micros = 43;</code>
* @return The targetCpmMicros.
*/
long getTargetCpmMicros();
/**
* <pre>
* The target ROAS (return-on-ad-spend) override. If the ad group's campaign
* bidding strategy is TargetRoas or MaximizeConversionValue (with its
* target_roas field set), then this field overrides the target ROAS specified
* in the campaign's bidding strategy.
* Otherwise, this value is ignored.
* </pre>
*
* <code>optional double target_roas = 44;</code>
* @return Whether the targetRoas field is set.
*/
boolean hasTargetRoas();
/**
* <pre>
* The target ROAS (return-on-ad-spend) override. If the ad group's campaign
* bidding strategy is TargetRoas or MaximizeConversionValue (with its
* target_roas field set), then this field overrides the target ROAS specified
* in the campaign's bidding strategy.
* Otherwise, this value is ignored.
* </pre>
*
* <code>optional double target_roas = 44;</code>
* @return The targetRoas.
*/
double getTargetRoas();
/**
* <pre>
* The percent cpc bid amount, expressed as a fraction of the advertised price
* for some good or service. The valid range for the fraction is [0,1) and the
* value stored here is 1,000,000 * [fraction].
* </pre>
*
* <code>optional int64 percent_cpc_bid_micros = 45;</code>
* @return Whether the percentCpcBidMicros field is set.
*/
boolean hasPercentCpcBidMicros();
/**
* <pre>
* The percent cpc bid amount, expressed as a fraction of the advertised price
* for some good or service. The valid range for the fraction is [0,1) and the
* value stored here is 1,000,000 * [fraction].
* </pre>
*
* <code>optional int64 percent_cpc_bid_micros = 45;</code>
* @return The percentCpcBidMicros.
*/
long getPercentCpcBidMicros();
/**
* <pre>
* The fixed amount in micros that the advertiser pays for every thousand
* impressions of the ad.
* </pre>
*
* <code>optional int64 fixed_cpm_micros = 64;</code>
* @return Whether the fixedCpmMicros field is set.
*/
boolean hasFixedCpmMicros();
/**
* <pre>
* The fixed amount in micros that the advertiser pays for every thousand
* impressions of the ad.
* </pre>
*
* <code>optional int64 fixed_cpm_micros = 64;</code>
* @return The fixedCpmMicros.
*/
long getFixedCpmMicros();
/**
* <pre>
* Average amount in micros that the advertiser is willing to pay for every ad
* view.
* </pre>
*
* <code>optional int64 target_cpv_micros = 65;</code>
* @return Whether the targetCpvMicros field is set.
*/
boolean hasTargetCpvMicros();
/**
* <pre>
* Average amount in micros that the advertiser is willing to pay for every ad
* view.
* </pre>
*
* <code>optional int64 target_cpv_micros = 65;</code>
* @return The targetCpvMicros.
*/
long getTargetCpvMicros();
/**
* <pre>
* True if optimized targeting is enabled. Optimized Targeting is the
* replacement for Audience Expansion.
* </pre>
*
* <code>bool optimized_targeting_enabled = 59;</code>
* @return The optimizedTargetingEnabled.
*/
boolean getOptimizedTargetingEnabled();
/**
* <pre>
* When this value is true, demographics will be excluded from the types of
* targeting which are expanded when optimized_targeting_enabled is true.
* When optimized_targeting_enabled is false, this field is ignored. Default
* is false.
* </pre>
*
* <code>bool exclude_demographic_expansion = 67;</code>
* @return The excludeDemographicExpansion.
*/
boolean getExcludeDemographicExpansion();
/**
* <pre>
* Allows advertisers to specify a targeting dimension on which to place
* absolute bids. This is only applicable for campaigns that target only the
* display network and not search.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.TargetingDimensionEnum.TargetingDimension display_custom_bid_dimension = 23;</code>
* @return The enum numeric value on the wire for displayCustomBidDimension.
*/
int getDisplayCustomBidDimensionValue();
/**
* <pre>
* Allows advertisers to specify a targeting dimension on which to place
* absolute bids. This is only applicable for campaigns that target only the
* display network and not search.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.TargetingDimensionEnum.TargetingDimension display_custom_bid_dimension = 23;</code>
* @return The displayCustomBidDimension.
*/
com.google.ads.googleads.v19.enums.TargetingDimensionEnum.TargetingDimension getDisplayCustomBidDimension();
/**
* <pre>
* URL template for appending params to Final URL.
* </pre>
*
* <code>optional string final_url_suffix = 46;</code>
* @return Whether the finalUrlSuffix field is set.
*/
boolean hasFinalUrlSuffix();
/**
* <pre>
* URL template for appending params to Final URL.
* </pre>
*
* <code>optional string final_url_suffix = 46;</code>
* @return The finalUrlSuffix.
*/
java.lang.String getFinalUrlSuffix();
/**
* <pre>
* URL template for appending params to Final URL.
* </pre>
*
* <code>optional string final_url_suffix = 46;</code>
* @return The bytes for finalUrlSuffix.
*/
com.google.protobuf.ByteString
getFinalUrlSuffixBytes();
/**
* <pre>
* Setting for targeting related features.
* </pre>
*
* <code>.google.ads.googleads.v19.common.TargetingSetting targeting_setting = 25;</code>
* @return Whether the targetingSetting field is set.
*/
boolean hasTargetingSetting();
/**
* <pre>
* Setting for targeting related features.
* </pre>
*
* <code>.google.ads.googleads.v19.common.TargetingSetting targeting_setting = 25;</code>
* @return The targetingSetting.
*/
com.google.ads.googleads.v19.common.TargetingSetting getTargetingSetting();
/**
* <pre>
* Setting for targeting related features.
* </pre>
*
* <code>.google.ads.googleads.v19.common.TargetingSetting targeting_setting = 25;</code>
*/
com.google.ads.googleads.v19.common.TargetingSettingOrBuilder getTargetingSettingOrBuilder();
/**
* <pre>
* Immutable. Setting for audience related features.
* </pre>
*
* <code>.google.ads.googleads.v19.resources.AdGroup.AudienceSetting audience_setting = 56 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the audienceSetting field is set.
*/
boolean hasAudienceSetting();
/**
* <pre>
* Immutable. Setting for audience related features.
* </pre>
*
* <code>.google.ads.googleads.v19.resources.AdGroup.AudienceSetting audience_setting = 56 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The audienceSetting.
*/
com.google.ads.googleads.v19.resources.AdGroup.AudienceSetting getAudienceSetting();
/**
* <pre>
* Immutable. Setting for audience related features.
* </pre>
*
* <code>.google.ads.googleads.v19.resources.AdGroup.AudienceSetting audience_setting = 56 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v19.resources.AdGroup.AudienceSettingOrBuilder getAudienceSettingOrBuilder();
/**
* <pre>
* Output only. The effective target CPA (cost-per-acquisition).
* This field is read-only.
* </pre>
*
* <code>optional int64 effective_target_cpa_micros = 47 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the effectiveTargetCpaMicros field is set.
*/
boolean hasEffectiveTargetCpaMicros();
/**
* <pre>
* Output only. The effective target CPA (cost-per-acquisition).
* This field is read-only.
* </pre>
*
* <code>optional int64 effective_target_cpa_micros = 47 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The effectiveTargetCpaMicros.
*/
long getEffectiveTargetCpaMicros();
/**
* <pre>
* Output only. Source of the effective target CPA.
* This field is read-only.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.BiddingSourceEnum.BiddingSource effective_target_cpa_source = 29 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for effectiveTargetCpaSource.
*/
int getEffectiveTargetCpaSourceValue();
/**
* <pre>
* Output only. Source of the effective target CPA.
* This field is read-only.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.BiddingSourceEnum.BiddingSource effective_target_cpa_source = 29 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The effectiveTargetCpaSource.
*/
com.google.ads.googleads.v19.enums.BiddingSourceEnum.BiddingSource getEffectiveTargetCpaSource();
/**
* <pre>
* Output only. The effective target ROAS (return-on-ad-spend).
* This field is read-only.
* </pre>
*
* <code>optional double effective_target_roas = 48 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the effectiveTargetRoas field is set.
*/
boolean hasEffectiveTargetRoas();
/**
* <pre>
* Output only. The effective target ROAS (return-on-ad-spend).
* This field is read-only.
* </pre>
*
* <code>optional double effective_target_roas = 48 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The effectiveTargetRoas.
*/
double getEffectiveTargetRoas();
/**
* <pre>
* Output only. Source of the effective target ROAS.
* This field is read-only.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.BiddingSourceEnum.BiddingSource effective_target_roas_source = 32 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for effectiveTargetRoasSource.
*/
int getEffectiveTargetRoasSourceValue();
/**
* <pre>
* Output only. Source of the effective target ROAS.
* This field is read-only.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.BiddingSourceEnum.BiddingSource effective_target_roas_source = 32 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The effectiveTargetRoasSource.
*/
com.google.ads.googleads.v19.enums.BiddingSourceEnum.BiddingSource getEffectiveTargetRoasSource();
/**
* <pre>
* Output only. The resource names of labels attached to this ad group.
* </pre>
*
* <code>repeated string labels = 49 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return A list containing the labels.
*/
java.util.List<java.lang.String>
getLabelsList();
/**
* <pre>
* Output only. The resource names of labels attached to this ad group.
* </pre>
*
* <code>repeated string labels = 49 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The count of labels.
*/
int getLabelsCount();
/**
* <pre>
* Output only. The resource names of labels attached to this ad group.
* </pre>
*
* <code>repeated string labels = 49 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @param index The index of the element to return.
* @return The labels at the given index.
*/
java.lang.String getLabels(int index);
/**
* <pre>
* Output only. The resource names of labels attached to this ad group.
* </pre>
*
* <code>repeated string labels = 49 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @param index The index of the value to return.
* @return The bytes of the labels at the given index.
*/
com.google.protobuf.ByteString
getLabelsBytes(int index);
/**
* <pre>
* The asset field types that should be excluded from this ad group. Asset
* links with these field types will not be inherited by this ad group from
* the upper levels.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType excluded_parent_asset_field_types = 54;</code>
* @return A list containing the excludedParentAssetFieldTypes.
*/
java.util.List<com.google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType> getExcludedParentAssetFieldTypesList();
/**
* <pre>
* The asset field types that should be excluded from this ad group. Asset
* links with these field types will not be inherited by this ad group from
* the upper levels.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType excluded_parent_asset_field_types = 54;</code>
* @return The count of excludedParentAssetFieldTypes.
*/
int getExcludedParentAssetFieldTypesCount();
/**
* <pre>
* The asset field types that should be excluded from this ad group. Asset
* links with these field types will not be inherited by this ad group from
* the upper levels.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType excluded_parent_asset_field_types = 54;</code>
* @param index The index of the element to return.
* @return The excludedParentAssetFieldTypes at the given index.
*/
com.google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType getExcludedParentAssetFieldTypes(int index);
/**
* <pre>
* The asset field types that should be excluded from this ad group. Asset
* links with these field types will not be inherited by this ad group from
* the upper levels.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType excluded_parent_asset_field_types = 54;</code>
* @return A list containing the enum numeric values on the wire for excludedParentAssetFieldTypes.
*/
java.util.List<java.lang.Integer>
getExcludedParentAssetFieldTypesValueList();
/**
* <pre>
* The asset field types that should be excluded from this ad group. Asset
* links with these field types will not be inherited by this ad group from
* the upper levels.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType excluded_parent_asset_field_types = 54;</code>
* @param index The index of the value to return.
* @return The enum numeric value on the wire of excludedParentAssetFieldTypes at the given index.
*/
int getExcludedParentAssetFieldTypesValue(int index);
/**
* <pre>
* The asset set types that should be excluded from this ad group. Asset set
* links with these types will not be inherited by this ad group from the
* upper levels.
* Location group types (GMB_DYNAMIC_LOCATION_GROUP,
* CHAIN_DYNAMIC_LOCATION_GROUP, and STATIC_LOCATION_GROUP) are child types of
* LOCATION_SYNC. Therefore, if LOCATION_SYNC is set for this field, all
* location group asset sets are not allowed to be linked to this ad group,
* and all Location Extension (LE) and Affiliate Location Extensions (ALE)
* will not be served under this ad group.
* Only LOCATION_SYNC is currently supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AssetSetTypeEnum.AssetSetType excluded_parent_asset_set_types = 58;</code>
* @return A list containing the excludedParentAssetSetTypes.
*/
java.util.List<com.google.ads.googleads.v19.enums.AssetSetTypeEnum.AssetSetType> getExcludedParentAssetSetTypesList();
/**
* <pre>
* The asset set types that should be excluded from this ad group. Asset set
* links with these types will not be inherited by this ad group from the
* upper levels.
* Location group types (GMB_DYNAMIC_LOCATION_GROUP,
* CHAIN_DYNAMIC_LOCATION_GROUP, and STATIC_LOCATION_GROUP) are child types of
* LOCATION_SYNC. Therefore, if LOCATION_SYNC is set for this field, all
* location group asset sets are not allowed to be linked to this ad group,
* and all Location Extension (LE) and Affiliate Location Extensions (ALE)
* will not be served under this ad group.
* Only LOCATION_SYNC is currently supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AssetSetTypeEnum.AssetSetType excluded_parent_asset_set_types = 58;</code>
* @return The count of excludedParentAssetSetTypes.
*/
int getExcludedParentAssetSetTypesCount();
/**
* <pre>
* The asset set types that should be excluded from this ad group. Asset set
* links with these types will not be inherited by this ad group from the
* upper levels.
* Location group types (GMB_DYNAMIC_LOCATION_GROUP,
* CHAIN_DYNAMIC_LOCATION_GROUP, and STATIC_LOCATION_GROUP) are child types of
* LOCATION_SYNC. Therefore, if LOCATION_SYNC is set for this field, all
* location group asset sets are not allowed to be linked to this ad group,
* and all Location Extension (LE) and Affiliate Location Extensions (ALE)
* will not be served under this ad group.
* Only LOCATION_SYNC is currently supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AssetSetTypeEnum.AssetSetType excluded_parent_asset_set_types = 58;</code>
* @param index The index of the element to return.
* @return The excludedParentAssetSetTypes at the given index.
*/
com.google.ads.googleads.v19.enums.AssetSetTypeEnum.AssetSetType getExcludedParentAssetSetTypes(int index);
/**
* <pre>
* The asset set types that should be excluded from this ad group. Asset set
* links with these types will not be inherited by this ad group from the
* upper levels.
* Location group types (GMB_DYNAMIC_LOCATION_GROUP,
* CHAIN_DYNAMIC_LOCATION_GROUP, and STATIC_LOCATION_GROUP) are child types of
* LOCATION_SYNC. Therefore, if LOCATION_SYNC is set for this field, all
* location group asset sets are not allowed to be linked to this ad group,
* and all Location Extension (LE) and Affiliate Location Extensions (ALE)
* will not be served under this ad group.
* Only LOCATION_SYNC is currently supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AssetSetTypeEnum.AssetSetType excluded_parent_asset_set_types = 58;</code>
* @return A list containing the enum numeric values on the wire for excludedParentAssetSetTypes.
*/
java.util.List<java.lang.Integer>
getExcludedParentAssetSetTypesValueList();
/**
* <pre>
* The asset set types that should be excluded from this ad group. Asset set
* links with these types will not be inherited by this ad group from the
* upper levels.
* Location group types (GMB_DYNAMIC_LOCATION_GROUP,
* CHAIN_DYNAMIC_LOCATION_GROUP, and STATIC_LOCATION_GROUP) are child types of
* LOCATION_SYNC. Therefore, if LOCATION_SYNC is set for this field, all
* location group asset sets are not allowed to be linked to this ad group,
* and all Location Extension (LE) and Affiliate Location Extensions (ALE)
* will not be served under this ad group.
* Only LOCATION_SYNC is currently supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AssetSetTypeEnum.AssetSetType excluded_parent_asset_set_types = 58;</code>
* @param index The index of the value to return.
* @return The enum numeric value on the wire of excludedParentAssetSetTypes at the given index.
*/
int getExcludedParentAssetSetTypesValue(int index);
/**
* <pre>
* Output only. Provides aggregated view into why an ad group is not serving
* or not serving optimally.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.AdGroupPrimaryStatusEnum.AdGroupPrimaryStatus primary_status = 62 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for primaryStatus.
*/
int getPrimaryStatusValue();
/**
* <pre>
* Output only. Provides aggregated view into why an ad group is not serving
* or not serving optimally.
* </pre>
*
* <code>.google.ads.googleads.v19.enums.AdGroupPrimaryStatusEnum.AdGroupPrimaryStatus primary_status = 62 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The primaryStatus.
*/
com.google.ads.googleads.v19.enums.AdGroupPrimaryStatusEnum.AdGroupPrimaryStatus getPrimaryStatus();
/**
* <pre>
* Output only. Provides reasons for why an ad group is not serving or not
* serving optimally.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason primary_status_reasons = 63 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return A list containing the primaryStatusReasons.
*/
java.util.List<com.google.ads.googleads.v19.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason> getPrimaryStatusReasonsList();
/**
* <pre>
* Output only. Provides reasons for why an ad group is not serving or not
* serving optimally.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason primary_status_reasons = 63 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The count of primaryStatusReasons.
*/
int getPrimaryStatusReasonsCount();
/**
* <pre>
* Output only. Provides reasons for why an ad group is not serving or not
* serving optimally.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason primary_status_reasons = 63 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @param index The index of the element to return.
* @return The primaryStatusReasons at the given index.
*/
com.google.ads.googleads.v19.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason getPrimaryStatusReasons(int index);
/**
* <pre>
* Output only. Provides reasons for why an ad group is not serving or not
* serving optimally.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason primary_status_reasons = 63 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return A list containing the enum numeric values on the wire for primaryStatusReasons.
*/
java.util.List<java.lang.Integer>
getPrimaryStatusReasonsValueList();
/**
* <pre>
* Output only. Provides reasons for why an ad group is not serving or not
* serving optimally.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason primary_status_reasons = 63 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @param index The index of the value to return.
* @return The enum numeric value on the wire of primaryStatusReasons at the given index.
*/
int getPrimaryStatusReasonsValue(int index);
/**
* <pre>
* Settings for Demand Gen ad groups.
* </pre>
*
* <code>.google.ads.googleads.v19.resources.AdGroup.DemandGenAdGroupSettings demand_gen_ad_group_settings = 91;</code>
* @return Whether the demandGenAdGroupSettings field is set.
*/
boolean hasDemandGenAdGroupSettings();
/**
* <pre>
* Settings for Demand Gen ad groups.
* </pre>
*
* <code>.google.ads.googleads.v19.resources.AdGroup.DemandGenAdGroupSettings demand_gen_ad_group_settings = 91;</code>
* @return The demandGenAdGroupSettings.
*/
com.google.ads.googleads.v19.resources.AdGroup.DemandGenAdGroupSettings getDemandGenAdGroupSettings();
/**
* <pre>
* Settings for Demand Gen ad groups.
* </pre>
*
* <code>.google.ads.googleads.v19.resources.AdGroup.DemandGenAdGroupSettings demand_gen_ad_group_settings = 91;</code>
*/
com.google.ads.googleads.v19.resources.AdGroup.DemandGenAdGroupSettingsOrBuilder getDemandGenAdGroupSettingsOrBuilder();
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v20/resources/ad_group.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.resources;
public interface AdGroupOrBuilder extends
// @@protoc_insertion_point(interface_extends:google.ads.googleads.v20.resources.AdGroup)
com.google.protobuf.MessageOrBuilder {
/**
* <pre>
* Immutable. The resource name of the ad group.
* Ad group resource names have the form:
*
* `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
java.lang.String getResourceName();
/**
* <pre>
* Immutable. The resource name of the ad group.
* Ad group resource names have the form:
*
* `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
com.google.protobuf.ByteString
getResourceNameBytes();
/**
* <pre>
* Output only. The ID of the ad group.
* </pre>
*
* <code>optional int64 id = 34 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the id field is set.
*/
boolean hasId();
/**
* <pre>
* Output only. The ID of the ad group.
* </pre>
*
* <code>optional int64 id = 34 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The id.
*/
long getId();
/**
* <pre>
* The name of the ad group.
*
* This field is required and should not be empty when creating new ad
* groups.
*
* It must contain fewer than 255 UTF-8 full-width characters.
*
* It must not contain any null (code point 0x0), NL line feed
* (code point 0xA) or carriage return (code point 0xD) characters.
* </pre>
*
* <code>optional string name = 35;</code>
* @return Whether the name field is set.
*/
boolean hasName();
/**
* <pre>
* The name of the ad group.
*
* This field is required and should not be empty when creating new ad
* groups.
*
* It must contain fewer than 255 UTF-8 full-width characters.
*
* It must not contain any null (code point 0x0), NL line feed
* (code point 0xA) or carriage return (code point 0xD) characters.
* </pre>
*
* <code>optional string name = 35;</code>
* @return The name.
*/
java.lang.String getName();
/**
* <pre>
* The name of the ad group.
*
* This field is required and should not be empty when creating new ad
* groups.
*
* It must contain fewer than 255 UTF-8 full-width characters.
*
* It must not contain any null (code point 0x0), NL line feed
* (code point 0xA) or carriage return (code point 0xD) characters.
* </pre>
*
* <code>optional string name = 35;</code>
* @return The bytes for name.
*/
com.google.protobuf.ByteString
getNameBytes();
/**
* <pre>
* The status of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.AdGroupStatusEnum.AdGroupStatus status = 5;</code>
* @return The enum numeric value on the wire for status.
*/
int getStatusValue();
/**
* <pre>
* The status of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.AdGroupStatusEnum.AdGroupStatus status = 5;</code>
* @return The status.
*/
com.google.ads.googleads.v20.enums.AdGroupStatusEnum.AdGroupStatus getStatus();
/**
* <pre>
* Immutable. The type of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.AdGroupTypeEnum.AdGroupType type = 12 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The enum numeric value on the wire for type.
*/
int getTypeValue();
/**
* <pre>
* Immutable. The type of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.AdGroupTypeEnum.AdGroupType type = 12 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The type.
*/
com.google.ads.googleads.v20.enums.AdGroupTypeEnum.AdGroupType getType();
/**
* <pre>
* The ad rotation mode of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.AdGroupAdRotationModeEnum.AdGroupAdRotationMode ad_rotation_mode = 22;</code>
* @return The enum numeric value on the wire for adRotationMode.
*/
int getAdRotationModeValue();
/**
* <pre>
* The ad rotation mode of the ad group.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.AdGroupAdRotationModeEnum.AdGroupAdRotationMode ad_rotation_mode = 22;</code>
* @return The adRotationMode.
*/
com.google.ads.googleads.v20.enums.AdGroupAdRotationModeEnum.AdGroupAdRotationMode getAdRotationMode();
/**
* <pre>
* Output only. For draft or experiment ad groups, this field is the resource
* name of the base ad group from which this ad group was created. If a draft
* or experiment ad group does not have a base ad group, then this field is
* null.
*
* For base ad groups, this field equals the ad group resource name.
*
* This field is read-only.
* </pre>
*
* <code>optional string base_ad_group = 36 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return Whether the baseAdGroup field is set.
*/
boolean hasBaseAdGroup();
/**
* <pre>
* Output only. For draft or experiment ad groups, this field is the resource
* name of the base ad group from which this ad group was created. If a draft
* or experiment ad group does not have a base ad group, then this field is
* null.
*
* For base ad groups, this field equals the ad group resource name.
*
* This field is read-only.
* </pre>
*
* <code>optional string base_ad_group = 36 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The baseAdGroup.
*/
java.lang.String getBaseAdGroup();
/**
* <pre>
* Output only. For draft or experiment ad groups, this field is the resource
* name of the base ad group from which this ad group was created. If a draft
* or experiment ad group does not have a base ad group, then this field is
* null.
*
* For base ad groups, this field equals the ad group resource name.
*
* This field is read-only.
* </pre>
*
* <code>optional string base_ad_group = 36 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for baseAdGroup.
*/
com.google.protobuf.ByteString
getBaseAdGroupBytes();
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 37;</code>
* @return Whether the trackingUrlTemplate field is set.
*/
boolean hasTrackingUrlTemplate();
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 37;</code>
* @return The trackingUrlTemplate.
*/
java.lang.String getTrackingUrlTemplate();
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 37;</code>
* @return The bytes for trackingUrlTemplate.
*/
com.google.protobuf.ByteString
getTrackingUrlTemplateBytes();
/**
* <pre>
* The list of mappings used to substitute custom parameter tags in a
* `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.CustomParameter url_custom_parameters = 6;</code>
*/
java.util.List<com.google.ads.googleads.v20.common.CustomParameter>
getUrlCustomParametersList();
/**
* <pre>
* The list of mappings used to substitute custom parameter tags in a
* `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.CustomParameter url_custom_parameters = 6;</code>
*/
com.google.ads.googleads.v20.common.CustomParameter getUrlCustomParameters(int index);
/**
* <pre>
* The list of mappings used to substitute custom parameter tags in a
* `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.CustomParameter url_custom_parameters = 6;</code>
*/
int getUrlCustomParametersCount();
/**
* <pre>
* The list of mappings used to substitute custom parameter tags in a
* `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.CustomParameter url_custom_parameters = 6;</code>
*/
java.util.List<? extends com.google.ads.googleads.v20.common.CustomParameterOrBuilder>
getUrlCustomParametersOrBuilderList();
/**
* <pre>
* The list of mappings used to substitute custom parameter tags in a
* `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.CustomParameter url_custom_parameters = 6;</code>
*/
com.google.ads.googleads.v20.common.CustomParameterOrBuilder getUrlCustomParametersOrBuilder(
int index);
/**
* <pre>
* Immutable. The campaign to which the ad group belongs.
* </pre>
*
* <code>optional string campaign = 38 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return Whether the campaign field is set.
*/
boolean hasCampaign();
/**
* <pre>
* Immutable. The campaign to which the ad group belongs.
* </pre>
*
* <code>optional string campaign = 38 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The campaign.
*/
java.lang.String getCampaign();
/**
* <pre>
* Immutable. The campaign to which the ad group belongs.
* </pre>
*
* <code>optional string campaign = 38 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for campaign.
*/
com.google.protobuf.ByteString
getCampaignBytes();
/**
* <pre>
* The maximum CPC (cost-per-click) bid.
* </pre>
*
* <code>optional int64 cpc_bid_micros = 39;</code>
* @return Whether the cpcBidMicros field is set.
*/
boolean hasCpcBidMicros();
/**
* <pre>
* The maximum CPC (cost-per-click) bid.
* </pre>
*
* <code>optional int64 cpc_bid_micros = 39;</code>
* @return The cpcBidMicros.
*/
long getCpcBidMicros();
/**
* <pre>
* Output only. Value will be same as that of the CPC (cost-per-click) bid
* value when the bidding strategy is one of manual cpc, enhanced cpc, page
* one promoted or target outrank share, otherwise the value will be null.
* </pre>
*
* <code>optional int64 effective_cpc_bid_micros = 57 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the effectiveCpcBidMicros field is set.
*/
boolean hasEffectiveCpcBidMicros();
/**
* <pre>
* Output only. Value will be same as that of the CPC (cost-per-click) bid
* value when the bidding strategy is one of manual cpc, enhanced cpc, page
* one promoted or target outrank share, otherwise the value will be null.
* </pre>
*
* <code>optional int64 effective_cpc_bid_micros = 57 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The effectiveCpcBidMicros.
*/
long getEffectiveCpcBidMicros();
/**
* <pre>
* The maximum CPM (cost-per-thousand viewable impressions) bid.
* </pre>
*
* <code>optional int64 cpm_bid_micros = 40;</code>
* @return Whether the cpmBidMicros field is set.
*/
boolean hasCpmBidMicros();
/**
* <pre>
* The maximum CPM (cost-per-thousand viewable impressions) bid.
* </pre>
*
* <code>optional int64 cpm_bid_micros = 40;</code>
* @return The cpmBidMicros.
*/
long getCpmBidMicros();
/**
* <pre>
* The target CPA (cost-per-acquisition). If the ad group's campaign
* bidding strategy is TargetCpa or MaximizeConversions (with its target_cpa
* field set), then this field overrides the target CPA specified in the
* campaign's bidding strategy.
* Otherwise, this value is ignored.
* </pre>
*
* <code>optional int64 target_cpa_micros = 41;</code>
* @return Whether the targetCpaMicros field is set.
*/
boolean hasTargetCpaMicros();
/**
* <pre>
* The target CPA (cost-per-acquisition). If the ad group's campaign
* bidding strategy is TargetCpa or MaximizeConversions (with its target_cpa
* field set), then this field overrides the target CPA specified in the
* campaign's bidding strategy.
* Otherwise, this value is ignored.
* </pre>
*
* <code>optional int64 target_cpa_micros = 41;</code>
* @return The targetCpaMicros.
*/
long getTargetCpaMicros();
/**
* <pre>
* The CPV (cost-per-view) bid.
* </pre>
*
* <code>optional int64 cpv_bid_micros = 42;</code>
* @return Whether the cpvBidMicros field is set.
*/
boolean hasCpvBidMicros();
/**
* <pre>
* The CPV (cost-per-view) bid.
* </pre>
*
* <code>optional int64 cpv_bid_micros = 42;</code>
* @return The cpvBidMicros.
*/
long getCpvBidMicros();
/**
* <pre>
* Average amount in micros that the advertiser is willing to pay for every
* thousand times the ad is shown.
* </pre>
*
* <code>optional int64 target_cpm_micros = 43;</code>
* @return Whether the targetCpmMicros field is set.
*/
boolean hasTargetCpmMicros();
/**
* <pre>
* Average amount in micros that the advertiser is willing to pay for every
* thousand times the ad is shown.
* </pre>
*
* <code>optional int64 target_cpm_micros = 43;</code>
* @return The targetCpmMicros.
*/
long getTargetCpmMicros();
/**
* <pre>
* The target ROAS (return-on-ad-spend) override. If the ad group's campaign
* bidding strategy is TargetRoas or MaximizeConversionValue (with its
* target_roas field set), then this field overrides the target ROAS specified
* in the campaign's bidding strategy.
* Otherwise, this value is ignored.
* </pre>
*
* <code>optional double target_roas = 44;</code>
* @return Whether the targetRoas field is set.
*/
boolean hasTargetRoas();
/**
* <pre>
* The target ROAS (return-on-ad-spend) override. If the ad group's campaign
* bidding strategy is TargetRoas or MaximizeConversionValue (with its
* target_roas field set), then this field overrides the target ROAS specified
* in the campaign's bidding strategy.
* Otherwise, this value is ignored.
* </pre>
*
* <code>optional double target_roas = 44;</code>
* @return The targetRoas.
*/
double getTargetRoas();
/**
* <pre>
* The percent cpc bid amount, expressed as a fraction of the advertised price
* for some good or service. The valid range for the fraction is [0,1) and the
* value stored here is 1,000,000 * [fraction].
* </pre>
*
* <code>optional int64 percent_cpc_bid_micros = 45;</code>
* @return Whether the percentCpcBidMicros field is set.
*/
boolean hasPercentCpcBidMicros();
/**
* <pre>
* The percent cpc bid amount, expressed as a fraction of the advertised price
* for some good or service. The valid range for the fraction is [0,1) and the
* value stored here is 1,000,000 * [fraction].
* </pre>
*
* <code>optional int64 percent_cpc_bid_micros = 45;</code>
* @return The percentCpcBidMicros.
*/
long getPercentCpcBidMicros();
/**
* <pre>
* The fixed amount in micros that the advertiser pays for every thousand
* impressions of the ad.
* </pre>
*
* <code>optional int64 fixed_cpm_micros = 64;</code>
* @return Whether the fixedCpmMicros field is set.
*/
boolean hasFixedCpmMicros();
/**
* <pre>
* The fixed amount in micros that the advertiser pays for every thousand
* impressions of the ad.
* </pre>
*
* <code>optional int64 fixed_cpm_micros = 64;</code>
* @return The fixedCpmMicros.
*/
long getFixedCpmMicros();
/**
* <pre>
* Average amount in micros that the advertiser is willing to pay for every ad
* view.
* </pre>
*
* <code>optional int64 target_cpv_micros = 65;</code>
* @return Whether the targetCpvMicros field is set.
*/
boolean hasTargetCpvMicros();
/**
* <pre>
* Average amount in micros that the advertiser is willing to pay for every ad
* view.
* </pre>
*
* <code>optional int64 target_cpv_micros = 65;</code>
* @return The targetCpvMicros.
*/
long getTargetCpvMicros();
/**
* <pre>
* True if optimized targeting is enabled. Optimized Targeting is the
* replacement for Audience Expansion.
* </pre>
*
* <code>bool optimized_targeting_enabled = 59;</code>
* @return The optimizedTargetingEnabled.
*/
boolean getOptimizedTargetingEnabled();
/**
* <pre>
* When this value is true, demographics will be excluded from the types of
* targeting which are expanded when optimized_targeting_enabled is true.
* When optimized_targeting_enabled is false, this field is ignored. Default
* is false.
* </pre>
*
* <code>bool exclude_demographic_expansion = 67;</code>
* @return The excludeDemographicExpansion.
*/
boolean getExcludeDemographicExpansion();
/**
* <pre>
* Allows advertisers to specify a targeting dimension on which to place
* absolute bids. This is only applicable for campaigns that target only the
* display network and not search.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.TargetingDimensionEnum.TargetingDimension display_custom_bid_dimension = 23;</code>
* @return The enum numeric value on the wire for displayCustomBidDimension.
*/
int getDisplayCustomBidDimensionValue();
/**
* <pre>
* Allows advertisers to specify a targeting dimension on which to place
* absolute bids. This is only applicable for campaigns that target only the
* display network and not search.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.TargetingDimensionEnum.TargetingDimension display_custom_bid_dimension = 23;</code>
* @return The displayCustomBidDimension.
*/
com.google.ads.googleads.v20.enums.TargetingDimensionEnum.TargetingDimension getDisplayCustomBidDimension();
/**
* <pre>
* URL template for appending params to Final URL.
* </pre>
*
* <code>optional string final_url_suffix = 46;</code>
* @return Whether the finalUrlSuffix field is set.
*/
boolean hasFinalUrlSuffix();
/**
* <pre>
* URL template for appending params to Final URL.
* </pre>
*
* <code>optional string final_url_suffix = 46;</code>
* @return The finalUrlSuffix.
*/
java.lang.String getFinalUrlSuffix();
/**
* <pre>
* URL template for appending params to Final URL.
* </pre>
*
* <code>optional string final_url_suffix = 46;</code>
* @return The bytes for finalUrlSuffix.
*/
com.google.protobuf.ByteString
getFinalUrlSuffixBytes();
/**
* <pre>
* Setting for targeting related features.
* </pre>
*
* <code>.google.ads.googleads.v20.common.TargetingSetting targeting_setting = 25;</code>
* @return Whether the targetingSetting field is set.
*/
boolean hasTargetingSetting();
/**
* <pre>
* Setting for targeting related features.
* </pre>
*
* <code>.google.ads.googleads.v20.common.TargetingSetting targeting_setting = 25;</code>
* @return The targetingSetting.
*/
com.google.ads.googleads.v20.common.TargetingSetting getTargetingSetting();
/**
* <pre>
* Setting for targeting related features.
* </pre>
*
* <code>.google.ads.googleads.v20.common.TargetingSetting targeting_setting = 25;</code>
*/
com.google.ads.googleads.v20.common.TargetingSettingOrBuilder getTargetingSettingOrBuilder();
/**
* <pre>
* Immutable. Setting for audience related features.
* </pre>
*
* <code>.google.ads.googleads.v20.resources.AdGroup.AudienceSetting audience_setting = 56 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the audienceSetting field is set.
*/
boolean hasAudienceSetting();
/**
* <pre>
* Immutable. Setting for audience related features.
* </pre>
*
* <code>.google.ads.googleads.v20.resources.AdGroup.AudienceSetting audience_setting = 56 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The audienceSetting.
*/
com.google.ads.googleads.v20.resources.AdGroup.AudienceSetting getAudienceSetting();
/**
* <pre>
* Immutable. Setting for audience related features.
* </pre>
*
* <code>.google.ads.googleads.v20.resources.AdGroup.AudienceSetting audience_setting = 56 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v20.resources.AdGroup.AudienceSettingOrBuilder getAudienceSettingOrBuilder();
/**
* <pre>
* Output only. The effective target CPA (cost-per-acquisition).
* This field is read-only.
* </pre>
*
* <code>optional int64 effective_target_cpa_micros = 47 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the effectiveTargetCpaMicros field is set.
*/
boolean hasEffectiveTargetCpaMicros();
/**
* <pre>
* Output only. The effective target CPA (cost-per-acquisition).
* This field is read-only.
* </pre>
*
* <code>optional int64 effective_target_cpa_micros = 47 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The effectiveTargetCpaMicros.
*/
long getEffectiveTargetCpaMicros();
/**
 * <pre>
 * Output only. Source of the effective target CPA.
 * This field is read-only.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.enums.BiddingSourceEnum.BiddingSource effective_target_cpa_source = 29 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The enum numeric value on the wire for effectiveTargetCpaSource.
 */
int getEffectiveTargetCpaSourceValue();
/**
 * <pre>
 * Output only. Source of the effective target CPA.
 * This field is read-only.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.enums.BiddingSourceEnum.BiddingSource effective_target_cpa_source = 29 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The effectiveTargetCpaSource.
 */
com.google.ads.googleads.v20.enums.BiddingSourceEnum.BiddingSource getEffectiveTargetCpaSource();
/**
 * <pre>
 * Output only. The effective target ROAS (return-on-ad-spend).
 * This field is read-only.
 * </pre>
 *
 * <code>optional double effective_target_roas = 48 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return Whether the effectiveTargetRoas field is set.
 */
boolean hasEffectiveTargetRoas();
/**
 * <pre>
 * Output only. The effective target ROAS (return-on-ad-spend).
 * This field is read-only.
 * </pre>
 *
 * <code>optional double effective_target_roas = 48 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The effectiveTargetRoas.
 */
double getEffectiveTargetRoas();
/**
 * <pre>
 * Output only. Source of the effective target ROAS.
 * This field is read-only.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.enums.BiddingSourceEnum.BiddingSource effective_target_roas_source = 32 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The enum numeric value on the wire for effectiveTargetRoasSource.
 */
int getEffectiveTargetRoasSourceValue();
/**
 * <pre>
 * Output only. Source of the effective target ROAS.
 * This field is read-only.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.enums.BiddingSourceEnum.BiddingSource effective_target_roas_source = 32 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The effectiveTargetRoasSource.
 */
com.google.ads.googleads.v20.enums.BiddingSourceEnum.BiddingSource getEffectiveTargetRoasSource();
/**
 * <pre>
 * Output only. The resource names of labels attached to this ad group.
 * </pre>
 *
 * <code>repeated string labels = 49 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @return A list containing the labels.
 */
java.util.List<java.lang.String>
    getLabelsList();
/**
 * <pre>
 * Output only. The resource names of labels attached to this ad group.
 * </pre>
 *
 * <code>repeated string labels = 49 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @return The count of labels.
 */
int getLabelsCount();
/**
 * <pre>
 * Output only. The resource names of labels attached to this ad group.
 * </pre>
 *
 * <code>repeated string labels = 49 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @param index The index of the element to return.
 * @return The labels at the given index.
 */
java.lang.String getLabels(int index);
/**
 * <pre>
 * Output only. The resource names of labels attached to this ad group.
 * </pre>
 *
 * <code>repeated string labels = 49 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @param index The index of the value to return.
 * @return The bytes of the labels at the given index.
 */
com.google.protobuf.ByteString
    getLabelsBytes(int index);
/**
 * <pre>
 * The asset field types that should be excluded from this ad group. Asset
 * links with these field types will not be inherited by this ad group from
 * the upper levels.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType excluded_parent_asset_field_types = 54;</code>
 * @return A list containing the excludedParentAssetFieldTypes.
 */
java.util.List<com.google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType> getExcludedParentAssetFieldTypesList();
/**
 * <pre>
 * The asset field types that should be excluded from this ad group. Asset
 * links with these field types will not be inherited by this ad group from
 * the upper levels.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType excluded_parent_asset_field_types = 54;</code>
 * @return The count of excludedParentAssetFieldTypes.
 */
int getExcludedParentAssetFieldTypesCount();
/**
 * <pre>
 * The asset field types that should be excluded from this ad group. Asset
 * links with these field types will not be inherited by this ad group from
 * the upper levels.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType excluded_parent_asset_field_types = 54;</code>
 * @param index The index of the element to return.
 * @return The excludedParentAssetFieldTypes at the given index.
 */
com.google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType getExcludedParentAssetFieldTypes(int index);
/**
 * <pre>
 * The asset field types that should be excluded from this ad group. Asset
 * links with these field types will not be inherited by this ad group from
 * the upper levels.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType excluded_parent_asset_field_types = 54;</code>
 * @return A list containing the enum numeric values on the wire for excludedParentAssetFieldTypes.
 */
java.util.List<java.lang.Integer>
    getExcludedParentAssetFieldTypesValueList();
/**
 * <pre>
 * The asset field types that should be excluded from this ad group. Asset
 * links with these field types will not be inherited by this ad group from
 * the upper levels.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType excluded_parent_asset_field_types = 54;</code>
 * @param index The index of the value to return.
 * @return The enum numeric value on the wire of excludedParentAssetFieldTypes at the given index.
 */
int getExcludedParentAssetFieldTypesValue(int index);
/**
 * <pre>
 * The asset set types that should be excluded from this ad group. Asset set
 * links with these types will not be inherited by this ad group from the
 * upper levels.
 * Location group types (GMB_DYNAMIC_LOCATION_GROUP,
 * CHAIN_DYNAMIC_LOCATION_GROUP, and STATIC_LOCATION_GROUP) are child types of
 * LOCATION_SYNC. Therefore, if LOCATION_SYNC is set for this field, all
 * location group asset sets are not allowed to be linked to this ad group,
 * and all Location Extension (LE) and Affiliate Location Extensions (ALE)
 * will not be served under this ad group.
 * Only LOCATION_SYNC is currently supported.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AssetSetTypeEnum.AssetSetType excluded_parent_asset_set_types = 58;</code>
 * @return A list containing the excludedParentAssetSetTypes.
 */
java.util.List<com.google.ads.googleads.v20.enums.AssetSetTypeEnum.AssetSetType> getExcludedParentAssetSetTypesList();
/**
 * <pre>
 * The asset set types that should be excluded from this ad group. Asset set
 * links with these types will not be inherited by this ad group from the
 * upper levels.
 * Location group types (GMB_DYNAMIC_LOCATION_GROUP,
 * CHAIN_DYNAMIC_LOCATION_GROUP, and STATIC_LOCATION_GROUP) are child types of
 * LOCATION_SYNC. Therefore, if LOCATION_SYNC is set for this field, all
 * location group asset sets are not allowed to be linked to this ad group,
 * and all Location Extension (LE) and Affiliate Location Extensions (ALE)
 * will not be served under this ad group.
 * Only LOCATION_SYNC is currently supported.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AssetSetTypeEnum.AssetSetType excluded_parent_asset_set_types = 58;</code>
 * @return The count of excludedParentAssetSetTypes.
 */
int getExcludedParentAssetSetTypesCount();
/**
 * <pre>
 * The asset set types that should be excluded from this ad group. Asset set
 * links with these types will not be inherited by this ad group from the
 * upper levels.
 * Location group types (GMB_DYNAMIC_LOCATION_GROUP,
 * CHAIN_DYNAMIC_LOCATION_GROUP, and STATIC_LOCATION_GROUP) are child types of
 * LOCATION_SYNC. Therefore, if LOCATION_SYNC is set for this field, all
 * location group asset sets are not allowed to be linked to this ad group,
 * and all Location Extension (LE) and Affiliate Location Extensions (ALE)
 * will not be served under this ad group.
 * Only LOCATION_SYNC is currently supported.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AssetSetTypeEnum.AssetSetType excluded_parent_asset_set_types = 58;</code>
 * @param index The index of the element to return.
 * @return The excludedParentAssetSetTypes at the given index.
 */
com.google.ads.googleads.v20.enums.AssetSetTypeEnum.AssetSetType getExcludedParentAssetSetTypes(int index);
/**
 * <pre>
 * The asset set types that should be excluded from this ad group. Asset set
 * links with these types will not be inherited by this ad group from the
 * upper levels.
 * Location group types (GMB_DYNAMIC_LOCATION_GROUP,
 * CHAIN_DYNAMIC_LOCATION_GROUP, and STATIC_LOCATION_GROUP) are child types of
 * LOCATION_SYNC. Therefore, if LOCATION_SYNC is set for this field, all
 * location group asset sets are not allowed to be linked to this ad group,
 * and all Location Extension (LE) and Affiliate Location Extensions (ALE)
 * will not be served under this ad group.
 * Only LOCATION_SYNC is currently supported.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AssetSetTypeEnum.AssetSetType excluded_parent_asset_set_types = 58;</code>
 * @return A list containing the enum numeric values on the wire for excludedParentAssetSetTypes.
 */
java.util.List<java.lang.Integer>
    getExcludedParentAssetSetTypesValueList();
/**
 * <pre>
 * The asset set types that should be excluded from this ad group. Asset set
 * links with these types will not be inherited by this ad group from the
 * upper levels.
 * Location group types (GMB_DYNAMIC_LOCATION_GROUP,
 * CHAIN_DYNAMIC_LOCATION_GROUP, and STATIC_LOCATION_GROUP) are child types of
 * LOCATION_SYNC. Therefore, if LOCATION_SYNC is set for this field, all
 * location group asset sets are not allowed to be linked to this ad group,
 * and all Location Extension (LE) and Affiliate Location Extensions (ALE)
 * will not be served under this ad group.
 * Only LOCATION_SYNC is currently supported.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AssetSetTypeEnum.AssetSetType excluded_parent_asset_set_types = 58;</code>
 * @param index The index of the value to return.
 * @return The enum numeric value on the wire of excludedParentAssetSetTypes at the given index.
 */
int getExcludedParentAssetSetTypesValue(int index);
/**
 * <pre>
 * Output only. Provides an aggregated view into why an ad group is not
 * serving or not serving optimally.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.enums.AdGroupPrimaryStatusEnum.AdGroupPrimaryStatus primary_status = 62 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The enum numeric value on the wire for primaryStatus.
 */
int getPrimaryStatusValue();
/**
 * <pre>
 * Output only. Provides an aggregated view into why an ad group is not
 * serving or not serving optimally.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.enums.AdGroupPrimaryStatusEnum.AdGroupPrimaryStatus primary_status = 62 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The primaryStatus.
 */
com.google.ads.googleads.v20.enums.AdGroupPrimaryStatusEnum.AdGroupPrimaryStatus getPrimaryStatus();
/**
 * <pre>
 * Output only. Provides reasons why an ad group is not serving or not
 * serving optimally.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason primary_status_reasons = 63 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return A list containing the primaryStatusReasons.
 */
java.util.List<com.google.ads.googleads.v20.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason> getPrimaryStatusReasonsList();
/**
 * <pre>
 * Output only. Provides reasons why an ad group is not serving or not
 * serving optimally.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason primary_status_reasons = 63 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The count of primaryStatusReasons.
 */
int getPrimaryStatusReasonsCount();
/**
 * <pre>
 * Output only. Provides reasons why an ad group is not serving or not
 * serving optimally.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason primary_status_reasons = 63 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @param index The index of the element to return.
 * @return The primaryStatusReasons at the given index.
 */
com.google.ads.googleads.v20.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason getPrimaryStatusReasons(int index);
/**
 * <pre>
 * Output only. Provides reasons why an ad group is not serving or not
 * serving optimally.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason primary_status_reasons = 63 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return A list containing the enum numeric values on the wire for primaryStatusReasons.
 */
java.util.List<java.lang.Integer>
    getPrimaryStatusReasonsValueList();
/**
 * <pre>
 * Output only. Provides reasons why an ad group is not serving or not
 * serving optimally.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.enums.AdGroupPrimaryStatusReasonEnum.AdGroupPrimaryStatusReason primary_status_reasons = 63 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @param index The index of the value to return.
 * @return The enum numeric value on the wire of primaryStatusReasons at the given index.
 */
int getPrimaryStatusReasonsValue(int index);
/**
 * <pre>
 * Settings for Demand Gen ad groups.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.resources.AdGroup.DemandGenAdGroupSettings demand_gen_ad_group_settings = 91;</code>
 * @return Whether the demandGenAdGroupSettings field is set.
 */
boolean hasDemandGenAdGroupSettings();
/**
 * <pre>
 * Settings for Demand Gen ad groups.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.resources.AdGroup.DemandGenAdGroupSettings demand_gen_ad_group_settings = 91;</code>
 * @return The demandGenAdGroupSettings.
 */
com.google.ads.googleads.v20.resources.AdGroup.DemandGenAdGroupSettings getDemandGenAdGroupSettings();
/**
 * <pre>
 * Settings for Demand Gen ad groups.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.resources.AdGroup.DemandGenAdGroupSettings demand_gen_ad_group_settings = 91;</code>
 */
com.google.ads.googleads.v20.resources.AdGroup.DemandGenAdGroupSettingsOrBuilder getDemandGenAdGroupSettingsOrBuilder();
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/memorystore/v1/memorystore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.memorystore.v1;
public interface InstanceOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.memorystore.v1.Instance)
com.google.protobuf.MessageOrBuilder {
/**
 *
 *
 * <pre>
 * Optional. Immutable. Backups that are stored in Cloud Storage buckets.
 * The Cloud Storage buckets need to be the same region as the instances.
 * Read permission is required to import from the provided Cloud Storage
 * Objects.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.GcsBackupSource gcs_source = 23 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @return Whether the gcsSource field is set.
 */
boolean hasGcsSource();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Backups that are stored in Cloud Storage buckets.
 * The Cloud Storage buckets need to be the same region as the instances.
 * Read permission is required to import from the provided Cloud Storage
 * Objects.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.GcsBackupSource gcs_source = 23 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @return The gcsSource.
 */
com.google.cloud.memorystore.v1.Instance.GcsBackupSource getGcsSource();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Backups that are stored in Cloud Storage buckets.
 * The Cloud Storage buckets need to be the same region as the instances.
 * Read permission is required to import from the provided Cloud Storage
 * Objects.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.GcsBackupSource gcs_source = 23 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 */
com.google.cloud.memorystore.v1.Instance.GcsBackupSourceOrBuilder getGcsSourceOrBuilder();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Backups that are generated and managed by memorystore
 * service.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.ManagedBackupSource managed_backup_source = 24 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @return Whether the managedBackupSource field is set.
 */
boolean hasManagedBackupSource();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Backups that are generated and managed by memorystore
 * service.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.ManagedBackupSource managed_backup_source = 24 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @return The managedBackupSource.
 */
com.google.cloud.memorystore.v1.Instance.ManagedBackupSource getManagedBackupSource();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Backups that are generated and managed by memorystore
 * service.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.ManagedBackupSource managed_backup_source = 24 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 */
com.google.cloud.memorystore.v1.Instance.ManagedBackupSourceOrBuilder
    getManagedBackupSourceOrBuilder();
/**
 *
 *
 * <pre>
 * Identifier. Unique name of the instance.
 * Format: projects/{project}/locations/{location}/instances/{instance}
 * </pre>
 *
 * <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code>
 *
 * @return The name.
 */
java.lang.String getName();
/**
 *
 *
 * <pre>
 * Identifier. Unique name of the instance.
 * Format: projects/{project}/locations/{location}/instances/{instance}
 * </pre>
 *
 * <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code>
 *
 * @return The bytes for name.
 */
com.google.protobuf.ByteString getNameBytes();
/**
 *
 *
 * <pre>
 * Output only. Creation timestamp of the instance.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return Whether the createTime field is set.
 */
boolean hasCreateTime();
/**
 *
 *
 * <pre>
 * Output only. Creation timestamp of the instance.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return The createTime.
 */
com.google.protobuf.Timestamp getCreateTime();
/**
 *
 *
 * <pre>
 * Output only. Creation timestamp of the instance.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder();
/**
 *
 *
 * <pre>
 * Output only. Latest update timestamp of the instance.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return Whether the updateTime field is set.
 */
boolean hasUpdateTime();
/**
 *
 *
 * <pre>
 * Output only. Latest update timestamp of the instance.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return The updateTime.
 */
com.google.protobuf.Timestamp getUpdateTime();
/**
 *
 *
 * <pre>
 * Output only. Latest update timestamp of the instance.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder();
/**
 *
 *
 * <pre>
 * Optional. Labels to represent user-provided metadata.
 * </pre>
 *
 * <code>map&lt;string, string&gt; labels = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 */
int getLabelsCount();
/**
 *
 *
 * <pre>
 * Optional. Labels to represent user-provided metadata.
 * </pre>
 *
 * <code>map&lt;string, string&gt; labels = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 */
boolean containsLabels(java.lang.String key);
/** Use {@link #getLabelsMap()} instead. */
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String> getLabels();
/**
 *
 *
 * <pre>
 * Optional. Labels to represent user-provided metadata.
 * </pre>
 *
 * <code>map&lt;string, string&gt; labels = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 */
java.util.Map<java.lang.String, java.lang.String> getLabelsMap();
/**
 *
 *
 * <pre>
 * Optional. Labels to represent user-provided metadata.
 * </pre>
 *
 * <code>map&lt;string, string&gt; labels = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 */
/* nullable */
java.lang.String getLabelsOrDefault(
    java.lang.String key,
    /* nullable */
    java.lang.String defaultValue);
/**
 *
 *
 * <pre>
 * Optional. Labels to represent user-provided metadata.
 * </pre>
 *
 * <code>map&lt;string, string&gt; labels = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 */
java.lang.String getLabelsOrThrow(java.lang.String key);
/**
 *
 *
 * <pre>
 * Output only. Current state of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return The enum numeric value on the wire for state.
 */
int getStateValue();
/**
 *
 *
 * <pre>
 * Output only. Current state of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return The state.
 */
com.google.cloud.memorystore.v1.Instance.State getState();
/**
 *
 *
 * <pre>
 * Output only. Additional information about the state of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.StateInfo state_info = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return Whether the stateInfo field is set.
 */
boolean hasStateInfo();
/**
 *
 *
 * <pre>
 * Output only. Additional information about the state of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.StateInfo state_info = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return The stateInfo.
 */
com.google.cloud.memorystore.v1.Instance.StateInfo getStateInfo();
/**
 *
 *
 * <pre>
 * Output only. Additional information about the state of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.StateInfo state_info = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
com.google.cloud.memorystore.v1.Instance.StateInfoOrBuilder getStateInfoOrBuilder();
/**
 *
 *
 * <pre>
 * Output only. System assigned, unique identifier for the instance.
 * </pre>
 *
 * <code>
 * string uid = 7 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_info) = { ... }
 * </code>
 *
 * @return The uid.
 */
java.lang.String getUid();
/**
 *
 *
 * <pre>
 * Output only. System assigned, unique identifier for the instance.
 * </pre>
 *
 * <code>
 * string uid = 7 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_info) = { ... }
 * </code>
 *
 * @return The bytes for uid.
 */
com.google.protobuf.ByteString getUidBytes();
/**
 *
 *
 * <pre>
 * Optional. Number of replica nodes per shard. If omitted the default is 0
 * replicas.
 * </pre>
 *
 * <code>optional int32 replica_count = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return Whether the replicaCount field is set.
 */
boolean hasReplicaCount();
/**
 *
 *
 * <pre>
 * Optional. Number of replica nodes per shard. If omitted the default is 0
 * replicas.
 * </pre>
 *
 * <code>optional int32 replica_count = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The replicaCount.
 */
int getReplicaCount();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Authorization mode of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.AuthorizationMode authorization_mode = 9 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @return The enum numeric value on the wire for authorizationMode.
 */
int getAuthorizationModeValue();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Authorization mode of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.AuthorizationMode authorization_mode = 9 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @return The authorizationMode.
 */
com.google.cloud.memorystore.v1.Instance.AuthorizationMode getAuthorizationMode();
/**
 *
 *
 * <pre>
 * Optional. Immutable. In-transit encryption mode of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.TransitEncryptionMode transit_encryption_mode = 10 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @return The enum numeric value on the wire for transitEncryptionMode.
 */
int getTransitEncryptionModeValue();
/**
 *
 *
 * <pre>
 * Optional. Immutable. In-transit encryption mode of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.TransitEncryptionMode transit_encryption_mode = 10 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @return The transitEncryptionMode.
 */
com.google.cloud.memorystore.v1.Instance.TransitEncryptionMode getTransitEncryptionMode();
/**
 *
 *
 * <pre>
 * Optional. Number of shards for the instance.
 * </pre>
 *
 * <code>int32 shard_count = 11 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The shardCount.
 */
int getShardCount();
/**
 *
 *
 * <pre>
 * Output only. Deprecated: Use the endpoints.connections.psc_auto_connection
 * or endpoints.connections.psc_connection values instead.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.memorystore.v1.DiscoveryEndpoint discovery_endpoints = 12 [deprecated = true, (.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
@java.lang.Deprecated
java.util.List<com.google.cloud.memorystore.v1.DiscoveryEndpoint> getDiscoveryEndpointsList();
/**
 *
 *
 * <pre>
 * Output only. Deprecated: Use the endpoints.connections.psc_auto_connection
 * or endpoints.connections.psc_connection values instead.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.memorystore.v1.DiscoveryEndpoint discovery_endpoints = 12 [deprecated = true, (.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
@java.lang.Deprecated
com.google.cloud.memorystore.v1.DiscoveryEndpoint getDiscoveryEndpoints(int index);
/**
 *
 *
 * <pre>
 * Output only. Deprecated: Use the endpoints.connections.psc_auto_connection
 * or endpoints.connections.psc_connection values instead.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.memorystore.v1.DiscoveryEndpoint discovery_endpoints = 12 [deprecated = true, (.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
@java.lang.Deprecated
int getDiscoveryEndpointsCount();
/**
 *
 *
 * <pre>
 * Output only. Deprecated: Use the endpoints.connections.psc_auto_connection
 * or endpoints.connections.psc_connection values instead.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.memorystore.v1.DiscoveryEndpoint discovery_endpoints = 12 [deprecated = true, (.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
@java.lang.Deprecated
java.util.List<? extends com.google.cloud.memorystore.v1.DiscoveryEndpointOrBuilder>
    getDiscoveryEndpointsOrBuilderList();
/**
 *
 *
 * <pre>
 * Output only. Deprecated: Use the endpoints.connections.psc_auto_connection
 * or endpoints.connections.psc_connection values instead.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.memorystore.v1.DiscoveryEndpoint discovery_endpoints = 12 [deprecated = true, (.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
@java.lang.Deprecated
com.google.cloud.memorystore.v1.DiscoveryEndpointOrBuilder getDiscoveryEndpointsOrBuilder(
    int index);
/**
 *
 *
 * <pre>
 * Optional. Machine type for individual nodes of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.NodeType node_type = 13 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The enum numeric value on the wire for nodeType.
 */
int getNodeTypeValue();
/**
 *
 *
 * <pre>
 * Optional. Machine type for individual nodes of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.Instance.NodeType node_type = 13 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The nodeType.
 */
com.google.cloud.memorystore.v1.Instance.NodeType getNodeType();
/**
 *
 *
 * <pre>
 * Optional. Persistence configuration of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.PersistenceConfig persistence_config = 14 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return Whether the persistenceConfig field is set.
 */
boolean hasPersistenceConfig();
/**
 *
 *
 * <pre>
 * Optional. Persistence configuration of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.PersistenceConfig persistence_config = 14 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The persistenceConfig.
 */
com.google.cloud.memorystore.v1.PersistenceConfig getPersistenceConfig();
/**
 *
 *
 * <pre>
 * Optional. Persistence configuration of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.PersistenceConfig persistence_config = 14 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
com.google.cloud.memorystore.v1.PersistenceConfigOrBuilder getPersistenceConfigOrBuilder();
/**
 *
 *
 * <pre>
 * Optional. Engine version of the instance.
 * </pre>
 *
 * <code>string engine_version = 15 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The engineVersion.
 */
java.lang.String getEngineVersion();
/**
 *
 *
 * <pre>
 * Optional. Engine version of the instance.
 * </pre>
 *
 * <code>string engine_version = 15 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for engineVersion.
 */
com.google.protobuf.ByteString getEngineVersionBytes();
/**
 *
 *
 * <pre>
 * Optional. User-provided engine configurations for the instance.
 * </pre>
 *
 * <code>map&lt;string, string&gt; engine_configs = 16 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
int getEngineConfigsCount();
/**
 *
 *
 * <pre>
 * Optional. User-provided engine configurations for the instance.
 * </pre>
 *
 * <code>map&lt;string, string&gt; engine_configs = 16 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
boolean containsEngineConfigs(java.lang.String key);
/** Use {@link #getEngineConfigsMap()} instead. */
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String> getEngineConfigs();
/**
 *
 *
 * <pre>
 * Optional. User-provided engine configurations for the instance.
 * </pre>
 *
 * <code>map&lt;string, string&gt; engine_configs = 16 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
java.util.Map<java.lang.String, java.lang.String> getEngineConfigsMap();
/**
 *
 *
 * <pre>
 * Optional. User-provided engine configurations for the instance.
 * </pre>
 *
 * <code>map&lt;string, string&gt; engine_configs = 16 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
/* nullable */
java.lang.String getEngineConfigsOrDefault(
    java.lang.String key,
    /* nullable */
    java.lang.String defaultValue);
/**
 *
 *
 * <pre>
 * Optional. User-provided engine configurations for the instance.
 * </pre>
 *
 * <code>map&lt;string, string&gt; engine_configs = 16 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
java.lang.String getEngineConfigsOrThrow(java.lang.String key);
/**
 *
 *
 * <pre>
 * Output only. Configuration of individual nodes of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.NodeConfig node_config = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return Whether the nodeConfig field is set.
 */
boolean hasNodeConfig();
/**
 *
 *
 * <pre>
 * Output only. Configuration of individual nodes of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.NodeConfig node_config = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return The nodeConfig.
 */
com.google.cloud.memorystore.v1.NodeConfig getNodeConfig();
/**
 *
 *
 * <pre>
 * Output only. Configuration of individual nodes of the instance.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.NodeConfig node_config = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
com.google.cloud.memorystore.v1.NodeConfigOrBuilder getNodeConfigOrBuilder();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Zone distribution configuration of the instance for
 * node allocation.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.ZoneDistributionConfig zone_distribution_config = 18 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @return Whether the zoneDistributionConfig field is set.
 */
boolean hasZoneDistributionConfig();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Zone distribution configuration of the instance for
 * node allocation.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.ZoneDistributionConfig zone_distribution_config = 18 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @return The zoneDistributionConfig.
 */
com.google.cloud.memorystore.v1.ZoneDistributionConfig getZoneDistributionConfig();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Zone distribution configuration of the instance for
 * node allocation.
 * </pre>
 *
 * <code>
 * .google.cloud.memorystore.v1.ZoneDistributionConfig zone_distribution_config = 18 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 */
com.google.cloud.memorystore.v1.ZoneDistributionConfigOrBuilder
    getZoneDistributionConfigOrBuilder();
/**
 *
 *
 * <pre>
 * Optional. If set to true deletion of the instance will fail.
 * </pre>
 *
 * <code>optional bool deletion_protection_enabled = 19 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return Whether the deletionProtectionEnabled field is set.
 */
boolean hasDeletionProtectionEnabled();
/**
 *
 *
 * <pre>
 * Optional. If set to true deletion of the instance will fail.
 * </pre>
 *
 * <code>optional bool deletion_protection_enabled = 19 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The deletionProtectionEnabled.
 */
boolean getDeletionProtectionEnabled();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Deprecated: Use the
 * endpoints.connections.psc_auto_connection value instead.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.memorystore.v1.PscAutoConnection psc_auto_connections = 20 [deprecated = true, (.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 */
@java.lang.Deprecated
java.util.List<com.google.cloud.memorystore.v1.PscAutoConnection> getPscAutoConnectionsList();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Deprecated: Use the
 * endpoints.connections.psc_auto_connection value instead.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.memorystore.v1.PscAutoConnection psc_auto_connections = 20 [deprecated = true, (.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 */
@java.lang.Deprecated
com.google.cloud.memorystore.v1.PscAutoConnection getPscAutoConnections(int index);
/**
 *
 *
 * <pre>
 * Optional. Immutable. Deprecated: Use the
 * endpoints.connections.psc_auto_connection value instead.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.memorystore.v1.PscAutoConnection psc_auto_connections = 20 [deprecated = true, (.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 */
@java.lang.Deprecated
int getPscAutoConnectionsCount();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Deprecated: Use the
 * endpoints.connections.psc_auto_connection value instead.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.memorystore.v1.PscAutoConnection psc_auto_connections = 20 [deprecated = true, (.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 */
@java.lang.Deprecated
java.util.List<? extends com.google.cloud.memorystore.v1.PscAutoConnectionOrBuilder>
    getPscAutoConnectionsOrBuilderList();
/**
 *
 *
 * <pre>
 * Optional. Immutable. Deprecated: Use the
 * endpoints.connections.psc_auto_connection value instead.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.memorystore.v1.PscAutoConnection psc_auto_connections = 20 [deprecated = true, (.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 */
@java.lang.Deprecated
com.google.cloud.memorystore.v1.PscAutoConnectionOrBuilder getPscAutoConnectionsOrBuilder(
    int index);
/**
*
*
* <pre>
* Output only. Service attachment details to configure PSC connections.
* </pre>
*
* <code>
* repeated .google.cloud.memorystore.v1.PscAttachmentDetail psc_attachment_details = 21 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<com.google.cloud.memorystore.v1.PscAttachmentDetail> getPscAttachmentDetailsList();
/**
*
*
* <pre>
* Output only. Service attachment details to configure PSC connections.
* </pre>
*
* <code>
* repeated .google.cloud.memorystore.v1.PscAttachmentDetail psc_attachment_details = 21 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.memorystore.v1.PscAttachmentDetail getPscAttachmentDetails(int index);
/**
*
*
* <pre>
* Output only. Service attachment details to configure PSC connections.
* </pre>
*
* <code>
* repeated .google.cloud.memorystore.v1.PscAttachmentDetail psc_attachment_details = 21 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
int getPscAttachmentDetailsCount();
/**
*
*
* <pre>
* Output only. Service attachment details to configure PSC connections.
* </pre>
*
* <code>
* repeated .google.cloud.memorystore.v1.PscAttachmentDetail psc_attachment_details = 21 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<? extends com.google.cloud.memorystore.v1.PscAttachmentDetailOrBuilder>
getPscAttachmentDetailsOrBuilderList();
/**
*
*
* <pre>
* Output only. Service attachment details to configure PSC connections.
* </pre>
*
* <code>
* repeated .google.cloud.memorystore.v1.PscAttachmentDetail psc_attachment_details = 21 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.memorystore.v1.PscAttachmentDetailOrBuilder getPscAttachmentDetailsOrBuilder(
int index);
/**
*
*
* <pre>
* Optional. Endpoints for the instance.
* </pre>
*
* <code>
* repeated .google.cloud.memorystore.v1.Instance.InstanceEndpoint endpoints = 25 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<com.google.cloud.memorystore.v1.Instance.InstanceEndpoint> getEndpointsList();
/**
*
*
* <pre>
* Optional. Endpoints for the instance.
* </pre>
*
* <code>
* repeated .google.cloud.memorystore.v1.Instance.InstanceEndpoint endpoints = 25 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.memorystore.v1.Instance.InstanceEndpoint getEndpoints(int index);
/**
*
*
* <pre>
* Optional. Endpoints for the instance.
* </pre>
*
* <code>
* repeated .google.cloud.memorystore.v1.Instance.InstanceEndpoint endpoints = 25 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
int getEndpointsCount();
/**
*
*
* <pre>
* Optional. Endpoints for the instance.
* </pre>
*
* <code>
* repeated .google.cloud.memorystore.v1.Instance.InstanceEndpoint endpoints = 25 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<? extends com.google.cloud.memorystore.v1.Instance.InstanceEndpointOrBuilder>
getEndpointsOrBuilderList();
/**
*
*
* <pre>
* Optional. Endpoints for the instance.
* </pre>
*
* <code>
* repeated .google.cloud.memorystore.v1.Instance.InstanceEndpoint endpoints = 25 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.memorystore.v1.Instance.InstanceEndpointOrBuilder getEndpointsOrBuilder(
int index);
/**
*
*
* <pre>
* Optional. The mode config for the instance.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.Instance.Mode mode = 26 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The enum numeric value on the wire for mode.
*/
int getModeValue();
/**
*
*
* <pre>
* Optional. The mode config for the instance.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.Instance.Mode mode = 26 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The mode.
*/
com.google.cloud.memorystore.v1.Instance.Mode getMode();
/**
*
*
* <pre>
* Optional. Input only. Ondemand maintenance for the instance.
* </pre>
*
* <code>
* optional bool ondemand_maintenance = 28 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = INPUT_ONLY];
* </code>
*
* @return Whether the ondemandMaintenance field is set.
*/
boolean hasOndemandMaintenance();
/**
*
*
* <pre>
* Optional. Input only. Ondemand maintenance for the instance.
* </pre>
*
* <code>
* optional bool ondemand_maintenance = 28 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = INPUT_ONLY];
* </code>
*
* @return The ondemandMaintenance.
*/
boolean getOndemandMaintenance();
/**
*
*
* <pre>
* Optional. The maintenance policy for the instance. If not provided,
* the maintenance event will be performed based on Memorystore
* internal rollout schedule.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.MaintenancePolicy maintenance_policy = 31 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the maintenancePolicy field is set.
*/
boolean hasMaintenancePolicy();
/**
*
*
* <pre>
* Optional. The maintenance policy for the instance. If not provided,
* the maintenance event will be performed based on Memorystore
* internal rollout schedule.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.MaintenancePolicy maintenance_policy = 31 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The maintenancePolicy.
*/
com.google.cloud.memorystore.v1.MaintenancePolicy getMaintenancePolicy();
/**
*
*
* <pre>
* Optional. The maintenance policy for the instance. If not provided,
* the maintenance event will be performed based on Memorystore
* internal rollout schedule.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.MaintenancePolicy maintenance_policy = 31 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.memorystore.v1.MaintenancePolicyOrBuilder getMaintenancePolicyOrBuilder();
/**
*
*
* <pre>
* Output only. Published maintenance schedule.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.MaintenanceSchedule maintenance_schedule = 32 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the maintenanceSchedule field is set.
*/
boolean hasMaintenanceSchedule();
/**
*
*
* <pre>
* Output only. Published maintenance schedule.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.MaintenanceSchedule maintenance_schedule = 32 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The maintenanceSchedule.
*/
com.google.cloud.memorystore.v1.MaintenanceSchedule getMaintenanceSchedule();
/**
*
*
* <pre>
* Output only. Published maintenance schedule.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.MaintenanceSchedule maintenance_schedule = 32 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.memorystore.v1.MaintenanceScheduleOrBuilder getMaintenanceScheduleOrBuilder();
/**
*
*
* <pre>
* Optional. The config for cross instance replication.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.CrossInstanceReplicationConfig cross_instance_replication_config = 33 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the crossInstanceReplicationConfig field is set.
*/
boolean hasCrossInstanceReplicationConfig();
/**
*
*
* <pre>
* Optional. The config for cross instance replication.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.CrossInstanceReplicationConfig cross_instance_replication_config = 33 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The crossInstanceReplicationConfig.
*/
com.google.cloud.memorystore.v1.CrossInstanceReplicationConfig
getCrossInstanceReplicationConfig();
/**
*
*
* <pre>
* Optional. The config for cross instance replication.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.CrossInstanceReplicationConfig cross_instance_replication_config = 33 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.memorystore.v1.CrossInstanceReplicationConfigOrBuilder
getCrossInstanceReplicationConfigOrBuilder();
/**
*
*
* <pre>
* Optional. If true, instance endpoints that are created and registered by
* customers can be deleted asynchronously. That is, such an instance endpoint
* can be de-registered before the forwarding rules in the instance endpoint
* are deleted.
* </pre>
*
* <code>
* optional bool async_instance_endpoints_deletion_enabled = 44 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the asyncInstanceEndpointsDeletionEnabled field is set.
*/
boolean hasAsyncInstanceEndpointsDeletionEnabled();
/**
*
*
* <pre>
* Optional. If true, instance endpoints that are created and registered by
* customers can be deleted asynchronously. That is, such an instance endpoint
* can be de-registered before the forwarding rules in the instance endpoint
* are deleted.
* </pre>
*
* <code>
* optional bool async_instance_endpoints_deletion_enabled = 44 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The asyncInstanceEndpointsDeletionEnabled.
*/
boolean getAsyncInstanceEndpointsDeletionEnabled();
/**
*
*
* <pre>
* Output only. The backup collection full resource name. Example:
* projects/{project}/locations/{location}/backupCollections/{collection}
* </pre>
*
* <code>
* optional string backup_collection = 47 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return Whether the backupCollection field is set.
*/
boolean hasBackupCollection();
/**
*
*
* <pre>
* Output only. The backup collection full resource name. Example:
* projects/{project}/locations/{location}/backupCollections/{collection}
* </pre>
*
* <code>
* optional string backup_collection = 47 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The backupCollection.
*/
java.lang.String getBackupCollection();
/**
*
*
* <pre>
* Output only. The backup collection full resource name. Example:
* projects/{project}/locations/{location}/backupCollections/{collection}
* </pre>
*
* <code>
* optional string backup_collection = 47 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for backupCollection.
*/
com.google.protobuf.ByteString getBackupCollectionBytes();
/**
*
*
* <pre>
* Optional. The automated backup config for the instance.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.AutomatedBackupConfig automated_backup_config = 48 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the automatedBackupConfig field is set.
*/
boolean hasAutomatedBackupConfig();
/**
*
*
* <pre>
* Optional. The automated backup config for the instance.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.AutomatedBackupConfig automated_backup_config = 48 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The automatedBackupConfig.
*/
com.google.cloud.memorystore.v1.AutomatedBackupConfig getAutomatedBackupConfig();
/**
*
*
* <pre>
* Optional. The automated backup config for the instance.
* </pre>
*
* <code>
* .google.cloud.memorystore.v1.AutomatedBackupConfig automated_backup_config = 48 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.memorystore.v1.AutomatedBackupConfigOrBuilder
getAutomatedBackupConfigOrBuilder();
com.google.cloud.memorystore.v1.Instance.ImportSourcesCase getImportSourcesCase();
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.biglake.v1alpha1;
/**
*
*
* <pre>
* Response message for the ListCatalogs method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse}
*/
public final class ListCatalogsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse)
ListCatalogsResponseOrBuilder {
  // Serialization version pin for this generated message class.
  private static final long serialVersionUID = 0L;

  // Use ListCatalogsResponse.newBuilder() to construct.
  private ListCatalogsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; applies proto3 defaults
  // (empty catalogs list, empty page token).
  private ListCatalogsResponse() {
    catalogs_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Reflective instantiation hook invoked by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListCatalogsResponse();
  }

  // Message descriptor, resolved from the generated file descriptor in MetastoreProto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
        .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_descriptor;
  }

  // Maps descriptor fields to the generated Java accessors for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
        .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.class,
            com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.Builder.class);
  }
  public static final int CATALOGS_FIELD_NUMBER = 1;

  // Immutable once the message is built; never null.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.bigquery.biglake.v1alpha1.Catalog> catalogs_;

  /**
   *
   *
   * <pre>
   * The catalogs from the specified project.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.bigquery.biglake.v1alpha1.Catalog> getCatalogsList() {
    return catalogs_;
  }

  /**
   *
   *
   * <pre>
   * The catalogs from the specified project.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder>
      getCatalogsOrBuilderList() {
    return catalogs_;
  }

  /**
   *
   *
   * <pre>
   * The catalogs from the specified project.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
   */
  @java.lang.Override
  public int getCatalogsCount() {
    return catalogs_.size();
  }

  /**
   *
   *
   * <pre>
   * The catalogs from the specified project.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1alpha1.Catalog getCatalogs(int index) {
    return catalogs_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The catalogs from the specified project.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder getCatalogsOrBuilder(
      int index) {
    return catalogs_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a String or a lazily-decoded UTF-8 ByteString; see
  // getNextPageToken()/getNextPageTokenBytes() for the caching protocol.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // This proto3 message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < catalogs_.size(); i++) {
output.writeMessage(1, catalogs_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < catalogs_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, catalogs_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse)) {
return super.equals(obj);
}
com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse other =
(com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse) obj;
if (!getCatalogsList().equals(other.getCatalogsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel; the mixing below cannot
    // legitimately produce 0 for the memoized value in practice.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard generated-proto hash: seed, descriptor, then each set field
    // folded in by tag number. Must stay consistent with equals().
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getCatalogsCount() > 0) {
      hash = (37 * hash) + CATALOGS_FIELD_NUMBER;
      hash = (53 * hash) + getCatalogsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Static parsers: one overload per supported input representation. All
  // delegate to the shared PARSER / GeneratedMessageV3 helpers.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder creation entry points.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Fresh builder with all fields at their defaults.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Builder pre-populated from an existing message.
  public static Builder newBuilder(
      com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // The default instance yields an empty builder; any other instance seeds the
  // builder with its own field values.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  // Runtime hook for creating a builder attached to a parent message tree.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for the ListCatalogs method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse)
com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponseOrBuilder {
    // Message descriptor shared with the outer class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_descriptor;
    }

    // Maps descriptor fields to generated accessors for reflective access.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.class,
              com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.Builder.class);
    }

    // Construct using com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.newBuilder()
    private Builder() {}

    // Builder attached to a parent message tree (nested-builder support).
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default and discards any nested builder state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (catalogsBuilder_ == null) {
        catalogs_ = java.util.Collections.emptyList();
      } else {
        catalogs_ = null;
        catalogsBuilder_.clear();
      }
      // Drop the "catalogs list is mutable" bit regardless of which branch ran.
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse
        getDefaultInstanceForType() {
      return com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.getDefaultInstance();
    }

    // Builds and verifies initialization; throws if required fields are unset
    // (none exist for this proto3 message, so this cannot throw in practice).
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse build() {
      com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check; repeated fields first, then
    // singular fields gated by the builder's bitfield.
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse buildPartial() {
      com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse result =
          new com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the catalogs list: freezes the builder-owned list in place (bit
    // 0x1 means "mutable copy exists"), or snapshots the nested field builder.
    private void buildPartialRepeatedFields(
        com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse result) {
      if (catalogsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          catalogs_ = java.util.Collections.unmodifiableList(catalogs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.catalogs_ = catalogs_;
      } else {
        result.catalogs_ = catalogsBuilder_.build();
      }
    }
private void buildPartial0(
com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
    // ------------------------------------------------------------------------
    // Reflective field-manipulation overrides: pure delegations to the
    // GeneratedMessageV3.Builder base class, re-declared to narrow the return
    // type to this Builder.
    // ------------------------------------------------------------------------
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse) {
return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Field-by-field merge from another ListCatalogsResponse. Repeated fields
    // are concatenated; the page token is overwritten when set on `other`.
    public Builder mergeFrom(
        com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse other) {
      if (other
          == com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.getDefaultInstance())
        return this;
      if (catalogsBuilder_ == null) {
        // Plain-list mode: adopt the other message's (immutable) list when ours
        // is empty, otherwise append into our mutable copy.
        if (!other.catalogs_.isEmpty()) {
          if (catalogs_.isEmpty()) {
            catalogs_ = other.catalogs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureCatalogsIsMutable();
            catalogs_.addAll(other.catalogs_);
          }
          onChanged();
        }
      } else {
        // Nested-builder mode: if our builder is empty, discard it and adopt
        // the other list directly (re-creating the builder only when the
        // runtime always uses field builders); otherwise append via the builder.
        if (!other.catalogs_.isEmpty()) {
          if (catalogsBuilder_.isEmpty()) {
            catalogsBuilder_.dispose();
            catalogsBuilder_ = null;
            catalogs_ = other.catalogs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            catalogsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getCatalogsFieldBuilder()
                    : null;
          } else {
            catalogsBuilder_.addAllMessages(other.catalogs_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields in this proto3 message, so a builder is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: reads tags until end-of-message (tag 0 or an
    // end-group tag), dispatching on tag value. Tag 10 = field 1 (catalogs,
    // length-delimited message); tag 18 = field 2 (next_page_token, string).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.bigquery.biglake.v1alpha1.Catalog m =
                    input.readMessage(
                        com.google.cloud.bigquery.biglake.v1alpha1.Catalog.parser(),
                        extensionRegistry);
                if (catalogsBuilder_ == null) {
                  ensureCatalogsIsMutable();
                  catalogs_.add(m);
                } else {
                  catalogsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even when parsing aborts partway through.
        onChanged();
      } // finally
      return this;
    }
    // Bitfield tracking builder state: 0x1 = catalogs_ is a private mutable
    // copy, 0x2 = next_page_token has been explicitly set.
    private int bitField0_;

    private java.util.List<com.google.cloud.bigquery.biglake.v1alpha1.Catalog> catalogs_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: replaces the shared/immutable list with a private
    // ArrayList before the first in-place mutation.
    private void ensureCatalogsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        catalogs_ =
            new java.util.ArrayList<com.google.cloud.bigquery.biglake.v1alpha1.Catalog>(catalogs_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily created nested field builder; while null, catalogs_ is authoritative.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1alpha1.Catalog,
            com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder,
            com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder>
        catalogsBuilder_;
    // NOTE: every accessor below follows the standard generated pattern for a
    // repeated message field: when catalogsBuilder_ is null the plain backing
    // list is used; otherwise all reads/writes are delegated to the
    // RepeatedFieldBuilderV3.
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public java.util.List<com.google.cloud.bigquery.biglake.v1alpha1.Catalog> getCatalogsList() {
      if (catalogsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(catalogs_);
      } else {
        return catalogsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public int getCatalogsCount() {
      if (catalogsBuilder_ == null) {
        return catalogs_.size();
      } else {
        return catalogsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.Catalog getCatalogs(int index) {
      if (catalogsBuilder_ == null) {
        return catalogs_.get(index);
      } else {
        return catalogsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public Builder setCatalogs(
        int index, com.google.cloud.bigquery.biglake.v1alpha1.Catalog value) {
      if (catalogsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCatalogsIsMutable();
        catalogs_.set(index, value);
        onChanged();
      } else {
        catalogsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public Builder setCatalogs(
        int index, com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder builderForValue) {
      if (catalogsBuilder_ == null) {
        ensureCatalogsIsMutable();
        catalogs_.set(index, builderForValue.build());
        onChanged();
      } else {
        catalogsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public Builder addCatalogs(com.google.cloud.bigquery.biglake.v1alpha1.Catalog value) {
      if (catalogsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCatalogsIsMutable();
        catalogs_.add(value);
        onChanged();
      } else {
        catalogsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public Builder addCatalogs(
        int index, com.google.cloud.bigquery.biglake.v1alpha1.Catalog value) {
      if (catalogsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCatalogsIsMutable();
        catalogs_.add(index, value);
        onChanged();
      } else {
        catalogsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public Builder addCatalogs(
        com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder builderForValue) {
      if (catalogsBuilder_ == null) {
        ensureCatalogsIsMutable();
        catalogs_.add(builderForValue.build());
        onChanged();
      } else {
        catalogsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public Builder addCatalogs(
        int index, com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder builderForValue) {
      if (catalogsBuilder_ == null) {
        ensureCatalogsIsMutable();
        catalogs_.add(index, builderForValue.build());
        onChanged();
      } else {
        catalogsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public Builder addAllCatalogs(
        java.lang.Iterable<? extends com.google.cloud.bigquery.biglake.v1alpha1.Catalog> values) {
      if (catalogsBuilder_ == null) {
        ensureCatalogsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, catalogs_);
        onChanged();
      } else {
        catalogsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public Builder clearCatalogs() {
      if (catalogsBuilder_ == null) {
        // Reset to the shared empty list and drop the "mutable copy" bit.
        catalogs_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        catalogsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public Builder removeCatalogs(int index) {
      if (catalogsBuilder_ == null) {
        ensureCatalogsIsMutable();
        catalogs_.remove(index);
        onChanged();
      } else {
        catalogsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder getCatalogsBuilder(
        int index) {
      return getCatalogsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder getCatalogsOrBuilder(
        int index) {
      if (catalogsBuilder_ == null) {
        return catalogs_.get(index);
      } else {
        return catalogsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder>
        getCatalogsOrBuilderList() {
      if (catalogsBuilder_ != null) {
        return catalogsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(catalogs_);
      }
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder addCatalogsBuilder() {
      return getCatalogsFieldBuilder()
          .addBuilder(com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder addCatalogsBuilder(
        int index) {
      return getCatalogsFieldBuilder()
          .addBuilder(
              index, com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The catalogs from the specified project.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1;</code>
     */
    public java.util.List<com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder>
        getCatalogsBuilderList() {
      return getCatalogsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 on first builder-view access;
    // ownership of the list moves into it, so catalogs_ is nulled afterwards.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1alpha1.Catalog,
            com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder,
            com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder>
        getCatalogsFieldBuilder() {
      if (catalogsBuilder_ == null) {
        catalogsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.bigquery.biglake.v1alpha1.Catalog,
                com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder,
                com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder>(
                catalogs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        catalogs_ = null;
      }
      return catalogsBuilder_;
    }
    // Stored as either a String or a ByteString; lazily converted and cached in
    // whichever representation was requested last (standard generated pattern).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and keep the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Encode the cached String once and keep the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Reject non-UTF-8 bytes up front; proto3 strings must be valid UTF-8.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Unknown-field handling is delegated unchanged to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse)
  // Singleton default (all fields unset) instance, created eagerly at class load.
  private static final com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse();
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that routes all wire parsing through the Builder.mergeFrom logic,
  // attaching the partially-built message to any parse failure.
  private static final com.google.protobuf.Parser<ListCatalogsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListCatalogsResponse>() {
        @java.lang.Override
        public ListCatalogsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListCatalogsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListCatalogsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
oracle/nosql | 37,738 | kvmain/src/main/java/oracle/kv/impl/sna/StorageNodeAgentInterface.java | /*-
* Copyright (C) 2011, 2025 Oracle and/or its affiliates. All rights reserved.
*
* This file was distributed by Oracle as part of a version of Oracle NoSQL
* Database made available at:
*
* http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html
*
* Please see the LICENSE file included in the top-level directory of the
* appropriate version of Oracle NoSQL Database for a copy of the license and
* additional information.
*/
package oracle.kv.impl.sna;
import java.rmi.RemoteException;
import java.util.List;
import java.util.Set;
import oracle.kv.impl.admin.param.SecurityParams.KrbPrincipalInfo;
import oracle.kv.impl.measurement.ServiceStatusChange;
import oracle.kv.impl.metadata.Metadata;
import oracle.kv.impl.metadata.MetadataInfo;
import oracle.kv.impl.param.LoadParameters;
import oracle.kv.impl.param.ParameterMap;
import oracle.kv.impl.rep.monitor.StatsPacket;
import oracle.kv.impl.security.AuthContext;
import oracle.kv.impl.sna.StorageNodeAgentAPI.CredentialHashes;
import oracle.kv.impl.sna.masterBalance.MasterBalancingInterface;
import oracle.kv.impl.topo.AdminId;
import oracle.kv.impl.topo.ArbNode;
import oracle.kv.impl.topo.ArbNodeId;
import oracle.kv.impl.topo.RepNodeId;
import oracle.kv.impl.topo.ResourceId;
import oracle.kv.impl.util.registry.VersionedRemote;
import com.sleepycat.je.rep.StateChangeEvent;
/**
* The interface to the Storage Node Agent. The SNA is run as a process on
* each of the Storage Nodes. It provides process control for each Storage
* Node as well as a mechanism for passing parameters to the processes it
* controls.
*
* Before a StorageNodeAgent can be used as part of a store, it must be
* registered by calling the {@link #register} method. Until an SNA
* is registered, all other methods will throw an exception.
*
* Exceptions thrown from this interface are nearly always indicative of a
* serious problem such as a corrupt configuration or network problem. In
* general the "worker" methods try hard to do what they've been asked. Most
* state-changing operations are idempotent in that they can be retried and
* will ignore the fact that it may be a retry. This handles the situation
* where the caller may have exited before knowing the resulting state of the
* call.
*
* A number of the methods imply an expected state when called. For example,
* calling createRepNode() implies that the caller expects that the RepNode in
* question has not already been created. Rather than throwing an exception
* the method should log the situation and return a value to the caller
* indicating that things were not as expected. The sense of the return values
* used is true for "the implied state was correct" and false for "the implied
* state was not correct." The return values do not indicate success or
* failure of the operation. If the operation does not throw an exception it
* succeeded.
*/
public interface StorageNodeAgentInterface extends
VersionedRemote, MasterBalancingInterface {
    /**
     * Returns the service status associated with the SNA
     *
     * @param authCtx security context for the operation, or null if none
     * @param serialVersion RMI serialization version negotiated with the caller
     * @return the current status of this Storage Node Agent
     * @since 3.0
     */
    public StorageNodeStatus ping(AuthContext authCtx,
                                  short serialVersion)
        throws RemoteException;

    /**
     * Registers this Storage Node to be part of a store. This method should be
     * called at most once during the lifetime of a Storage Node. All other
     * methods will fail until this method has been called. Uses the bootstrap
     * hostname and port.
     *
     * After this method is called the handle used to access it will no longer
     * be valid and will need to be re-acquired. The name of the service will
     * also have changed to its permanent name.
     *
     * @param globalParams kvstore wide settings, including the store name.
     * @param storageNodeParams parameters for the new storage node required
     * for it to set up normal service, including registry port and storage
     * node id.
     * @param hostingAdmin set to true if this Storage Node is physically
     * hosting the Admin for the store.
     *
     * @return List<ParameterMap> which has two parameter maps, one for basic
     * storage node information and one that is the map of mount points. This
     * information is destined for the caller's copy of StorageNodeParams.
     *
     * @since 3.0
     */
    public List<ParameterMap> register(ParameterMap globalParams,
                                       ParameterMap storageNodeParams,
                                       boolean hostingAdmin,
                                       AuthContext authCtx,
                                       short serialVersion)
        throws RemoteException;
    /**
     * Stops a running Storage Node Agent, optionally stopping all running
     * services it is managing, without specifying a reason.
     *
     * @param stopServices if true stop running services
     * @param force force a shutdown
     *
     * @since 3.0
     * @deprecated since 22.3, use {@link #shutdown(boolean, boolean, String,
     * AuthContext, short)} instead
     */
    @Deprecated
    public void shutdown(boolean stopServices,
                         boolean force,
                         AuthContext authCtx,
                         short serialVersion)
        throws RemoteException;

    /**
     * Stops a running Storage Node Agent, optionally stopping all running
     * services it is managing.
     *
     * @param stopServices if true stop running services
     * @param force force a shutdown
     * @param reason the reason for the shutdown, or null
     * @since 22.3
     */
    public void shutdown(boolean stopServices,
                         boolean force,
                         String reason,
                         AuthContext authCtx,
                         short serialVersion)
        throws RemoteException;
    /**
     * Creates and starts an Admin instance in the store. This will cause a new
     * process to be created containing the Admin. This should be called for
     * each instance of the Admin, up to the desired Admin replication factor.
     * The Storage Node Agent will continue to start this Admin instance upon
     * future restarts unless it is explicitly stopped.
     *
     * @param adminParams the configuration parameters of this Admin instance
     *
     * @return true if the Admin is successfully created.
     *
     * @throws RuntimeException if the operation failed.
     *
     * @since 3.0
     */
    public boolean createAdmin(ParameterMap adminParams,
                               AuthContext authCtx,
                               short serialVersion)
        throws RemoteException;

    /**
     * Starts an Admin instance that has already been defined on this node. The
     * Admin will be started automatically by this StorageNodeAgent if the
     * Storage Node is restarted.
     *
     * @return true if the operation succeeds.
     *
     * @throws RuntimeException if the operation fails or the service does not
     * exist.
     *
     * @since 3.0
     */
    public boolean startAdmin(AuthContext authCtx, short serialVersion)
        throws RemoteException;

    /**
     * Stops an Admin instance that has already been defined on this node,
     * without specifying a reason. The Admin will no longer be started
     * automatically if the Storage Node is restarted.
     *
     * @param force force a shutdown
     *
     * @return true if the Admin was running, false if it was not.
     *
     * @throws RuntimeException if the operation fails or the service does not
     * exist
     *
     * @since 3.0
     * @deprecated since 22.3, use {@link #stopAdmin(boolean, String,
     * AuthContext, short)} instead
     */
    @Deprecated
    public boolean stopAdmin(boolean force,
                             AuthContext authCtx,
                             short serialVersion)
        throws RemoteException;

    /**
     * Stops an Admin instance that has already been defined on this node. The
     * Admin will no longer be started automatically if the Storage Node is
     * restarted.
     *
     * @param force force a shutdown
     * @param reason the reason for the shutdown, or null
     * @return true if the Admin was running, false if it was not.
     * @throws RuntimeException if the operation fails or the service does not
     * exist
     * @since 22.3
     */
    public boolean stopAdmin(boolean force,
                             String reason,
                             AuthContext authCtx,
                             short serialVersion)
        throws RemoteException;

    /**
     * Permanently removes an Admin instance running on this Storage Node,
     * without specifying a reason. Since the StorageNodeAgent cannot know if
     * this is the only Admin instance or not, care should be taken by the
     * Admin itself to prevent removal of the last Admin instance. This method
     * will stop the admin if it is running.
     *
     * @param adminId the unique identifier of the Admin
     *
     * @param deleteData true if the data stored on disk for this Admin
     * should be deleted
     *
     * @return true if the Admin existed, false if it did not.
     *
     * @throws RuntimeException if the operation failed.
     *
     * @since 3.0
     * @deprecated since 22.3, use {@link #destroyAdmin(AdminId, boolean,
     * String, AuthContext, short)} instead
     */
    @Deprecated
    public boolean destroyAdmin(AdminId adminId,
                                boolean deleteData,
                                AuthContext authCtx,
                                short serialVersion)
        throws RemoteException;

    /**
     * Permanently removes an Admin instance running on this Storage Node.
     * Since the StorageNodeAgent cannot know if this is the only Admin
     * instance or not, care should be taken by the Admin itself to prevent
     * removal of the last Admin instance. This method will stop the admin if
     * it is running.
     *
     * @param adminId the unique identifier of the Admin
     * @param deleteData true if the data stored on disk for this Admin
     * should be deleted
     * @param reason the reason for the shutdown, or null
     *
     * @return true if the Admin existed, false if it did not.
     *
     * @throws RuntimeException if the operation failed.
     *
     * @since 22.3
     */
    public boolean destroyAdmin(AdminId adminId,
                                boolean deleteData,
                                String reason,
                                AuthContext authCtx,
                                short serialVersion)
        throws RemoteException;
    /**
     * Query whether a given RepNode has been defined on this Storage Node, as
     * indicated by its configuration existing in the store's configuration
     * file. This is not an indication of its runtime status.
     *
     * @param repNodeId the unique identifier of the RepNode
     *
     * @return true if the specified RepNode exists in the configuration file
     *
     * @since 3.0
     */
    public boolean repNodeExists(RepNodeId repNodeId,
                                 AuthContext authCtx,
                                 short serialVersion)
        throws RemoteException;

    /**
     * Creates and starts a {@link oracle.kv.impl.rep.RepNode} instance
     * on this Storage Node. This will cause a new process to be started to
     * run the RepNode. The StorageNodeAgent will continue to start this
     * RepNode if the Storage Node is restarted unless the RepNode is stopped
     * explicitly.
     *
     * Once the configuration file is written so that a restart of the SNA will
     * also start the RepNode this call will unconditionally succeed, even if
     * it cannot actually start or contact the RepNode itself. This is so that
     * the state of the SNA is consistent with the topology in the admin
     * database.
     *
     * @param repNodeParams the configuration of the RepNode to create
     *
     * @param metadataSet the metadata set for the RepNode
     *
     * @return true if the RepNode is successfully created.
     *
     * @throws RuntimeException if the operation failed.
     *
     * @since 3.0
     */
    public boolean createRepNode(ParameterMap repNodeParams,
                                 Set<Metadata<? extends MetadataInfo>> metadataSet,
                                 AuthContext authCtx,
                                 short serialVersion)
        throws RemoteException;

    /**
     * Starts a {@link oracle.kv.impl.rep.RepNode} that has already been
     * defined on this Storage Node. The RepNode will be started automatically
     * if the Storage Node is restarted or the RepNode exits unexpectedly.
     *
     * @param repNodeId the unique identifier of the RepNode to start
     *
     * @return true if the operation succeeds.
     *
     * @throws RuntimeException if the operation fails or the service does not
     * exist.
     *
     * @since 3.0
     */
    public boolean startRepNode(RepNodeId repNodeId,
                                AuthContext authCtx,
                                short serialVersion)
        throws RemoteException;

    /**
     * Stops a {@link oracle.kv.impl.rep.RepNode} that has already been defined
     * on this Storage Node, without specifying a reason. The RepNode will not
     * be started if the Storage node is restarted until {@link #startRepNode}
     * is called.
     *
     * @param repNodeId the unique identifier of the RepNode to stop
     *
     * @param force force a shutdown
     *
     * @param chkStream true if check stream clients
     *
     * @return true if the RepNode was running, false if it was not.
     *
     * @throws RuntimeException if the operation failed.
     *
     * @since 19.1
     * @deprecated since 22.3, use {@link #stopRepNode(RepNodeId, boolean,
     * boolean, String, AuthContext, short)} instead
     */
    @Deprecated
    public boolean stopRepNode(RepNodeId repNodeId,
                               boolean force,
                               boolean chkStream,
                               AuthContext authCtx,
                               short serialVersion)
        throws RemoteException;

    /**
     * Stops a {@link oracle.kv.impl.rep.RepNode} that has already been defined
     * on this Storage Node. The RepNode will not be started if the Storage
     * node is restarted until {@link #startRepNode} is called.
     *
     * @param repNodeId the unique identifier of the RepNode to stop
     * @param force force a shutdown
     * @param chkStream true if check stream clients
     * @param reason the reason for the shutdown, or null
     * @return true if the RepNode was running, false if it was not.
     * @throws RuntimeException if the operation failed.
     * @since 22.3
     */
    public boolean stopRepNode(RepNodeId repNodeId,
                               boolean force,
                               boolean chkStream,
                               String reason,
                               AuthContext authCtx,
                               short serialVersion)
        throws RemoteException;

    /**
     * Permanently removes the {@link oracle.kv.impl.rep.RepNode} with
     * the specified RepNodeId, without specifying a reason.
     *
     * @param repNodeId the unique identifier of the RepNode to destroy
     * @param deleteData true if the data stored on disk for this RepNode
     * should be deleted
     *
     * @return true if the RepNode is successfully destroyed. This will be the
     * case if it does not exist in the first place.
     *
     * @throws RuntimeException if the operation failed.
     *
     * @since 3.0
     * @deprecated since 22.3, use {@link #destroyRepNode(RepNodeId, boolean,
     * String, AuthContext, short)} instead
     */
    @Deprecated
    public boolean destroyRepNode(RepNodeId repNodeId,
                                  boolean deleteData,
                                  AuthContext authCtx,
                                  short serialVersion)
        throws RemoteException;

    /**
     * Permanently removes the {@link oracle.kv.impl.rep.RepNode} with
     * the specified RepNodeId.
     *
     * @param repNodeId the unique identifier of the RepNode to destroy
     * @param deleteData true if the data stored on disk for this RepNode
     * should be deleted
     * @param reason the reason for the shutdown, or null
     *
     * @return true if the RepNode is successfully destroyed. This will be the
     * case if it does not exist in the first place.
     *
     * @throws RuntimeException if the operation failed.
     *
     * @since 22.3
     */
    public boolean destroyRepNode(RepNodeId repNodeId,
                                  boolean deleteData,
                                  String reason,
                                  AuthContext authCtx,
                                  short serialVersion)
        throws RemoteException;

    /**
     * Modifies the parameters of a {@link oracle.kv.impl.rep.RepNode}
     * RepNode managed by this StorageNode. The new parameters will be written
     * out to the storage node's configuration file. If the service needs
     * notification of the new parameters that is done by the admin/planner.
     *
     * @param repNodeParams the new parameters to configure the rep node. This
     * is a full set of replacement parameters, not partial.
     *
     * @throws RuntimeException if the RepNode is not configured or the
     * operation failed.
     *
     * @since 3.0
     */
    public void newRepNodeParameters(ParameterMap repNodeParams,
                                     AuthContext authCtx,
                                     short serialVersion)
        throws RemoteException;
    /**
     * Query whether a given ArbNode has been defined on this Storage Node, as
     * indicated by its configuration existing in the store's configuration
     * file. This is not an indication of its runtime status.
     *
     * @param arbNodeId the unique identifier of the ArbNode
     *
     * @return true if the specified ArbNode exists in the configuration file
     *
     * @since 4.0
     */
    public boolean arbNodeExists(ArbNodeId arbNodeId,
                                 AuthContext authCtx,
                                 short serialVersion)
        throws RemoteException;

    /**
     * Creates and starts a {@link ArbNode} instance
     * on this Storage Node. This will cause a new process to be started to
     * run the ArbNode. The StorageNodeAgent will continue to start this
     * ArbNode if the Storage Node is restarted unless the ArbNode is stopped
     * explicitly.
     *
     * Once the configuration file is written so that a restart of the SNA will
     * also start the ArbNode this call will unconditionally succeed, even if
     * it cannot actually start or contact the ArbNode itself. This is so that
     * the state of the SNA is consistent with the topology in the admin
     * database.
     *
     * @param arbNodeParams the configuration of the ArbNode to create
     *
     * @return true if the ArbNode is successfully created.
     *
     * @throws RuntimeException if the operation failed.
     *
     * @since 4.0
     */
    public boolean createArbNode(ParameterMap arbNodeParams,
                                 AuthContext authCtx,
                                 short serialVersion)
        throws RemoteException;

    /**
     * Starts a {@link ArbNode} that has already been
     * defined on this Storage Node. The ArbNode will be started automatically
     * if the Storage Node is restarted or the ArbNode exits unexpectedly.
     *
     * @param arbNodeId the unique identifier of the ArbNode to start
     *
     * @return true if the operation succeeds.
     *
     * @throws RuntimeException if the operation fails or the service does not
     * exist.
     *
     * @since 4.0
     */
    public boolean startArbNode(ArbNodeId arbNodeId,
                                AuthContext authCtx,
                                short serialVersion)
        throws RemoteException;

    /**
     * Stops a {@link ArbNode} that has already been defined on this Storage
     * Node, without specifying a reason. The ArbNode will not be started if
     * the Storage node is restarted until {@link #startArbNode} is called.
     *
     * @param arbNodeId the unique identifier of the ArbNode to stop
     *
     * @param force force a shutdown
     *
     * @return true if the ArbNode was running, false if it was not.
     *
     * @throws RuntimeException if the operation failed.
     *
     * @since 4.0
     * @deprecated since 22.3, use {@link #stopArbNode(ArbNodeId, boolean,
     * String, AuthContext, short)} instead
     */
    @Deprecated
    public boolean stopArbNode(ArbNodeId arbNodeId,
                               boolean force,
                               AuthContext authCtx,
                               short serialVersion)
        throws RemoteException;

    /**
     * Stops a {@link ArbNode} that has already been defined on this Storage
     * Node. The ArbNode will not be started if the Storage node is restarted
     * until {@link #startArbNode} is called.
     *
     * @param arbNodeId the unique identifier of the ArbNode to stop
     * @param force force a shutdown
     * @param reason the reason for the shutdown, or null
     * @return true if the ArbNode was running, false if it was not.
     * @throws RuntimeException if the operation failed.
     * @since 22.3
     */
    public boolean stopArbNode(ArbNodeId arbNodeId,
                               boolean force,
                               String reason,
                               AuthContext authCtx,
                               short serialVersion)
        throws RemoteException;

    /**
     * Permanently removes the {@link ArbNode} with
     * the specified ArbNodeId, without specifying a reason.
     *
     * @param arbNodeId the unique identifier of the ArbNode to destroy
     *
     * @param deleteData true if the data stored on disk for this ArbNode
     * should be deleted
     *
     * @return true if the ArbNode is successfully destroyed. This will be the
     * case if it does not exist in the first place.
     *
     * @throws RuntimeException if the operation failed.
     *
     * @since 4.0
     * @deprecated since 22.3, use {@link #destroyArbNode(ArbNodeId, boolean,
     * String, AuthContext, short)} instead
     */
    @Deprecated
    public boolean destroyArbNode(ArbNodeId arbNodeId,
                                  boolean deleteData,
                                  AuthContext authCtx,
                                  short serialVersion)
        throws RemoteException;

    /**
     * Permanently removes the {@link ArbNode} with
     * the specified ArbNodeId.
     *
     * @param arbNodeId the unique identifier of the ArbNode to destroy
     * @param deleteData true if the data stored on disk for this ArbNode
     * should be deleted
     * @param reason the reason for the shutdown, or null
     *
     * @return true if the ArbNode is successfully destroyed. This will be the
     * case if it does not exist in the first place.
     *
     * @throws RuntimeException if the operation failed.
     *
     * @since 22.3
     */
    public boolean destroyArbNode(ArbNodeId arbNodeId,
                                  boolean deleteData,
                                  String reason,
                                  AuthContext authCtx,
                                  short serialVersion)
        throws RemoteException;
/**
* Checks the specified parameters. Throws an IllegalArgumentException if
* a parameter is found to be invalid. If id is non-null then the parameters
* are for that service. Otherwise the global parameters are checked.
*
* @param params parameter map to check
* @param id the service associated with the parameters or null
*
* @throws IllegalArgumentException if an invalid parameter is found
*
* @since 4.3
*/
public void checkParameters(ParameterMap params, ResourceId id,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
    /**
     * Modifies the parameters of an {@link ArbNode} managed by this
     * StorageNode. The new parameters will be written
     * out to the storage node's configuration file. If the service needs
     * notification of the new parameters that is done by the admin/planner.
     *
     * @param arbNodeParams the new parameters to configure the arb node. This
     * is a full set of replacement parameters, not partial.
     *
     * @throws RuntimeException if the ArbNode is not configured or the
     * operation failed.
     *
     * @since 4.0
     */
    public void newArbNodeParameters(ParameterMap arbNodeParams,
                                     AuthContext authCtx,
                                     short serialVersion)
        throws RemoteException;
    /**
     * Modifies the parameters of an {@link oracle.kv.impl.admin.Admin Admin}
     * managed by this StorageNode. The new parameters will be written
     * out to the storage node's configuration file. Any required notification
     * is done by the admin/planner.
     *
     * @param adminParams the new parameters to configure the admin. This is a
     * full set of replacement parameters, not partial.
     *
     * @throws RuntimeException if the admin is not configured or the
     * operation failed.
     *
     * @since 3.0
     */
    public void newAdminParameters(ParameterMap adminParams,
                                   AuthContext authCtx,
                                   short serialVersion)
        throws RemoteException;
/**
* Modifies the parameters of the current Storage Node. The new
* parameters will be written out to the storage node's configuration file
* and if also present, the bootstrap config file.
*
* @param params the new parameters to configure the storage
* node. This can be a partial set but must include both bootstrap and
* StorageNodeParams to change. It may also be a map of mount points to
* be applied to the storage node and the bootstrap parameters.
*
* @throws RuntimeException if the StorageNode is not configured or the
* operation failed.
*
* @since 3.0
*/
public void newStorageNodeParameters(ParameterMap params,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Modifies the global parameters of the current Storage Node. The new
* parameters will be written out to the storage node's configuration file.
* Any required notification is done by the admin/planner.
*
* @param params the new store-wide global parameters
*
* @throws RuntimeException if the StorageNode is not configured or the
* operation failed.
*
* @since 3.0
*/
public void newGlobalParameters(ParameterMap params,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Get SNA parameters.
*
* @since 3.0
*/
public LoadParameters getParams(AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Returns current information from the SN.
*/
StorageNodeInfo getInfo(AuthContext authCtx, short serialVersion)
throws RemoteException;
/**
* Get SNA Kerberos service principal information.
*
* @since 3.5
*/
public KrbPrincipalInfo getKrbPrincipalInfo(AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Returns information about service start problems if the service is
* started as a process. Problems may be JVM initialization or
* synchronous failures from the service itself during startup.
*
* @param rid is the ResourceId of the service
*
* @return the buffer of startup information if there was a problem. Null
* is returned if there was no startup problem.
*
* @throws RuntimeException if the service does not exist.
*
* @since 3.0
*/
public StringBuilder getStartupBuffer(ResourceId rid,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Snapshot methods.
*/
/**
* Create the named snapshot.
*
* @since 3.0
*/
public void createSnapshot(RepNodeId rnid,
String name,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Create the named snapshot.
* @since 3.0
*/
public void createSnapshot(AdminId aid, String name,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Remove the named snapshot from all managed services on this storage node
*
* @since 3.0
*/
public void removeSnapshot(RepNodeId rnid, String name,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* @since 3.0
*/
public void removeSnapshot(AdminId aid, String name,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Remove all snapshots all managed services on this storage node
*
* @since 3.0
*/
public void removeAllSnapshots(RepNodeId rnid,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* @since 3.0
*/
public void removeAllSnapshots(AdminId aid,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
    /**
     * List the snapshots present on this Storage Node. The SN will choose the
     * first managed service it can find and return the list of file names.
     *
     * @return an array of file names for the snapshots. If no snapshots are
     * present this is a zero-length array.
     *
     * @since 3.0
     */
    public String [] listSnapshots(AuthContext authCtx, short serialVersion)
        throws RemoteException;
/**
* Snapshot configurations of current storage node in specified snapshot.
* @param snapshotName full name of the snapshot.
*/
public void createSnapshotConfig(String snapshotName,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Remove snapshot configurations of current storage node for specified
* snapshot.
* @param snapshotName full name of the snapshot to be removed.
*/
public void removeSnapshotConfig(String snapshotName,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Remove all snapshots of configurations for current storage node.
*/
public void removeAllSnapshotConfigs(AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Note that the status has changed for an RN or Arbiter service managed by
* this SN
*
* @param serviceId the ID of the RN or Arbiter
* @param newStatus information about the changed service status
* @since 21.2
*/
void updateNodeStatus(ResourceId serviceId,
ServiceStatusChange newStatus,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Note that the status has changed for the Admin service managed by this
* SN.
*
* @param newStatus information about the changed service status
* @param isMaster whether the admin is the master
* @since 21.2
*/
void updateAdminStatus(ServiceStatusChange newStatus,
boolean isMaster,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Note new statistics from an RN or Arbiter service managed by this SN.
*
* @param serviceId the ID of the RN or Arbiter
* @param packet the statistics
* @since 21.2
*/
void receiveStats(ResourceId serviceId,
StatsPacket packet,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Note new statistics from the admin managed by this SN.
*
* @param packet the statistics
* @since 21.2
*/
void receiveAdminStats(StatsPacket packet,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Note that parameters have changed for a service managed by this SN.
*
* @param serviceId the ID of the service
* @param newMap the new service parameters
* @since 21.2
*/
void receiveNewParams(ResourceId serviceId,
ParameterMap newMap,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Note that status has changed for a plan being run by the admin managed
* by this SN.
*
* @param planStatus a string describing the plan status change
* @since 21.2
*/
void updatePlanStatus(String planStatus,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Note that the replication state has changed for an RN managed by this
* SN.
*
* @param rnId the ID of the RN
* @param changeEvent an event describing the state change
* @since 21.2
*/
void updateReplicationState(RepNodeId rnId,
StateChangeEvent changeEvent,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Returns information about TLS credentials for this SNA as a
* JSON-formatted string.
*
* @return information about TLS credentials in JSON format
* @throws RemoteException if a network failure occurs
* @since 24.4
*/
String getTlsCredentialsInfo(AuthContext authCtx, short serialVersion)
throws RemoteException;
/**
* Attempts to retrieve any new TLS credentials and store them in the
* update directory.
*
* @return a description of the results of retrieving credentials
* @throws IllegalStateException if there was a problem retrieving the
* credentials
* @throws RemoteException if a network failure occurs
* @since 24.4
*/
public String retrieveTlsCredentials(AuthContext authCtx,
short serialVersion)
throws RemoteException;
/**
* Verify the correctness of TLS credential updates, returning the hashes
* of the keystore and truststore files that will be installed after the
* updates, if any, are installed. If the force flag was specified, skip
* verifying the credentials within the keystores.
*
* @param force whether the force flag was specified
* @return the credential hashes
* @throws RemoteException if a network failure occurs
* @throws IllegalStateException if a problem is detected
* @since 24.4
*/
public CredentialHashes verifyTlsCredentialUpdates(boolean force,
AuthContext authCtx,
short serialVersion)
throws RemoteException;
    /**
     * Add entries from the truststore update found in the update directory to
     * the installed truststore, and update the client truststore.
     *
     * @return a description of the update performed
     * @throws IllegalStateException if the update fails
     * @throws RemoteException if a network failure occurs
     * @since 24.4
     */
    public String addTruststoreUpdates(AuthContext authCtx,
                                       short serialVersion)
        throws RemoteException;
    /**
     * Install the keystore update found in the update directory.
     *
     * @param keystoreHash presumably the expected hash of the keystore update,
     * checked before installation (see verifyTlsCredentialUpdates) — confirm
     * @return a description of the update performed
     * @throws IllegalStateException if the update fails
     * @throws RemoteException if a network failure occurs
     * @since 24.4
     */
    public String installKeystoreUpdate(String keystoreHash,
                                        AuthContext authCtx,
                                        short serialVersion)
        throws RemoteException;
    /**
     * Install the truststore update found in the update directory, and update
     * the client truststore.
     *
     * @param truststoreHash presumably the expected hash of the truststore
     * update, checked before installation (see verifyTlsCredentialUpdates) —
     * confirm
     * @return a description of the update performed
     * @throws IllegalStateException if the update fails
     * @throws RemoteException if a network failure occurs
     * @since 24.4
     */
    public String installTruststoreUpdate(String truststoreHash,
                                          AuthContext authCtx,
                                          short serialVersion)
        throws RemoteException;
}
|
apache/jackrabbit-oak | 36,619 | oak-search/src/test/java/org/apache/jackrabbit/oak/plugins/index/IndexQueryCommonTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.index;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.api.ResultRow;
import org.apache.jackrabbit.oak.api.Result;
import org.apache.jackrabbit.oak.commons.junit.LogCustomizer;
import org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants;
import org.apache.jackrabbit.oak.query.AbstractQueryTest;
import org.apache.jackrabbit.oak.query.SQL2Parser;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.event.Level;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.jcr.query.Query;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.JcrConstants.NT_UNSTRUCTURED;
import static org.apache.jackrabbit.oak.api.QueryEngine.NO_BINDINGS;
import static org.apache.jackrabbit.oak.api.Type.STRING;
import static org.apache.jackrabbit.oak.api.Type.STRINGS;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.REINDEX_PROPERTY_NAME;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.PROP_VALUE_REGEX;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Tests the query engine using the default index implementation: the
* IndexProvider
*/
public abstract class IndexQueryCommonTest extends AbstractQueryTest {
protected Tree indexDefn;
protected IndexOptions indexOptions;
protected TestRepository repositoryOptionsUtil;
private LogCustomizer logCustomizer;
private final String nativeWarnLog = "Native queries are deprecated. Query:";
@Before
public void setupLogger(){
logCustomizer = LogCustomizer.forLogger(SQL2Parser.class.getName()).enable(Level.WARN)
.contains(nativeWarnLog).create();
logCustomizer.starting();
}
    @After
    public void closeLogger(){
        // Detach the log capture installed in setupLogger() so that later
        // tests start from a clean logger state.
        logCustomizer.finished();
    }
    /**
     * Builds the single shared full-text index definition used by every test
     * in this class: a v2 index on nt:base with path-restriction evaluation,
     * a regex rule that full-text indexes all properties, several property
     * and function rules, an ordered Date property, and the index tag "x".
     */
    @Override
    protected void createTestIndexNode() throws Exception {
        Tree index = root.getTree("/");
        indexDefn = createTestIndexNode(index, indexOptions.getIndexType());
        TestUtil.useV2(indexDefn);
        indexDefn.setProperty(FulltextIndexConstants.EVALUATE_PATH_RESTRICTION, true);
        indexDefn.setProperty("tags", "x");
        Tree props = TestUtil.newRulePropTree(indexDefn, "nt:base");
        props.getParent().setProperty(FulltextIndexConstants.INDEX_NODE_NAME, true);
        TestUtil.enablePropertyIndex(props, "c1/p", false);
        TestUtil.enableForFullText(props, FulltextIndexConstants.REGEX_ALL_PROPS, true);
        TestUtil.enablePropertyIndex(props, "a/name", false);
        TestUtil.enablePropertyIndex(props, "b/name", false);
        TestUtil.enableFunctionIndex(props, "length([name])");
        TestUtil.enableFunctionIndex(props, "lower([name])");
        TestUtil.enableFunctionIndex(props, "upper([name])");
        TestUtil.enableForFullText(props, "propa", false);
        TestUtil.enableForFullText(props, "propb", false);
        Tree dateProp = TestUtil.enableForOrdered(props, "propDate");
        dateProp.setProperty(FulltextIndexConstants.PROP_TYPE, "Date");
        // Note - certain tests in this class like #sql2 test regex based like queries.
        // And since all the tests here use this common full text index - please be careful while adding any new properties.
        // For example - #sql2() tests with a query on length of name property.
        // Since this is a fulltext index with a regex property that indexes everything, those property names are also indexed.
        // So if we add any property with propName that has length equal to what that test expects - that will effectively break the #sql2() test (giving more results).
        // Ideally one would see the test failing while adding new properties - but there have been cases where this test was ignored due to a different reason
        // and adding a new property added more failure reasons.
        // So just be careful while changing the test collateral/setup here.
        root.commit();
    }
    // TODO : The below 3 tests - #sql1, #sq2 and #sql2FullText need refactoring.
    // These are huge tests with multiple queries running and verification happening in the end by comparing against results in an expected test file.
    // These could possibly be broken down into several smaller tests instead which would make debugging much easier.
    /** Runs the scripted SQL-1 query suite and compares against sql1.txt. */
    @Test
    public void sql1() throws Exception {
        test("sql1.txt");
    }
    /** Runs the scripted SQL-2 query suite and compares against sql2.txt. */
    @Test
    public void sql2() throws Exception {
        test("sql2.txt");
    }
    /** Runs the scripted SQL-2 full-text suite against sql2-fulltext.txt. */
    @Test
    public void sql2FullText() throws Exception {
        test("sql2-fulltext.txt");
    }
@Test
public void testValueRegex() throws Exception {
Tree test = root.getTree("/").addChild("test");
Tree a = test.addChild("a");
Tree b = test.addChild("b");
a.setProperty("name", "hello");
b.setProperty("name", "hello pattern");
root.commit();
final String query = "select [jcr:path] from [nt:base] where isdescendantnode('/test') and contains(*, 'hello')";
assertEventually(() -> {
Iterator<String> result = executeQuery(query, Query.JCR_SQL2).iterator();
List<String> paths = new ArrayList<>();
result.forEachRemaining(paths::add);
assertEquals(2, paths.size());
assertEquals(paths.get(0), a.getPath());
assertEquals(paths.get(1), b.getPath());
});
indexDefn.setProperty(PROP_VALUE_REGEX, "pat*");
indexDefn.setProperty(REINDEX_PROPERTY_NAME, true);
root.commit();
assertEventually(() -> {
Iterator<String> result = executeQuery(query, Query.JCR_SQL2).iterator();
List<String> paths = new ArrayList<>();
result.forEachRemaining(paths::add);
assertEquals(1, paths.size());
assertEquals(paths.get(0), b.getPath());
});
}
@Test
public void descendantTest() throws Exception {
Tree test = root.getTree("/").addChild("test");
test.addChild("a");
test.addChild("b");
root.commit();
assertEventually(() -> {
Iterator<String> result = executeQuery(
"select [jcr:path] from [nt:base] where isdescendantnode('/test')",
Query.JCR_SQL2).iterator();
assertTrue(result.hasNext());
assertEquals("/test/a", result.next());
assertEquals("/test/b", result.next());
assertFalse(result.hasNext());
});
}
@Test
public void descendantTestWithIndexTag() throws Exception {
Tree test = root.getTree("/").addChild("test");
test.addChild("a");
test.addChild("b");
root.commit();
assertEventually(() -> {
Iterator<String> result = executeQuery(
"select [jcr:path] from [nt:base] where isdescendantnode('/test') option (index tag x)",
Query.JCR_SQL2).iterator();
assertTrue(result.hasNext());
assertEquals("/test/a", result.next());
assertEquals("/test/b", result.next());
assertFalse(result.hasNext());
});
}
    /**
     * EXPLAIN of the tagged descendant query must show the tagged index being
     * selected (expected plan fragment supplied by the concrete subclass).
     */
    @Test
    public void descendantTestWithIndexTagExplain() throws Exception {
        Tree test = root.getTree("/").addChild("test");
        test.addChild("a");
        test.addChild("b");
        root.commit();
        String query = "explain select [jcr:path] from [nt:base] where isdescendantnode('/test') option (index tag x)";
        assertEventually(getAssertionForExplain(query, Query.JCR_SQL2, getExplainValueForDescendantTestWithIndexTagExplain(), false));
    }
    // Check if this is a valid behaviour or not ?
    // This was discovered when we removed setTraversalEnabled(false); from the test setup.
    /**
     * Same EXPLAIN as above but with no content created: currently ignored
     * because the tagged index is not picked when traversal is cheaper.
     */
    @Ignore("Index not picked even when using option tag if traversal cost is lower")
    @Test
    public void descendantTestWithIndexTagExplainWithNoData() {
        String query = "explain select [jcr:path] from [nt:base] where isdescendantnode('/test') option (index tag x)";
        assertEventually(getAssertionForExplain(query, Query.JCR_SQL2, getExplainValueForDescendantTestWithIndexTagExplain(), false));
    }
@Test
public void descendantTest2() throws Exception {
Tree test = root.getTree("/").addChild("test");
test.addChild("a").setProperty("name", List.of("Hello", "World"), STRINGS);
test.addChild("b").setProperty("name", "Hello");
root.commit();
assertEventually(() -> {
Iterator<String> result = executeQuery(
"select [jcr:path] from [nt:base] where isdescendantnode('/test') and name='World'",
Query.JCR_SQL2).iterator();
assertTrue(result.hasNext());
assertEquals("/test/a", result.next());
assertFalse(result.hasNext());
});
}
    /**
     * A self-join on ischildnode() from the root lists the root's direct
     * children (including the built-in /jcr:system and /oak:index) in path
     * order.
     */
    @Test
    public void isChildNodeTest() throws Exception {
        Tree tree = root.getTree("/");
        Tree parents = tree.addChild("parents");
        parents.addChild("p0").setProperty("id", "0");
        parents.addChild("p1").setProperty("id", "1");
        parents.addChild("p2").setProperty("id", "2");
        Tree children = tree.addChild("children");
        children.addChild("c1").setProperty("p", "1");
        children.addChild("c2").setProperty("p", "2");
        children.addChild("c3").setProperty("p", "3");
        children.addChild("c4").setProperty("p", "4");
        root.commit();
        assertEventually(() -> {
            Iterator<String> result = executeQuery(
                    "select p.[jcr:path], p2.[jcr:path] from [nt:base] as p inner join [nt:base] as p2 on ischildnode(p2, p) where p.[jcr:path] = '/'",
                    Query.JCR_SQL2).iterator();
            assertTrue(result.hasNext());
            assertEquals("/, /children", result.next());
            assertEquals("/, /jcr:system", result.next());
            assertEquals("/, /oak:index", result.next());
            assertEquals("/, /parents", result.next());
            assertFalse(result.hasNext());
        });
    }
@Test
public void contains() throws Exception {
String h = "Hello" + System.currentTimeMillis();
String w = "World" + System.currentTimeMillis();
Tree test = root.getTree("/").addChild("test");
test.addChild("a").setProperty("name", List.of(h, w), STRINGS);
test.addChild("b").setProperty("name", h);
root.commit();
// query 'hello'
assertEventually(() ->
assertQuery("/jcr:root//*[jcr:contains(., '" + h + "')]", "xpath", List.of("/test/a", "/test/b"))
);
// query 'world'
assertEventually(() ->
assertQuery("/jcr:root//*[jcr:contains(., '" + w + "')]", "xpath", List.of("/test/a"))
);
}
    /** Wildcard matching across a dash-separated token; ignored, see OAK-2424. */
    @Ignore("OAK-2424")
    @Test
    public void containsDash() throws Exception {
        Tree test = root.getTree("/").addChild("test");
        test.addChild("a").setProperty("name", "hello-wor");
        test.addChild("b").setProperty("name", "hello-world");
        test.addChild("c").setProperty("name", "hello");
        root.commit();
        assertQuery("/jcr:root//*[jcr:contains(., 'hello-wor*')]", "xpath", List.of("/test/a", "/test/b"));
        assertQuery("/jcr:root//*[jcr:contains(., '*hello-wor*')]", "xpath", List.of("/test/a", "/test/b"));
    }
    /** Prefix wildcards inside multi-token values; ignored, see OAK-2424. */
    @Ignore("OAK-2424")
    @Test
    public void multiPhraseQuery() throws Exception {
        Tree test = root.getTree("/").addChild("test");
        test.addChild("a").setProperty("dc:format", "type:application/pdf");
        test.addChild("b").setProperty("dc:format", "progress");
        root.commit();
        assertQuery("/jcr:root//*[jcr:contains(@dc:format, 'pro*')]", "xpath", List.of("/test/b"));
        assertQuery("/jcr:root//*[jcr:contains(@dc:format, 'type:appli*')]", "xpath", List.of("/test/a"));
    }
@Test
public void containsPath() throws Exception {
Tree test = root.getTree("/").addChild("test");
test.addChild("a").setProperty("name", "/parent/child/node");
root.commit();
String stmt = "//*[jcr:contains(., '/parent/child')]";
assertEventually(() -> assertQuery(stmt, "xpath", List.of("/test/a")));
}
@Test
public void containsPathNum() throws Exception {
Tree test = root.getTree("/").addChild("test");
Tree a = test.addChild("a");
a.setProperty("name", "/segment1/segment2/segment3");
root.commit();
String stmt = "//*[jcr:contains(., '/segment1/segment2')]";
assertEventually(() -> assertQuery(stmt, "xpath", List.of("/test/a")));
}
@Test
public void containsPathStrict() throws Exception {
root.getTree("/").addChild("matchOnPath");
root.getTree("/").addChild("match_on_path");
root.commit();
String stmt = "//*[jcr:contains(., 'match')]";
assertEventually(() -> assertQuery(stmt, "xpath", List.of("/match_on_path")));
}
@Test
public void containsPathStrictNum() throws Exception {
root.getTree("/").addChild("matchOnPath1234");
root.getTree("/").addChild("match_on_path1234");
root.commit();
String stmt = "//*[jcr:contains(., 'match')]";
assertEventually(() -> assertQuery(stmt, "xpath", List.of("/match_on_path1234")));
}
    /**
     * OAK-1208 property existence constraints break queries: combining a
     * full-text constraint with an OR over property values must return only
     * the nodes satisfying both.
     */
    @Test
    public void testOAK1208() throws Exception {
        Tree t = root.getTree("/").addChild("containsWithMultipleOr");
        Tree one = t.addChild("one");
        one.setProperty("p", "dam/smartcollection");
        one.setProperty("t", "media");
        Tree two = t.addChild("two");
        two.setProperty("p", "dam/collection");
        two.setProperty("t", "media");
        Tree three = t.addChild("three");
        three.setProperty("p", "dam/hits");
        three.setProperty("t", "media");
        root.commit();
        String stmt = "//*[jcr:contains(., 'media') and (@p = 'dam/smartcollection' or @p = 'dam/collection') ]";
        assertEventually(() -> assertQuery(stmt, "xpath", List.of(one.getPath(), two.getPath())));
    }
    /**
     * A native('lucene', ...) query runs and applies the negative clause
     * (-title:bar), and the parser logs the deprecation WARN captured by the
     * LogCustomizer from setupLogger().
     */
    @Test
    public void testNativeLuceneQuery() throws Exception {
        String nativeQueryString = "select [jcr:path] from [nt:base] where native('lucene', 'title:foo -title:bar')";
        Tree test = root.getTree("/").addChild("test");
        test.addChild("a").setProperty("title", "foo");
        test.addChild("b").setProperty("title", "bar");
        root.commit();
        assertEventually(() -> {
            Iterator<String> result = executeQuery(nativeQueryString, Query.JCR_SQL2).iterator();
            assertTrue(result.hasNext());
            assertEquals("/test/a", result.next());
            assertFalse(result.hasNext());
        });
        // the deprecation warning must mention the offending query
        assertNotEquals(0, logCustomizer.getLogs().size());
        assertTrue("native query WARN message is not present, message in Logger is "
                + logCustomizer.getLogs(), logCustomizer.getLogs().get(0).contains(nativeQueryString));
    }
    /**
     * More-like-this expressed through a native Lucene "mlt" query: nodes
     * textually similar to /test/a come first and the dissimilar node is
     * never returned; the native-query deprecation WARN is logged.
     */
    @Test
    public void repSimilarAsNativeQuery() throws Exception {
        String nativeQueryString = "select [jcr:path] from [nt:base] where " +
                "native('lucene', 'mlt?stream.body=/test/a&mlt.fl=:path&mlt.mindf=0&mlt.mintf=0')";
        Tree test = root.getTree("/").addChild("test");
        test.addChild("a").setProperty("text", "Hello World");
        test.addChild("b").setProperty("text", "He said Hello and then the world said Hello as well.");
        test.addChild("c").setProperty("text", "He said Hi.");
        root.commit();
        assertEventually(() -> {
            Iterator<String> result = executeQuery(nativeQueryString, Query.JCR_SQL2, false, true).iterator();
            assertTrue(result.hasNext());
            assertEquals("/test/a", result.next());
            assertTrue(result.hasNext());
            assertEquals("/test/b", result.next());
            while (result.hasNext()) {
                assertNotEquals("/test/c", result.next());
            }
        });
        assertNotEquals(0, logCustomizer.getLogs().size());
        assertTrue("native query WARN message is not present, message in Logger is "
                + logCustomizer.getLogs(), logCustomizer.getLogs().get(0).contains(nativeWarnLog));
    }
    /**
     * similar(., '/test/a') in SQL-2: the reference node ranks first, the
     * closest match second, and all nodes sharing tokens with the reference
     * are returned (the unrelated node /test/e is excluded).
     */
    @Test
    public void repSimilarQuery() throws Exception {
        String query = "select [jcr:path] from [nt:base] where similar(., '/test/a')";
        Tree test = root.getTree("/").addChild("test");
        test.addChild("a").setProperty("text", "Hello World Hello World");
        test.addChild("b").setProperty("text", "Hello World");
        test.addChild("c").setProperty("text", "World");
        test.addChild("d").setProperty("text", "Hello");
        test.addChild("e").setProperty("text", "Bye Bye");
        test.addChild("f").setProperty("text", "Hello");
        test.addChild("g").setProperty("text", "World");
        test.addChild("h").setProperty("text", "Hello");
        root.commit();
        assertEventually(() -> {
            Iterator<String> result = executeQuery(query, Query.JCR_SQL2).iterator();
            assertTrue(result.hasNext());
            assertEquals("/test/a", result.next());
            assertTrue(result.hasNext());
            assertEquals("/test/b", result.next());
            assertTrue(result.hasNext());
            assertQuery(query, List.of("/test/a", "/test/b", "/test/c", "/test/d", "/test/f", "/test/g", "/test/h"));
        });
    }
    /** Same similarity scenario as repSimilarQuery, expressed via rep:similar in XPath. */
    @Test
    public void repSimilarXPathQuery() throws Exception {
        String query = "//element(*, nt:base)[rep:similar(., '/test/a')]";
        Tree test = root.getTree("/").addChild("test");
        test.addChild("a").setProperty("text", "Hello World Hello World");
        test.addChild("b").setProperty("text", "Hello World");
        test.addChild("c").setProperty("text", "World");
        test.addChild("d").setProperty("text", "Hello");
        test.addChild("e").setProperty("text", "Bye Bye");
        test.addChild("f").setProperty("text", "Hello");
        test.addChild("g").setProperty("text", "World");
        test.addChild("h").setProperty("text", "Hello");
        root.commit();
        assertEventually(() -> {
            Iterator<String> result = executeQuery(query, "xpath").iterator();
            assertTrue(result.hasNext());
            assertEquals("/test/a", result.next());
            assertTrue(result.hasNext());
            assertEquals("/test/b", result.next());
            assertQuery(query, "xpath",
                    List.of("/test/a", "/test/b", "/test/c", "/test/d", "/test/f", "/test/g", "/test/h"));
        });
    }
@Test
public void testTokenizeCN() throws Exception {
Tree t = root.getTree("/").addChild("containsCN");
Tree one = t.addChild("one");
one.setProperty("t", "美女衬衫");
root.commit();
assertEventually(() -> assertQuery("//*[jcr:contains(., '美女')]", "xpath", List.of(one.getPath())));
}
@Test
public void testMultiValuedPropUpdate() throws Exception {
Tree test = root.getTree("/").addChild("test");
String child = "child";
String mulValuedProp = "prop";
test.addChild(child).setProperty(mulValuedProp, List.of("foo", "bar"), STRINGS);
root.commit();
assertEventually(() -> assertQuery("/jcr:root//*[jcr:contains(@" + mulValuedProp + ", 'foo')]", "xpath", List.of("/test/" + child)));
test.getChild(child).setProperty(mulValuedProp, List.of(), STRINGS);
root.commit();
assertEventually(() -> assertQuery("/jcr:root//*[jcr:contains(@" + mulValuedProp + ", 'foo')]", "xpath", new ArrayList<>()));
test.getChild(child).setProperty(mulValuedProp, List.of("bar"), STRINGS);
root.commit();
assertEventually(() -> assertQuery("/jcr:root//*[jcr:contains(@" + mulValuedProp + ", 'foo')]", "xpath", new ArrayList<>()));
test.getChild(child).removeProperty(mulValuedProp);
root.commit();
assertEventually(() -> assertQuery("/jcr:root//*[jcr:contains(@" + mulValuedProp + ", 'foo')]", "xpath", new ArrayList<>()));
}
@SuppressWarnings("unused")
private static void walktree(final Tree t) {
System.out.println("+ " + t.getPath());
for (PropertyState p : t.getProperties()) {
System.out.println(" -" + p.getName() + "=" + p.getValue(STRING));
}
for (Tree t1 : t.getChildren()) {
walktree(t1);
}
}
    /**
     * OAK-3371: NOT CONTAINS must include nodes that do not have the property
     * at all (/test/c) and must correctly negate multi-token phrases.
     * Traversal is disabled so the results come from the index.
     */
    @Test
    public void oak3371() throws Exception {
        setTraversalEnabled(false);
        Tree t, t1;
        t = root.getTree("/");
        t = child(t, "test", NT_UNSTRUCTURED);
        t1 = child(t, "a", NT_UNSTRUCTURED);
        t1.setProperty("foo", "bar");
        t1 = child(t, "b", NT_UNSTRUCTURED);
        t1.setProperty("foo", "cat");
        t1 = child(t, "c", NT_UNSTRUCTURED);
        t1 = child(t, "d", NT_UNSTRUCTURED);
        t1.setProperty("foo", "bar cat");
        root.commit();
        assertEventually(() -> {
            assertQuery(
                    "SELECT * FROM [nt:unstructured] WHERE ISDESCENDANTNODE('/test') AND CONTAINS(foo, 'bar')",
                    List.of("/test/a", "/test/d"));
            assertQuery(
                    "SELECT * FROM [nt:unstructured] WHERE ISDESCENDANTNODE('/test') AND NOT CONTAINS(foo, 'bar')",
                    List.of("/test/b", "/test/c"));
            assertQuery(
                    "SELECT * FROM [nt:unstructured] WHERE ISDESCENDANTNODE('/test') AND CONTAINS(foo, 'bar cat')",
                    List.of("/test/d"));
            assertQuery(
                    "SELECT * FROM [nt:unstructured] WHERE ISDESCENDANTNODE('/test') AND NOT CONTAINS(foo, 'bar cat')",
                    List.of("/test/c"));
        });
        setTraversalEnabled(true);
    }
@Test
public void fullTextQueryTestAllowLeadingWildcards() throws Exception {
//add content
Tree test = root.getTree("/").addChild("test");
test.addChild("a").setProperty("propa", "ship_to_canada");
test.addChild("b").setProperty("propa", "steamship_to_canada");
test.addChild("c").setProperty("propa", "ship_to_can");
test.addChild("d").setProperty("propa", "starship");
test.addChild("e").setProperty("propa", "Hello starship");
root.commit();
String query = "//*[jcr:contains(@propa, 'Hello *ship')] ";
assertEventually(() -> assertQuery(query, XPATH, List.of("/test/e")));
}
@Test
public void fullTextQueryTestAllowLeadingWildcards2() throws Exception {
//add content
Tree test = root.getTree("/").addChild("test");
test.addChild("a").setProperty("propa", "ship_to_canada");
test.addChild("b").setProperty("propa", "steamship_to_canada");
test.addChild("c").setProperty("propa", "ship_to_can");
test.addChild("d").setProperty("propa", "starship");
test.addChild("e").setProperty("propa", "Hello starship");
root.commit();
String query = "//*[jcr:contains(@propa, '*ship to can*')] ";
assertEventually(() -> assertQuery(query, XPATH, List.of("/test/a", "/test/b", "/test/c")));
}
    /**
     * Exercises general full-text semantics against the common index:
     * case-insensitivity, '*' and '?' wildcards, implicit AND between terms,
     * '-' exclusion, and explicit OR.
     */
    @Test
    public void fullTextQueryGeneric() throws Exception {
        Tree test = root.getTree("/").addChild("test");
        Tree testNodeA = test.addChild("nodea");
        testNodeA.setProperty("a", "hello");
        testNodeA.setProperty("b", "ocean");
        Tree testNodeB = test.addChild("nodeb");
        testNodeB.setProperty("a", "hello world");
        testNodeB.setProperty("b", "soccer-shoe");
        Tree testNodeC = test.addChild("nodec");
        testNodeC.setProperty("a", "hello");
        testNodeC.setProperty("b", "world");
        root.commit();
        assertEventually(() -> {
            // case insensitive
            assertQuery("//*[jcr:contains(., 'WORLD')] ", XPATH, List.of("/test/nodeb", "/test/nodec"));
            // wild card
            assertQuery("//*[jcr:contains(., 'Hell*')] ", XPATH, List.of("/test/nodea", "/test/nodeb", "/test/nodec"));
            assertQuery("//*[jcr:contains(., 'He*o')] ", XPATH, List.of("/test/nodea", "/test/nodeb", "/test/nodec"));
            assertQuery("//*[jcr:contains(., '*llo')] ", XPATH, List.of("/test/nodea", "/test/nodeb", "/test/nodec"));
            assertQuery("//*[jcr:contains(., '?orld')] ", XPATH, List.of("/test/nodeb", "/test/nodec"));
            assertQuery("//*[jcr:contains(., 'wo?ld')] ", XPATH, List.of("/test/nodeb", "/test/nodec"));
            assertQuery("//*[jcr:contains(., 'worl?')] ", XPATH, List.of("/test/nodeb", "/test/nodec"));
            // space explained as AND
            assertQuery("//*[jcr:contains(., 'hello world')] ", XPATH, List.of("/test/nodeb", "/test/nodec"));
            // exclude
            assertQuery("//*[jcr:contains(., 'hello -world')] ", XPATH, List.of("/test/nodea"));
            // explicit OR
            assertQuery("//*[jcr:contains(., 'ocean OR world')] ", XPATH, List.of("/test/nodea", "/test/nodeb", "/test/nodec"));
        });
    }
@Test
public void testInequalityQuery_native() throws Exception {
    // Six nodes: one 'hello', two 'foo', two 'bar' and one with no propa at all.
    Tree parent = root.getTree("/").addChild("test");
    parent.addChild("test1").setProperty("propa", "hello");
    parent.addChild("test2").setProperty("propa", "foo");
    parent.addChild("test3").setProperty("propa", "foo");
    parent.addChild("test4").setProperty("propa", "bar");
    parent.addChild("test5").setProperty("propa", "bar");
    parent.addChild("test6");
    root.commit();
    // The explain plan must contain the index-native inequality clause supplied by the subclass.
    String explainQuery = "explain /jcr:root/test//*[propa!='bar']";
    assertEventually(getAssertionForExplain(explainQuery, XPATH, getContainsValueForInequalityQuery_native(), false));
    // propa != 'bar' excludes the two 'bar' nodes AND the node where propa is absent.
    String resultQuery = "/jcr:root/test//*[propa!='bar']";
    assertEventually(() -> assertQuery(resultQuery, XPATH, List.of("/test/test1", "/test/test2", "/test/test3")));
}
@Test
public void testNotNullQuery_native() throws Exception {
    // Three nodes with propa set, one without it.
    Tree parent = root.getTree("/").addChild("test");
    parent.addChild("test1").setProperty("propa", "hello");
    parent.addChild("test2").setProperty("propa", "foo");
    parent.addChild("test3").setProperty("propa", "foo");
    parent.addChild("test4");
    root.commit();
    // Explain plan must carry the subclass-provided "is not null" index clause.
    String explainQuery = "explain select * from [nt:base] as s where propa is not null and ISDESCENDANTNODE(s, '/test')";
    assertEventually(getAssertionForExplain(explainQuery, SQL2, getContainsValueForNotNullQuery_native(), false));
    // Only nodes that actually define propa are returned.
    String resultQuery = "select * from [nt:base] as s where propa is not null and ISDESCENDANTNODE(s, '/test')";
    assertEventually(() -> assertQuery(resultQuery, SQL2, List.of("/test/test1", "/test/test2", "/test/test3")));
}
@Test
public void testInequalityQueryWithoutAncestorFilter_native() throws Exception {
    // Unlike testInequalityQuery_native, nodes sit directly under the root,
    // so the query carries no ancestor/path restriction.
    Tree rootTree = root.getTree("/");
    rootTree.addChild("test1").setProperty("propa", "hello");
    rootTree.addChild("test2").setProperty("propa", "foo");
    rootTree.addChild("test3").setProperty("propa", "foo");
    rootTree.addChild("test4").setProperty("propa", "bar");
    rootTree.addChild("test5").setProperty("propa", "bar");
    rootTree.addChild("test6");
    root.commit();
    String explainQuery = "explain //*[propa!='bar']";
    assertEventually(getAssertionForExplain(explainQuery, XPATH, getContainsValueForInequalityQueryWithoutAncestorFilter_native(), false));
    // 'bar' nodes and the propa-less node are excluded, as in the filtered variant.
    String resultQuery = "//*[propa!='bar']";
    assertEventually(() -> assertQuery(resultQuery, XPATH, List.of("/test1", "/test2", "/test3")));
}
@Test
public void testEqualityInequalityCombined_native() throws Exception {
// Only /test/test1 carries BOTH properties; /test/test6 has propb='world' but no propa.
Tree test = root.getTree("/").addChild("test");
test.addChild("test1").setProperty("propa", "hello");
test.getChild("test1").setProperty("propb", "world");
test.addChild("test2").setProperty("propa", "foo");
test.addChild("test3").setProperty("propa", "foo");
test.addChild("test4").setProperty("propa", "bar");
test.addChild("test5").setProperty("propa", "bar");
test.addChild("test6").setProperty("propb", "world");
root.commit();
// The explain plan must contain the subclass-provided native clause combining
// the equality and inequality restrictions.
String query = "explain /jcr:root/test//*[propa!='bar' and propb='world']";
assertEventually(getAssertionForExplain(query, XPATH, getContainsValueForEqualityInequalityCombined_native(), false));
String query2 = "/jcr:root/test//*[propa!='bar' and propb='world']";
// Expected - nodes with both properties defined and propb with value 'world' and propa with value not equal to bar should be returned
// /test/test6 should NOT be returned because for it propa = null
assertEventually(() -> assertQuery(query2, XPATH, List.of("/test/test1")));
}
@Test
public void testEqualityQuery_native() throws Exception {
    // Three 'foo' nodes and a single 'bar' node.
    Tree parent = root.getTree("/").addChild("test");
    parent.addChild("test1").setProperty("propa", "foo");
    parent.addChild("test2").setProperty("propa", "foo");
    parent.addChild("test3").setProperty("propa", "foo");
    parent.addChild("test4").setProperty("propa", "bar");
    root.commit();
    // Explain plan must contain the subclass-provided native equality clause.
    String explainQuery = "explain /jcr:root/test//*[propa='bar']";
    assertEventually(getAssertionForExplain(explainQuery, XPATH, getContainsValueForEqualityQuery_native(), false));
    // Only the single 'bar' node matches the equality restriction.
    String resultQuery = "/jcr:root/test//*[propa='bar']";
    assertEventually(() -> assertQuery(resultQuery, XPATH, List.of("/test/test4")));
}
@Test
public void testDateQueryWithIncorrectData() throws Exception {
    Tree test = root.getTree("/").addChild("test");
    // test1 stores a String where a Date is expected; test2/test3 store proper dates.
    test.addChild("test1").setProperty("propDate", "foo");
    test.getChild("test1").setProperty("propa", "bar");
    test.addChild("test2").setProperty("propDate", "2021-01-22T01:02:03.000Z", Type.DATE);
    // Use getChild for the second property (was addChild, which only works because
    // Oak's Tree.addChild returns the existing child; getChild states the intent).
    test.getChild("test2").setProperty("propa", "bar");
    test.addChild("test3").setProperty("propDate", "2022-01-22T01:02:03.000Z", Type.DATE);
    root.commit();
    // Query on propa should work fine even if the data on propDate is of incorrect type (i.e String instead of Date)
    // It should return both /test/test1 -> where content for propDate is of incorrect data type
    // and /test/test2 -> where content for propDate is of correct data type.
    String query = "/jcr:root/test//*[propa='bar']";
    assertEventually(() -> assertQuery(query, XPATH, List.of("/test/test2", "/test/test1")));
    // Check inequality query on propDate - this should not return /test/test1 -> since that node should not have been indexed for propDate
    // due to incorrect data type in the content for this property.
    String query2 = "/jcr:root/test//*[propDate!='2021-01-22T01:02:03.000Z']";
    assertEventually(() -> assertQuery(query2, XPATH, List.of("/test/test3")));
}
@Test
public void testQueryWithDifferentDataTypesForSameProperty() throws Exception {
    // propa doesn't have any type defined in index - so by default it's a String type property
    Tree parent = root.getTree("/").addChild("test");
    parent.addChild("test1").setProperty("propa", "bar");
    parent.addChild("test2").setProperty("propa", 10);
    parent.addChild("test3").setProperty("propa", 10L);
    parent.addChild("test4").setProperty("propa", true);
    root.commit();
    // propa stays queryable regardless of the stored value type;
    // lucene and elastic behave the same here.
    String stringQuery = "/jcr:root/test//*[propa='bar']";
    assertEventually(() -> assertQuery(stringQuery, XPATH, List.of("/test/test1")));
    String booleanQuery = "/jcr:root/test//*[propa=true]";
    assertEventually(() -> assertQuery(booleanQuery, XPATH, List.of("/test/test4")));
    // int and long values are matched alike by the numeric comparison.
    String numericQuery = "/jcr:root/test//*[propa=10]";
    assertEventually(() -> assertQuery(numericQuery, XPATH, List.of("/test/test2", "/test/test3")));
}
@Test
public void testDateQueryWithCorrectData() throws Exception {
    // Only test1 carries the date property; both nodes carry propa='foo'.
    Tree parent = root.getTree("/").addChild("test");
    parent.addChild("test1").setProperty("propa", "foo");
    parent.getChild("test1").setProperty("propDate", "2021-01-22T01:02:03.000Z", Type.DATE);
    parent.addChild("test2").setProperty("propa", "foo");
    root.commit();
    // Date-typed equality matches exactly the node that defines propDate.
    String dateQuery = "/jcr:root/test//*[propDate='2021-01-22T01:02:03.000Z']";
    assertEventually(() -> assertQuery(dateQuery, XPATH, List.of("/test/test1")));
    // The plain String property still matches both nodes.
    String stringQuery = "/jcr:root/test//*[propa='foo']";
    assertEventually(() -> assertQuery(stringQuery, XPATH, List.of("/test/test1", "/test/test2")));
}
@Test
public void testDateQueryWithCorrectData_Ordered() throws Exception {
    Tree test = root.getTree("/").addChild("test");
    test.addChild("test1").setProperty("propa", "foo");
    test.getChild("test1").setProperty("propDate", "2021-01-22T01:02:03.000Z", Type.DATE);
    test.addChild("test2").setProperty("propa", "foo");
    // Use getChild for the second property on an already-created node (was addChild,
    // which only works because Oak's Tree.addChild returns the existing child);
    // this also matches the test1 pattern above.
    test.getChild("test2").setProperty("propDate", "2019-01-22T01:02:03.000Z", Type.DATE);
    test.addChild("test3").setProperty("propa", "foo");
    test.getChild("test3").setProperty("propDate", "2020-01-22T01:02:03.000Z", Type.DATE);
    root.commit();
    // Results must come back ordered by propDate descending: 2021, 2020, 2019.
    String query = "/jcr:root/test//*[propa='foo'] order by @propDate descending";
    assertEventually(() -> assertQuery(query, XPATH, List.of("/test/test1", "/test/test3", "/test/test2"), true, true));
}
/** Creates child {@code n} under {@code t} with the given JCR primary type and returns it. */
private static Tree child(Tree t, String n, String type) {
    Tree added = t.addChild(n);
    added.setProperty(JCR_PRIMARYTYPE, type, Type.NAME);
    return added;
}
/** Index-specific fragment expected in the explain plan of {@link #testEqualityQuery_native()}. */
public abstract String getContainsValueForEqualityQuery_native();
/** Index-specific fragment expected in the explain plan of {@link #testInequalityQuery_native()}. */
public abstract String getContainsValueForInequalityQuery_native();
/** Index-specific fragment expected in the explain plan of {@link #testInequalityQueryWithoutAncestorFilter_native()}. */
public abstract String getContainsValueForInequalityQueryWithoutAncestorFilter_native();
/** Index-specific fragment expected in the explain plan of {@link #testEqualityInequalityCombined_native()}. */
public abstract String getContainsValueForEqualityInequalityCombined_native();
/** Index-specific fragment expected in the explain plan of {@link #testNotNullQuery_native()}. */
public abstract String getContainsValueForNotNullQuery_native();
/** Implementation-specific explain output for the descendant-search-with-index-tag test. */
public abstract String getExplainValueForDescendantTestWithIndexTagExplain();
/**
 * Builds a runnable assertion (for use with {@link #assertEventually(Runnable)}) that
 * executes an "explain" query and checks its plan.
 *
 * @param query         the explain query to execute
 * @param language      query language constant (e.g. XPATH, SQL2)
 * @param expected      the full plan (if {@code matchComplete}) or a substring of it
 * @param matchComplete whether the plan must equal {@code expected} exactly,
 *                      or merely contain it
 */
protected Runnable getAssertionForExplain(String query, String language, String expected, boolean matchComplete) {
    return () -> {
        Result result;
        try {
            result = executeQuery(query, language, NO_BINDINGS);
        } catch (ParseException e) {
            // Keep the full cause instead of flattening it to a message string
            // (fail(e.getMessage()) would discard the stack trace).
            throw new AssertionError("Failed to parse query: " + query, e);
        }
        // An explain query always yields a single row carrying the "plan" column.
        ResultRow row = result.getRows().iterator().next();
        String plan = row.getValue("plan").toString();
        if (matchComplete) {
            assertEquals(expected, plan);
        } else {
            assertTrue("Expected plan to contain [" + expected + "] but was [" + plan + "]",
                    plan.contains(expected));
        }
    };
}
/** Retries {@code r} for up to 9 seconds so asynchronous indexing can catch up. */
protected static void assertEventually(Runnable r) {
    TestUtil.assertEventually(r, 9000);
}
}
|
googleads/google-ads-java | 37,809 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/common/CampaignThirdPartyBrandLiftIntegrationPartner.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/common/third_party_integration_partners.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.common;
/**
* <pre>
* Container for third party Brand Lift integration data for Campaign.
* Next Id = 4
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner}
*/
public final class CampaignThirdPartyBrandLiftIntegrationPartner extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner)
CampaignThirdPartyBrandLiftIntegrationPartnerOrBuilder {
private static final long serialVersionUID = 0L;
// Use CampaignThirdPartyBrandLiftIntegrationPartner.newBuilder() to construct.
private CampaignThirdPartyBrandLiftIntegrationPartner(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CampaignThirdPartyBrandLiftIntegrationPartner() {
brandLiftIntegrationPartner_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CampaignThirdPartyBrandLiftIntegrationPartner();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyBrandLiftIntegrationPartner_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyBrandLiftIntegrationPartner_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner.class, com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner.Builder.class);
}
private int bitField0_;
public static final int BRAND_LIFT_INTEGRATION_PARTNER_FIELD_NUMBER = 1;
private int brandLiftIntegrationPartner_ = 0;
/**
* <pre>
* Allowed third party integration partners for Brand Lift verification.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner brand_lift_integration_partner = 1;</code>
* @return The enum numeric value on the wire for brandLiftIntegrationPartner.
*/
@java.lang.Override public int getBrandLiftIntegrationPartnerValue() {
return brandLiftIntegrationPartner_;
}
/**
* <pre>
* Allowed third party integration partners for Brand Lift verification.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner brand_lift_integration_partner = 1;</code>
* @return The brandLiftIntegrationPartner.
*/
@java.lang.Override public com.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner getBrandLiftIntegrationPartner() {
com.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner result = com.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner.forNumber(brandLiftIntegrationPartner_);
return result == null ? com.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner.UNRECOGNIZED : result;
}
public static final int BRAND_LIFT_INTEGRATION_PARTNER_DATA_FIELD_NUMBER = 2;
private com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brandLiftIntegrationPartnerData_;
/**
* <pre>
* Third party partner data for YouTube Brand Lift verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
* @return Whether the brandLiftIntegrationPartnerData field is set.
*/
@java.lang.Override
public boolean hasBrandLiftIntegrationPartnerData() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Third party partner data for YouTube Brand Lift verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
* @return The brandLiftIntegrationPartnerData.
*/
@java.lang.Override
public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData getBrandLiftIntegrationPartnerData() {
return brandLiftIntegrationPartnerData_ == null ? com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : brandLiftIntegrationPartnerData_;
}
/**
* <pre>
* Third party partner data for YouTube Brand Lift verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder getBrandLiftIntegrationPartnerDataOrBuilder() {
return brandLiftIntegrationPartnerData_ == null ? com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : brandLiftIntegrationPartnerData_;
}
public static final int SHARE_COST_FIELD_NUMBER = 3;
private boolean shareCost_ = false;
/**
* <pre>
* If true, then cost data will be shared with this vendor.
* </pre>
*
* <code>bool share_cost = 3;</code>
* @return The shareCost.
*/
@java.lang.Override
public boolean getShareCost() {
return shareCost_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (brandLiftIntegrationPartner_ != com.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner.UNSPECIFIED.getNumber()) {
output.writeEnum(1, brandLiftIntegrationPartner_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getBrandLiftIntegrationPartnerData());
}
if (shareCost_ != false) {
output.writeBool(3, shareCost_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (brandLiftIntegrationPartner_ != com.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, brandLiftIntegrationPartner_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getBrandLiftIntegrationPartnerData());
}
if (shareCost_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, shareCost_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner other = (com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner) obj;
if (brandLiftIntegrationPartner_ != other.brandLiftIntegrationPartner_) return false;
if (hasBrandLiftIntegrationPartnerData() != other.hasBrandLiftIntegrationPartnerData()) return false;
if (hasBrandLiftIntegrationPartnerData()) {
if (!getBrandLiftIntegrationPartnerData()
.equals(other.getBrandLiftIntegrationPartnerData())) return false;
}
if (getShareCost()
!= other.getShareCost()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + BRAND_LIFT_INTEGRATION_PARTNER_FIELD_NUMBER;
hash = (53 * hash) + brandLiftIntegrationPartner_;
if (hasBrandLiftIntegrationPartnerData()) {
hash = (37 * hash) + BRAND_LIFT_INTEGRATION_PARTNER_DATA_FIELD_NUMBER;
hash = (53 * hash) + getBrandLiftIntegrationPartnerData().hashCode();
}
hash = (37 * hash) + SHARE_COST_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
getShareCost());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Container for third party Brand Lift integration data for Campaign.
* Next Id = 4
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner)
com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartnerOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyBrandLiftIntegrationPartner_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyBrandLiftIntegrationPartner_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner.class, com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner.Builder.class);
}
// Construct using com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getBrandLiftIntegrationPartnerDataFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
brandLiftIntegrationPartner_ = 0;
brandLiftIntegrationPartnerData_ = null;
if (brandLiftIntegrationPartnerDataBuilder_ != null) {
brandLiftIntegrationPartnerDataBuilder_.dispose();
brandLiftIntegrationPartnerDataBuilder_ = null;
}
shareCost_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyBrandLiftIntegrationPartner_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner getDefaultInstanceForType() {
return com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner build() {
com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner buildPartial() {
com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner result = new com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.brandLiftIntegrationPartner_ = brandLiftIntegrationPartner_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.brandLiftIntegrationPartnerData_ = brandLiftIntegrationPartnerDataBuilder_ == null
? brandLiftIntegrationPartnerData_
: brandLiftIntegrationPartnerDataBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.shareCost_ = shareCost_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner) {
return mergeFrom((com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner other) {
if (other == com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner.getDefaultInstance()) return this;
if (other.brandLiftIntegrationPartner_ != 0) {
setBrandLiftIntegrationPartnerValue(other.getBrandLiftIntegrationPartnerValue());
}
if (other.hasBrandLiftIntegrationPartnerData()) {
mergeBrandLiftIntegrationPartnerData(other.getBrandLiftIntegrationPartnerData());
}
if (other.getShareCost() != false) {
setShareCost(other.getShareCost());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
brandLiftIntegrationPartner_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18: {
input.readMessage(
getBrandLiftIntegrationPartnerDataFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24: {
shareCost_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
      // Tracks which fields have been explicitly set on this builder:
      // 0x00000001 = brand_lift_integration_partner,
      // 0x00000002 = brand_lift_integration_partner_data,
      // 0x00000004 = share_cost.
      private int bitField0_;
      // Stored as the raw wire number so enum values unknown to this client
      // version round-trip without loss.
      private int brandLiftIntegrationPartner_ = 0;
      /**
       * <pre>
       * Allowed third party integration partners for Brand Lift verification.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner brand_lift_integration_partner = 1;</code>
       * @return The enum numeric value on the wire for brandLiftIntegrationPartner.
       */
      @java.lang.Override public int getBrandLiftIntegrationPartnerValue() {
        return brandLiftIntegrationPartner_;
      }
      /**
       * <pre>
       * Allowed third party integration partners for Brand Lift verification.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner brand_lift_integration_partner = 1;</code>
       * @param value The enum numeric value on the wire for brandLiftIntegrationPartner to set.
       * @return This builder for chaining.
       */
      public Builder setBrandLiftIntegrationPartnerValue(int value) {
        brandLiftIntegrationPartner_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Allowed third party integration partners for Brand Lift verification.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner brand_lift_integration_partner = 1;</code>
       * @return The brandLiftIntegrationPartner.
       */
      @java.lang.Override
      public com.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner getBrandLiftIntegrationPartner() {
        // forNumber returns null for values this client version does not know;
        // surface those as UNRECOGNIZED rather than null.
        com.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner result = com.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner.forNumber(brandLiftIntegrationPartner_);
        return result == null ? com.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner.UNRECOGNIZED : result;
      }
      /**
       * <pre>
       * Allowed third party integration partners for Brand Lift verification.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner brand_lift_integration_partner = 1;</code>
       * @param value The brandLiftIntegrationPartner to set.
       * @return This builder for chaining.
       */
      public Builder setBrandLiftIntegrationPartner(com.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        brandLiftIntegrationPartner_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Allowed third party integration partners for Brand Lift verification.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandLiftIntegrationPartnerEnum.ThirdPartyBrandLiftIntegrationPartner brand_lift_integration_partner = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearBrandLiftIntegrationPartner() {
        bitField0_ = (bitField0_ & ~0x00000001);
        brandLiftIntegrationPartner_ = 0;
        onChanged();
        return this;
      }
      // Message field storage: exactly one of the plain field or the nested
      // builder is the source of truth at any time (builder wins once created).
      private com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brandLiftIntegrationPartnerData_;
      private com.google.protobuf.SingleFieldBuilderV3<
          com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder> brandLiftIntegrationPartnerDataBuilder_;
      /**
       * <pre>
       * Third party partner data for YouTube Brand Lift verification. This is
       * optional metadata for partners to join or attach data to Ads campaigns.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
       * @return Whether the brandLiftIntegrationPartnerData field is set.
       */
      public boolean hasBrandLiftIntegrationPartnerData() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <pre>
       * Third party partner data for YouTube Brand Lift verification. This is
       * optional metadata for partners to join or attach data to Ads campaigns.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
       * @return The brandLiftIntegrationPartnerData.
       */
      public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData getBrandLiftIntegrationPartnerData() {
        if (brandLiftIntegrationPartnerDataBuilder_ == null) {
          return brandLiftIntegrationPartnerData_ == null ? com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : brandLiftIntegrationPartnerData_;
        } else {
          return brandLiftIntegrationPartnerDataBuilder_.getMessage();
        }
      }
      /**
       * <pre>
       * Third party partner data for YouTube Brand Lift verification. This is
       * optional metadata for partners to join or attach data to Ads campaigns.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
       */
      public Builder setBrandLiftIntegrationPartnerData(com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData value) {
        if (brandLiftIntegrationPartnerDataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          brandLiftIntegrationPartnerData_ = value;
        } else {
          brandLiftIntegrationPartnerDataBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Third party partner data for YouTube Brand Lift verification. This is
       * optional metadata for partners to join or attach data to Ads campaigns.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
       */
      public Builder setBrandLiftIntegrationPartnerData(
          com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder builderForValue) {
        if (brandLiftIntegrationPartnerDataBuilder_ == null) {
          brandLiftIntegrationPartnerData_ = builderForValue.build();
        } else {
          brandLiftIntegrationPartnerDataBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Third party partner data for YouTube Brand Lift verification. This is
       * optional metadata for partners to join or attach data to Ads campaigns.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
       */
      public Builder mergeBrandLiftIntegrationPartnerData(com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData value) {
        if (brandLiftIntegrationPartnerDataBuilder_ == null) {
          // Merge field-by-field only when a non-default message is already
          // set; otherwise simply adopt the incoming message wholesale.
          if (((bitField0_ & 0x00000002) != 0) &&
            brandLiftIntegrationPartnerData_ != null &&
            brandLiftIntegrationPartnerData_ != com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance()) {
            getBrandLiftIntegrationPartnerDataBuilder().mergeFrom(value);
          } else {
            brandLiftIntegrationPartnerData_ = value;
          }
        } else {
          brandLiftIntegrationPartnerDataBuilder_.mergeFrom(value);
        }
        if (brandLiftIntegrationPartnerData_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <pre>
       * Third party partner data for YouTube Brand Lift verification. This is
       * optional metadata for partners to join or attach data to Ads campaigns.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
       */
      public Builder clearBrandLiftIntegrationPartnerData() {
        bitField0_ = (bitField0_ & ~0x00000002);
        brandLiftIntegrationPartnerData_ = null;
        if (brandLiftIntegrationPartnerDataBuilder_ != null) {
          brandLiftIntegrationPartnerDataBuilder_.dispose();
          brandLiftIntegrationPartnerDataBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Third party partner data for YouTube Brand Lift verification. This is
       * optional metadata for partners to join or attach data to Ads campaigns.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
       */
      public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder getBrandLiftIntegrationPartnerDataBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getBrandLiftIntegrationPartnerDataFieldBuilder().getBuilder();
      }
      /**
       * <pre>
       * Third party partner data for YouTube Brand Lift verification. This is
       * optional metadata for partners to join or attach data to Ads campaigns.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
       */
      public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder getBrandLiftIntegrationPartnerDataOrBuilder() {
        if (brandLiftIntegrationPartnerDataBuilder_ != null) {
          return brandLiftIntegrationPartnerDataBuilder_.getMessageOrBuilder();
        } else {
          return brandLiftIntegrationPartnerData_ == null ?
              com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : brandLiftIntegrationPartnerData_;
        }
      }
      /**
       * <pre>
       * Third party partner data for YouTube Brand Lift verification. This is
       * optional metadata for partners to join or attach data to Ads campaigns.
       * </pre>
       *
       * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_lift_integration_partner_data = 2;</code>
       */
      private com.google.protobuf.SingleFieldBuilderV3<
          com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder>
          getBrandLiftIntegrationPartnerDataFieldBuilder() {
        // Lazily creates the nested field builder; from then on the message is
        // owned by the builder and the plain field is cleared.
        if (brandLiftIntegrationPartnerDataBuilder_ == null) {
          brandLiftIntegrationPartnerDataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
              com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder>(
                  getBrandLiftIntegrationPartnerData(),
                  getParentForChildren(),
                  isClean());
          brandLiftIntegrationPartnerData_ = null;
        }
        return brandLiftIntegrationPartnerDataBuilder_;
      }
      private boolean shareCost_ ;
      /**
       * <pre>
       * If true, then cost data will be shared with this vendor.
       * </pre>
       *
       * <code>bool share_cost = 3;</code>
       * @return The shareCost.
       */
      @java.lang.Override
      public boolean getShareCost() {
        return shareCost_;
      }
      /**
       * <pre>
       * If true, then cost data will be shared with this vendor.
       * </pre>
       *
       * <code>bool share_cost = 3;</code>
       * @param value The shareCost to set.
       * @return This builder for chaining.
       */
      public Builder setShareCost(boolean value) {
        shareCost_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * If true, then cost data will be shared with this vendor.
       * </pre>
       *
       * <code>bool share_cost = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearShareCost() {
        bitField0_ = (bitField0_ & ~0x00000004);
        shareCost_ = false;
        onChanged();
        return this;
      }
      // Unknown-field handling is delegated unchanged to the generated superclass.
      @java.lang.Override
      public final Builder setUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner)
  // Shared immutable singleton returned by getDefaultInstance(); initialized
  // eagerly in the static block below.
  private static final com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner();
  }
  public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser: each parse builds via a fresh Builder, attaching the
  // partially-built message to any exception so callers can inspect it.
  private static final com.google.protobuf.Parser<CampaignThirdPartyBrandLiftIntegrationPartner>
      PARSER = new com.google.protobuf.AbstractParser<CampaignThirdPartyBrandLiftIntegrationPartner>() {
    @java.lang.Override
    public CampaignThirdPartyBrandLiftIntegrationPartner parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<CampaignThirdPartyBrandLiftIntegrationPartner> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CampaignThirdPartyBrandLiftIntegrationPartner> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v21.common.CampaignThirdPartyBrandLiftIntegrationPartner getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,487 | java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/ListIntentsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/intent.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* The response message for
* [Intents.ListIntents][google.cloud.dialogflow.v2.Intents.ListIntents].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.ListIntentsResponse}
*/
public final class ListIntentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.ListIntentsResponse)
ListIntentsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListIntentsResponse.newBuilder() to construct.
  private ListIntentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ListIntentsResponse() {
    intents_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListIntentsResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.v2.IntentProto
        .internal_static_google_cloud_dialogflow_v2_ListIntentsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.v2.IntentProto
        .internal_static_google_cloud_dialogflow_v2_ListIntentsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.v2.ListIntentsResponse.class,
            com.google.cloud.dialogflow.v2.ListIntentsResponse.Builder.class);
  }
  public static final int INTENTS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.dialogflow.v2.Intent> intents_;
  /**
   *
   *
   * <pre>
   * The list of agent intents. There will be a maximum number of items
   * returned based on the page_size field in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.dialogflow.v2.Intent> getIntentsList() {
    return intents_;
  }
  /**
   *
   *
   * <pre>
   * The list of agent intents. There will be a maximum number of items
   * returned based on the page_size field in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.dialogflow.v2.IntentOrBuilder>
      getIntentsOrBuilderList() {
    return intents_;
  }
  /**
   *
   *
   * <pre>
   * The list of agent intents. There will be a maximum number of items
   * returned based on the page_size field in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
   */
  @java.lang.Override
  public int getIntentsCount() {
    return intents_.size();
  }
  /**
   *
   *
   * <pre>
   * The list of agent intents. There will be a maximum number of items
   * returned based on the page_size field in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.v2.Intent getIntents(int index) {
    return intents_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The list of agent intents. There will be a maximum number of items
   * returned based on the page_size field in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.v2.IntentOrBuilder getIntentsOrBuilder(int index) {
    return intents_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; decoded lazily and cached on
  // first String access (hence volatile).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no
   * more results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no
   * more results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Cached result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < intents_.size(); i++) {
      output.writeMessage(1, intents_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized; safe because the message is immutable after construction.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < intents_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, intents_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2.ListIntentsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2.ListIntentsResponse other =
        (com.google.cloud.dialogflow.v2.ListIntentsResponse) obj;
    if (!getIntentsList().equals(other.getIntentsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; mixing constants follow the protobuf generator's convention.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getIntentsCount() > 0) {
      hash = (37 * hash) + INTENTS_FIELD_NUMBER;
      hash = (53 * hash) + getIntentsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER or the
  // GeneratedMessageV3 I/O helpers.
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.dialogflow.v2.ListIntentsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The response message for
* [Intents.ListIntents][google.cloud.dialogflow.v2.Intents.ListIntents].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.ListIntentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.ListIntentsResponse)
com.google.cloud.dialogflow.v2.ListIntentsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.IntentProto
.internal_static_google_cloud_dialogflow_v2_ListIntentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.IntentProto
.internal_static_google_cloud_dialogflow_v2_ListIntentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.ListIntentsResponse.class,
com.google.cloud.dialogflow.v2.ListIntentsResponse.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2.ListIntentsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (intentsBuilder_ == null) {
intents_ = java.util.Collections.emptyList();
} else {
intents_ = null;
intentsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2.IntentProto
.internal_static_google_cloud_dialogflow_v2_ListIntentsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.ListIntentsResponse getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2.ListIntentsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.ListIntentsResponse build() {
com.google.cloud.dialogflow.v2.ListIntentsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.ListIntentsResponse buildPartial() {
com.google.cloud.dialogflow.v2.ListIntentsResponse result =
new com.google.cloud.dialogflow.v2.ListIntentsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.dialogflow.v2.ListIntentsResponse result) {
if (intentsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
intents_ = java.util.Collections.unmodifiableList(intents_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.intents_ = intents_;
} else {
result.intents_ = intentsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.dialogflow.v2.ListIntentsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2.ListIntentsResponse) {
return mergeFrom((com.google.cloud.dialogflow.v2.ListIntentsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.v2.ListIntentsResponse other) {
if (other == com.google.cloud.dialogflow.v2.ListIntentsResponse.getDefaultInstance())
return this;
if (intentsBuilder_ == null) {
if (!other.intents_.isEmpty()) {
if (intents_.isEmpty()) {
intents_ = other.intents_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureIntentsIsMutable();
intents_.addAll(other.intents_);
}
onChanged();
}
} else {
if (!other.intents_.isEmpty()) {
if (intentsBuilder_.isEmpty()) {
intentsBuilder_.dispose();
intentsBuilder_ = null;
intents_ = other.intents_;
bitField0_ = (bitField0_ & ~0x00000001);
intentsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getIntentsFieldBuilder()
: null;
} else {
intentsBuilder_.addAllMessages(other.intents_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.dialogflow.v2.Intent m =
input.readMessage(
com.google.cloud.dialogflow.v2.Intent.parser(), extensionRegistry);
if (intentsBuilder_ == null) {
ensureIntentsIsMutable();
intents_.add(m);
} else {
intentsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.dialogflow.v2.Intent> intents_ =
java.util.Collections.emptyList();
private void ensureIntentsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
intents_ = new java.util.ArrayList<com.google.cloud.dialogflow.v2.Intent>(intents_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2.Intent,
com.google.cloud.dialogflow.v2.Intent.Builder,
com.google.cloud.dialogflow.v2.IntentOrBuilder>
intentsBuilder_;
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
*/
public java.util.List<com.google.cloud.dialogflow.v2.Intent> getIntentsList() {
if (intentsBuilder_ == null) {
return java.util.Collections.unmodifiableList(intents_);
} else {
return intentsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
*/
public int getIntentsCount() {
if (intentsBuilder_ == null) {
return intents_.size();
} else {
return intentsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
*/
public com.google.cloud.dialogflow.v2.Intent getIntents(int index) {
if (intentsBuilder_ == null) {
return intents_.get(index);
} else {
return intentsBuilder_.getMessage(index);
}
}
    /**
     * Replaces the intent at {@code index}.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public Builder setIntents(int index, com.google.cloud.dialogflow.v2.Intent value) {
      if (intentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIntentsIsMutable();
        intents_.set(index, value);
        onChanged();
      } else {
        intentsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * Replaces the intent at {@code index} with the built form of
     * {@code builderForValue}.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public Builder setIntents(
        int index, com.google.cloud.dialogflow.v2.Intent.Builder builderForValue) {
      if (intentsBuilder_ == null) {
        ensureIntentsIsMutable();
        intents_.set(index, builderForValue.build());
        onChanged();
      } else {
        intentsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * Appends {@code value} to the intents list.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public Builder addIntents(com.google.cloud.dialogflow.v2.Intent value) {
      if (intentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIntentsIsMutable();
        intents_.add(value);
        onChanged();
      } else {
        intentsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * Inserts {@code value} at {@code index}, shifting later elements.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public Builder addIntents(int index, com.google.cloud.dialogflow.v2.Intent value) {
      if (intentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIntentsIsMutable();
        intents_.add(index, value);
        onChanged();
      } else {
        intentsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * Appends the built form of {@code builderForValue}.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public Builder addIntents(com.google.cloud.dialogflow.v2.Intent.Builder builderForValue) {
      if (intentsBuilder_ == null) {
        ensureIntentsIsMutable();
        intents_.add(builderForValue.build());
        onChanged();
      } else {
        intentsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * Inserts the built form of {@code builderForValue} at {@code index}.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public Builder addIntents(
        int index, com.google.cloud.dialogflow.v2.Intent.Builder builderForValue) {
      if (intentsBuilder_ == null) {
        ensureIntentsIsMutable();
        intents_.add(index, builderForValue.build());
        onChanged();
      } else {
        intentsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * Appends every element of {@code values} to the intents list.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public Builder addAllIntents(
        java.lang.Iterable<? extends com.google.cloud.dialogflow.v2.Intent> values) {
      if (intentsBuilder_ == null) {
        ensureIntentsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, intents_);
        onChanged();
      } else {
        intentsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * Removes all intents and resets the field to its empty default.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public Builder clearIntents() {
      if (intentsBuilder_ == null) {
        intents_ = java.util.Collections.emptyList();
        // Clear the mutability bit so the next write re-copies the list.
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        intentsBuilder_.clear();
      }
      return this;
    }
    /**
     * Removes the intent at {@code index}, shifting later elements down.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public Builder removeIntents(int index) {
      if (intentsBuilder_ == null) {
        ensureIntentsIsMutable();
        intents_.remove(index);
        onChanged();
      } else {
        intentsBuilder_.remove(index);
      }
      return this;
    }
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
*/
public com.google.cloud.dialogflow.v2.Intent.Builder getIntentsBuilder(int index) {
return getIntentsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
*/
public com.google.cloud.dialogflow.v2.IntentOrBuilder getIntentsOrBuilder(int index) {
if (intentsBuilder_ == null) {
return intents_.get(index);
} else {
return intentsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
*/
public java.util.List<? extends com.google.cloud.dialogflow.v2.IntentOrBuilder>
getIntentsOrBuilderList() {
if (intentsBuilder_ != null) {
return intentsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(intents_);
}
}
    /**
     * Appends a new default-valued intent and returns its builder.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public com.google.cloud.dialogflow.v2.Intent.Builder addIntentsBuilder() {
      return getIntentsFieldBuilder()
          .addBuilder(com.google.cloud.dialogflow.v2.Intent.getDefaultInstance());
    }
    /**
     * Inserts a new default-valued intent at {@code index} and returns its
     * builder.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public com.google.cloud.dialogflow.v2.Intent.Builder addIntentsBuilder(int index) {
      return getIntentsFieldBuilder()
          .addBuilder(index, com.google.cloud.dialogflow.v2.Intent.getDefaultInstance());
    }
    /**
     * Returns builders for every intent, forcing nested-builder mode.
     *
     * <code>repeated .google.cloud.dialogflow.v2.Intent intents = 1;</code>
     */
    public java.util.List<com.google.cloud.dialogflow.v2.Intent.Builder> getIntentsBuilderList() {
      return getIntentsFieldBuilder().getBuilderList();
    }
    // Lazily creates the nested-builder support for `intents`. On first use the
    // plain list is handed to the field builder and intents_ is nulled out, so
    // the field builder becomes the single source of truth from then on.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dialogflow.v2.Intent,
            com.google.cloud.dialogflow.v2.Intent.Builder,
            com.google.cloud.dialogflow.v2.IntentOrBuilder>
        getIntentsFieldBuilder() {
      if (intentsBuilder_ == null) {
        intentsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.dialogflow.v2.Intent,
                com.google.cloud.dialogflow.v2.Intent.Builder,
                com.google.cloud.dialogflow.v2.IntentOrBuilder>(
                intents_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        intents_ = null;
      }
      return intentsBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
    /**
     * Sets the next-page token.
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Resets the next-page token to its default (empty) value.
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * Sets the next-page token from raw bytes, which must be valid UTF-8.
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Pure delegation to the superclass; overridden so the return type is this
    // concrete Builder for fluent chaining.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // See setUnknownFields above; same delegation rationale.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.ListIntentsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.ListIntentsResponse)
  // Process-wide singleton default instance; also serves as the identity used
  // by toBuilder() to detect "no fields set".
  private static final com.google.cloud.dialogflow.v2.ListIntentsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.ListIntentsResponse();
  }
  public static com.google.cloud.dialogflow.v2.ListIntentsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser: builds a fresh Builder, merges the input, and returns
  // the partial message. Parse failures carry the partially-built message.
  private static final com.google.protobuf.Parser<ListIntentsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListIntentsResponse>() {
        @java.lang.Override
        public ListIntentsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a protobuf-typed error.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListIntentsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListIntentsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dialogflow.v2.ListIntentsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,545 | java-biglake/proto-google-cloud-biglake-v1/src/main/java/com/google/cloud/bigquery/biglake/v1/UpdateDatabaseRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/biglake/v1/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.biglake.v1;
/**
*
*
* <pre>
* Request message for the UpdateDatabase method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest}
*/
public final class UpdateDatabaseRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest)
UpdateDatabaseRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateDatabaseRequest.newBuilder() to construct.
  private UpdateDatabaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for the default instance and newInstance().
  private UpdateDatabaseRequest() {}
  // Serialization hook: always produces a fresh empty instance.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateDatabaseRequest();
  }
  // Message descriptor, generated from google/cloud/bigquery/biglake/v1/metastore.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.bigquery.biglake.v1.MetastoreProto
        .internal_static_google_cloud_bigquery_biglake_v1_UpdateDatabaseRequest_descriptor;
  }
  // Reflection support: maps descriptor fields to the generated accessors.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.bigquery.biglake.v1.MetastoreProto
        .internal_static_google_cloud_bigquery_biglake_v1_UpdateDatabaseRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest.class,
            com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest.Builder.class);
  }
  // Presence bits: 0x1 = database, 0x2 = update_mask.
  private int bitField0_;
  public static final int DATABASE_FIELD_NUMBER = 1;
  private com.google.cloud.bigquery.biglake.v1.Database database_;
  /**
   * Required. The database to update. Its {@code name} field identifies the
   * database, in the format
   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
   *
   * <code>
   * .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the database field is set.
   */
  @java.lang.Override
  public boolean hasDatabase() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * Returns the database to update; never null — the default instance is
   * returned when the field is unset.
   *
   * <code>
   * .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The database.
   */
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1.Database getDatabase() {
    return database_ == null
        ? com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance()
        : database_;
  }
  /**
   * Read-only view of the database field; same null-safe behavior as
   * {@link #getDatabase()}.
   *
   * <code>
   * .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder getDatabaseOrBuilder() {
    return database_ == null
        ? com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance()
        : database_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   * The list of fields to update. If not set, defaults to all of the fields
   * that are allowed to update. See
   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * Returns the update mask; never null — the default instance is returned
   * when the field is unset.
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   * Read-only view of the update mask; same null-safe behavior as
   * {@link #getUpdateMask()}.
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required fields: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes each present field in field-number order, then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getDatabase());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized byte size and caches it in memoizedSize
  // (-1 is the "not yet computed" sentinel).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getDatabase());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest)) {
return super.equals(obj);
}
com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest other =
(com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest) obj;
if (hasDatabase() != other.hasDatabase()) return false;
if (hasDatabase()) {
if (!getDatabase().equals(other.getDatabase())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  // Hash mixes the descriptor with each present field's number and value;
  // cached after first computation (0 is the "not yet computed" sentinel).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasDatabase()) {
      hash = (37 * hash) + DATABASE_FIELD_NUMBER;
      hash = (53 * hash) + getDatabase().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---- Standard protobuf parse entry points; all delegate to PARSER. ----
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream variants wrap I/O errors via the GeneratedMessageV3 helpers.
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Builders always start from the shared default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // The default instance maps to a fresh empty Builder (nothing to merge).
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for the UpdateDatabase method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest)
com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequestOrBuilder {
    // Same descriptor as the enclosing message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.bigquery.biglake.v1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1_UpdateDatabaseRequest_descriptor;
    }
    // Reflection support: maps descriptor fields to the generated accessors.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.bigquery.biglake.v1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1_UpdateDatabaseRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest.class,
              com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest.Builder.class);
    }
    // Construct using com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested field builders when the runtime requests it
    // (alwaysUseFieldBuilders is a protobuf-runtime debug/test flag).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getDatabaseFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    // Resets every field — and disposes any nested builders — back to the
    // default (unset) state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      database_ = null;
      if (databaseBuilder_ != null) {
        databaseBuilder_.dispose();
        databaseBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.bigquery.biglake.v1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1_UpdateDatabaseRequest_descriptor;
    }
    // Default instance of the message this Builder produces.
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest getDefaultInstanceForType() {
      return com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest.getDefaultInstance();
    }
    // Builds and verifies initialization (always true for this proto3 type).
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest build() {
      com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest buildPartial() {
      com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest result =
          new com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each set field (from the plain field or its nested builder) into
    // the result and mirrors the corresponding presence bits.
    private void buildPartial0(com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.database_ = databaseBuilder_ == null ? database_ : databaseBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // The overrides below delegate straight to the superclass; they exist so
    // the return type is this concrete Builder for fluent chaining.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic merge: dispatches to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest) {
        return mergeFrom((com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge; merging the default instance is a no-op.
    public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest other) {
      if (other == com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest.getDefaultInstance())
        return this;
      if (other.hasDatabase()) {
        mergeDatabase(other.getDatabase());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // Proto3 message with no required fields: builders are always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge: reads tag/value pairs until EOF (tag 0) or an
    // end-group tag; unrecognized tags are preserved as unknown fields.
    // onChanged() fires even on error so partially-merged state is reported.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              // Field 1 (database), wire type 2: tag = (1 << 3) | 2.
              {
                input.readMessage(getDatabaseFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              // Field 2 (update_mask), wire type 2: tag = (2 << 3) | 2.
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits for the builder: 0x1 = database, 0x2 = update_mask.
    private int bitField0_;
    private com.google.cloud.bigquery.biglake.v1.Database database_;
    // Lazily-created nested builder for `database`; null until first use.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1.Database,
            com.google.cloud.bigquery.biglake.v1.Database.Builder,
            com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder>
        databaseBuilder_;
    /**
     * Required. The database to update. Its {@code name} field identifies the
     * database, in the format
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     *
     * <code>
     * .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the database field is set.
     */
    public boolean hasDatabase() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * Returns the database to update; never null — the default instance is
     * returned when unset. Reads from the nested builder when one exists.
     *
     * <code>
     * .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The database.
     */
    public com.google.cloud.bigquery.biglake.v1.Database getDatabase() {
      if (databaseBuilder_ == null) {
        return database_ == null
            ? com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance()
            : database_;
      } else {
        return databaseBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. The database to update.
*
* The database's `name` field is used to identify the database to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDatabase(com.google.cloud.bigquery.biglake.v1.Database value) {
if (databaseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
database_ = value;
} else {
databaseBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The database to update.
*
* The database's `name` field is used to identify the database to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Sets `database` from a builder, building the message eagerly.
    public Builder setDatabase(
        com.google.cloud.bigquery.biglake.v1.Database.Builder builderForValue) {
      if (databaseBuilder_ == null) {
        database_ = builderForValue.build();
      } else {
        databaseBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The database to update.
*
* The database's `name` field is used to identify the database to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Proto merge semantics: if a non-default value is already present, merge field-wise;
    // otherwise replace wholesale.
    public Builder mergeDatabase(com.google.cloud.bigquery.biglake.v1.Database value) {
      if (databaseBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && database_ != null
            // Reference comparison is intentional: the shared default-instance singleton means "unset".
            && database_ != com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance()) {
          getDatabaseBuilder().mergeFrom(value);
        } else {
          database_ = value;
        }
      } else {
        databaseBuilder_.mergeFrom(value);
      }
      if (database_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The database to update.
*
* The database's `name` field is used to identify the database to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Clears `database` back to unset and disposes any nested builder.
    public Builder clearDatabase() {
      bitField0_ = (bitField0_ & ~0x00000001);
      database_ = null;
      if (databaseBuilder_ != null) {
        databaseBuilder_.dispose();
        databaseBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The database to update.
*
* The database's `name` field is used to identify the database to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Returns a mutable builder for `database`, marking the field present as a side effect.
    public com.google.cloud.bigquery.biglake.v1.Database.Builder getDatabaseBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getDatabaseFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The database to update.
*
* The database's `name` field is used to identify the database to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Read-only view of `database` without forcing creation of a nested builder.
    public com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder getDatabaseOrBuilder() {
      if (databaseBuilder_ != null) {
        return databaseBuilder_.getMessageOrBuilder();
      } else {
        return database_ == null
            ? com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance()
            : database_;
      }
    }
/**
*
*
* <pre>
* Required. The database to update.
*
* The database's `name` field is used to identify the database to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the SingleFieldBuilderV3 for `database`; once created, the
    // builder owns the value and the plain field is nulled out.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1.Database,
            com.google.cloud.bigquery.biglake.v1.Database.Builder,
            com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder>
        getDatabaseFieldBuilder() {
      if (databaseBuilder_ == null) {
        databaseBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.bigquery.biglake.v1.Database,
                com.google.cloud.bigquery.biglake.v1.Database.Builder,
                com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder>(
                getDatabase(), getParentForChildren(), isClean());
        database_ = null;
      }
      return databaseBuilder_;
    }
    // Storage for the `update_mask` field (proto field 2) and its lazily created
    // nested builder; presence is tracked in bit 1 of bitField0_.
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
    // True iff `update_mask` has been explicitly set (bit 1 of bitField0_).
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
    // Returns the current `update_mask` value, falling back to the default instance when unset.
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        // Nested builder owns the authoritative value once created.
        return updateMaskBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    // Sets `update_mask` to the given message; rejects null and marks the field present.
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    // Sets `update_mask` from a builder, building the message eagerly.
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    // Proto merge semantics: merge field-wise into an already-set, non-default value;
    // otherwise replace wholesale.
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            // Reference comparison is intentional: the default singleton means "unset".
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    // Clears `update_mask` back to unset and disposes any nested builder.
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    // Returns a mutable builder for `update_mask`, marking the field present as a side effect.
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    // Read-only view of `update_mask` without forcing creation of a nested builder.
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    // Lazily creates the SingleFieldBuilderV3 for `update_mask`; once created, the
    // builder owns the value and the plain field is nulled out.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Generated passthrough to the base builder implementation.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Generated passthrough to the base builder implementation.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest)
  // Singleton default instance, created eagerly at class-load time.
  private static final com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest();
  }
  // Accessor for the shared default (all-fields-unset) instance.
  public static com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser; every parse failure is surfaced as an
  // InvalidProtocolBufferException carrying the partially built message.
  private static final com.google.protobuf.Parser<UpdateDatabaseRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateDatabaseRequest>() {
        @java.lang.Override
        public UpdateDatabaseRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<UpdateDatabaseRequest> parser() {
    return PARSER;
  }
  // Instance-level accessor for the same shared parser.
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateDatabaseRequest> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor for the shared default instance.
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1.UpdateDatabaseRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.storage.index;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.tajo.BuiltinStorages;
import org.apache.tajo.catalog.*;
import org.apache.tajo.common.TajoDataTypes.Type;
import org.apache.tajo.conf.TajoConf;
import org.apache.tajo.datum.DatumFactory;
import org.apache.tajo.storage.*;
import org.apache.tajo.storage.fragment.FileFragment;
import org.apache.tajo.storage.index.bst.BSTIndex;
import org.apache.tajo.storage.index.bst.BSTIndex.BSTIndexReader;
import org.apache.tajo.storage.index.bst.BSTIndex.BSTIndexWriter;
import org.apache.tajo.util.CommonTestingUtil;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Random;
import static org.junit.Assert.*;
@RunWith(Parameterized.class)
public class TestBSTIndex {
  private TajoConf conf;       // per-run Tajo configuration (ROOT_DIR pointed at TEST_PATH)
  private Schema schema;       // five-column table schema shared by all tests
  private TableMeta meta;      // rebuilt in each test for the current data format
  private static final int TUPLE_NUM = 10000;  // rows written per test table
  private static final int LOAD_NUM = 100;     // value handed to BSTIndexWriter.setLoadNum
  private static final String TEST_PATH = "target/test-data/TestIndex";
  private Path testDir;        // fresh per-test working directory (see setUp)
  private FileSystem fs;       // filesystem backing testDir
  private String dataFormat;   // parameterized data format (RAW or TEXT)
  // Parameterized constructor: records the data format under test, points the
  // Tajo root dir at the local test directory, and builds the shared
  // (int, long, double, float, string) table schema.
  public TestBSTIndex(String type) {
    this.dataFormat = type;
    conf = new TajoConf();
    conf.setVar(TajoConf.ConfVars.ROOT_DIR, TEST_PATH);
    schema = SchemaBuilder.builder()
        .add(new Column("int", Type.INT4))
        .add(new Column("long", Type.INT8))
        .add(new Column("double", Type.FLOAT8))
        .add(new Column("float", Type.FLOAT4))
        .add(new Column("string", Type.TEXT))
        .build();
  }
@Parameters(name = "{index}: {0}")
public static Collection<Object[]> generateParameters() {
return Arrays.asList(new Object[][]{
{BuiltinStorages.RAW},
{BuiltinStorages.TEXT}
});
}
  // Creates a fresh test directory before each test and grabs its FileSystem handle.
  @Before
  public void setUp() throws Exception {
    testDir = CommonTestingUtil.getTestDir(TEST_PATH);
    fs = testDir.getFileSystem(conf);
  }
  /**
   * Writes TUPLE_NUM rows, builds a two-level BST index keyed on (long, double),
   * then probes every key with find() and checks that seeking the scanner to the
   * returned offset (and to reader.next()) lands on the expected rows.
   */
  @Test
  public void testFindValue() throws IOException {
    meta = CatalogUtil.newTableMeta(dataFormat, conf);

    Path tablePath = new Path(testDir, "testFindValue_" + dataFormat);
    Appender appender = ((FileTablespace) TablespaceManager.getLocalFs())
        .getAppender(meta, schema, tablePath);
    appender.init();

    // Write rows 0 .. TUPLE_NUM-1; every column carries the row number.
    Tuple tuple;
    for (int i = 0; i < TUPLE_NUM; i++) {
      tuple = new VTuple(5);
      tuple.put(0, DatumFactory.createInt4(i));
      tuple.put(1, DatumFactory.createInt8(i));
      tuple.put(2, DatumFactory.createFloat8(i));
      tuple.put(3, DatumFactory.createFloat4(i));
      tuple.put(4, DatumFactory.createText("field_" + i));
      appender.addTuple(tuple);
    }
    appender.close();

    FileStatus status = fs.getFileStatus(tablePath);
    long fileLen = status.getLen();
    FileFragment tablet = new FileFragment("table1_1", status.getPath(), 0, fileLen);

    // Index key: (long ASC, double ASC) — table columns 1 and 2.
    SortSpec[] sortKeys = new SortSpec[2];
    sortKeys[0] = new SortSpec(schema.getColumn("long"), true, false);
    sortKeys[1] = new SortSpec(schema.getColumn("double"), true, false);

    Schema keySchema = SchemaBuilder.builder()
        .add(new Column("long", Type.INT8))
        .add(new Column("double", Type.FLOAT8))
        .build();

    BaseTupleComparator comp = new BaseTupleComparator(keySchema, sortKeys);

    BSTIndex bst = new BSTIndex(conf);
    BSTIndexWriter creater = bst.getIndexWriter(new Path(testDir, "testFindValue_" + dataFormat + ".idx"),
        BSTIndex.TWO_LEVEL_INDEX,
        keySchema, comp);
    creater.setLoadNum(LOAD_NUM);
    creater.init();

    // Scan the table once, writing a (key, row file offset) pair per row.
    SeekableScanner scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
        getSeekableScanner(meta, schema, tablet, schema);
    scanner.init();

    Tuple keyTuple;
    long offset;
    while (true) {
      keyTuple = new VTuple(2);
      offset = scanner.getNextOffset();  // offset of the row about to be read
      tuple = scanner.next();
      if (tuple == null) break;

      keyTuple.put(0, tuple.asDatum(1));
      keyTuple.put(1, tuple.asDatum(2));
      creater.write(keyTuple, offset);
    }
    creater.flush();
    creater.close();
    scanner.close();

    tuple = new VTuple(keySchema.size());
    BSTIndexReader reader = bst.getIndexReader(new Path(testDir, "testFindValue_" + dataFormat + ".idx"), keySchema, comp);
    reader.init();
    scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
        getSeekableScanner(meta, schema, tablet, schema);
    scanner.init();

    for (int i = 0; i < TUPLE_NUM - 1; i++) {
      // NOTE: `tuple` is reused as both probe key and scan result; after the first
      // iteration it is the 5-column row and only positions 0/1 are overwritten here.
      tuple.put(0, DatumFactory.createInt8(i));
      tuple.put(1, DatumFactory.createFloat8(i));
      long offsets = reader.find(tuple);
      scanner.seek(offsets);
      tuple = scanner.next();
      assertTrue("seek check [" + (i) + " ," + (tuple.getInt8(1)) + "]", (i) == (tuple.getInt8(1)));
      assertTrue("seek check [" + (i) + " ," + (tuple.getFloat8(2)) + "]", (i) == (tuple.getFloat8(2)));

      // reader.next() yields the offset of the following index entry, or -1 at the end.
      offsets = reader.next();
      if (offsets == -1) {
        continue;
      }
      scanner.seek(offsets);
      tuple = scanner.next();
      assertTrue("[seek check " + (i + 1) + " ]", (i + 1) == (tuple.getInt4(0)));
      assertTrue("[seek check " + (i + 1) + " ]", (i + 1) == (tuple.getInt8(1)));
    }
    reader.close();
    scanner.close();
  }
  /**
   * Variant of {@link #testFindValue()} that builds the index while appending:
   * each row and its pre-append file offset are written to the index in the same
   * loop that writes the table.
   */
  @Test
  public void testBuildIndexWithAppender() throws IOException {
    meta = CatalogUtil.newTableMeta(dataFormat, conf);

    Path tablePath = new Path(testDir, "testBuildIndexWithAppender_" + dataFormat);
    FileAppender appender = (FileAppender) ((FileTablespace) TablespaceManager.getLocalFs())
        .getAppender(meta, schema, tablePath);
    appender.init();

    // Index key: (long ASC, double ASC).
    SortSpec[] sortKeys = new SortSpec[2];
    sortKeys[0] = new SortSpec(schema.getColumn("long"), true, false);
    sortKeys[1] = new SortSpec(schema.getColumn("double"), true, false);

    Schema keySchema = SchemaBuilder.builder()
        .add(new Column("long", Type.INT8))
        .add(new Column("double", Type.FLOAT8))
        .build();

    BaseTupleComparator comp = new BaseTupleComparator(keySchema, sortKeys);

    BSTIndex bst = new BSTIndex(conf);
    BSTIndexWriter creater = bst.getIndexWriter(new Path(testDir, "testBuildIndexWithAppender_" + dataFormat + ".idx"),
        BSTIndex.TWO_LEVEL_INDEX, keySchema, comp);
    creater.setLoadNum(LOAD_NUM);
    creater.init();

    Tuple tuple;
    long offset;
    for (int i = 0; i < TUPLE_NUM; i++) {
      tuple = new VTuple(5);
      tuple.put(0, DatumFactory.createInt4(i));
      tuple.put(1, DatumFactory.createInt8(i));
      tuple.put(2, DatumFactory.createFloat8(i));
      tuple.put(3, DatumFactory.createFloat4(i));
      tuple.put(4, DatumFactory.createText("field_" + i));

      offset = appender.getOffset();  // offset of the row about to be appended
      appender.addTuple(tuple);
      // NOTE(review): the full 5-column row is passed as the key tuple here,
      // unlike the other tests which build a 2-column key — confirm the writer
      // only reads the key positions.
      creater.write(tuple, offset);
    }
    appender.flush();
    appender.close();

    creater.flush();
    creater.close();

    FileStatus status = fs.getFileStatus(tablePath);
    long fileLen = status.getLen();
    FileFragment tablet = new FileFragment("table1_1", status.getPath(), 0, fileLen);

    tuple = new VTuple(keySchema.size());
    BSTIndexReader reader = bst.getIndexReader(new Path(testDir, "testBuildIndexWithAppender_" + dataFormat + ".idx"),
        keySchema, comp);
    reader.init();
    SeekableScanner scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
        getSeekableScanner(meta, schema, tablet, schema);
    scanner.init();

    for (int i = 0; i < TUPLE_NUM - 1; i++) {
      tuple.put(0, DatumFactory.createInt8(i));
      tuple.put(1, DatumFactory.createFloat8(i));
      long offsets = reader.find(tuple);
      scanner.seek(offsets);
      tuple = scanner.next();  // `tuple` now holds the full 5-column row
      assertTrue("[seek check " + (i) + " ]", (i) == (tuple.getInt8(1)));
      assertTrue("[seek check " + (i) + " ]", (i) == (tuple.getFloat8(2)));

      offsets = reader.next();
      if (offsets == -1) {
        continue;
      }
      scanner.seek(offsets);
      tuple = scanner.next();
      assertTrue("[seek check " + (i + 1) + " ]", (i + 1) == (tuple.getInt4(0)));
      assertTrue("[seek check " + (i + 1) + " ]", (i + 1) == (tuple.getInt8(1)));
    }
    reader.close();
    scanner.close();
  }
  /**
   * Indexes a table containing only even-numbered rows, then verifies that an
   * exact find() returns -1 for every odd (absent) key.
   */
  @Test
  public void testFindOmittedValue() throws IOException {
    meta = CatalogUtil.newTableMeta(dataFormat, conf);

    Path tablePath = StorageUtil.concatPath(testDir, "testFindOmittedValue_" + dataFormat);
    Appender appender = ((FileTablespace) TablespaceManager.getLocalFs()).getAppender(meta, schema, tablePath);
    appender.init();

    // Only even i is written, so every odd key is guaranteed to be absent.
    Tuple tuple;
    for (int i = 0; i < TUPLE_NUM; i += 2) {
      tuple = new VTuple(5);
      tuple.put(0, DatumFactory.createInt4(i));
      tuple.put(1, DatumFactory.createInt8(i));
      tuple.put(2, DatumFactory.createFloat8(i));
      tuple.put(3, DatumFactory.createFloat4(i));
      tuple.put(4, DatumFactory.createText("field_" + i));
      appender.addTuple(tuple);
    }
    appender.close();

    FileStatus status = fs.getFileStatus(tablePath);
    FileFragment tablet = new FileFragment("table1_1", status.getPath(), 0, status.getLen());

    // Index key: (long ASC, double ASC).
    SortSpec[] sortKeys = new SortSpec[2];
    sortKeys[0] = new SortSpec(schema.getColumn("long"), true, false);
    sortKeys[1] = new SortSpec(schema.getColumn("double"), true, false);

    Schema keySchema = SchemaBuilder.builder()
        .add(new Column("long", Type.INT8))
        .add(new Column("double", Type.FLOAT8))
        .build();

    BaseTupleComparator comp = new BaseTupleComparator(keySchema, sortKeys);

    BSTIndex bst = new BSTIndex(conf);
    BSTIndexWriter creater = bst.getIndexWriter(new Path(testDir, "testFindOmittedValue_" + dataFormat + ".idx"),
        BSTIndex.TWO_LEVEL_INDEX, keySchema, comp);
    creater.setLoadNum(LOAD_NUM);
    creater.init();

    // Index every (present) row by its key and file offset.
    SeekableScanner scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
        getSeekableScanner(meta, schema, tablet, schema);
    scanner.init();

    Tuple keyTuple;
    long offset;
    while (true) {
      keyTuple = new VTuple(2);
      offset = scanner.getNextOffset();
      tuple = scanner.next();
      if (tuple == null) break;

      keyTuple.put(0, tuple.asDatum(1));
      keyTuple.put(1, tuple.asDatum(2));
      creater.write(keyTuple, offset);
    }
    creater.flush();
    creater.close();
    scanner.close();

    BSTIndexReader reader = bst.getIndexReader(new Path(testDir, "testFindOmittedValue_" + dataFormat + ".idx"),
        keySchema, comp);
    reader.init();

    // Exact-match lookups for the omitted (odd) keys must all miss.
    for (int i = 1; i < TUPLE_NUM - 1; i += 2) {
      keyTuple.put(0, DatumFactory.createInt8(i));
      keyTuple.put(1, DatumFactory.createFloat8(i));
      long offsets = reader.find(keyTuple);
      assertEquals(-1, offsets);
    }
    reader.close();
  }
@Test
public void testFindNextKeyValue() throws IOException {
meta = CatalogUtil.newTableMeta(dataFormat, conf);
Path tablePath = new Path(testDir, "testFindNextKeyValue_" + dataFormat);
Appender appender = ((FileTablespace) TablespaceManager.getLocalFs())
.getAppender(meta, schema, tablePath);
appender.init();
Tuple tuple;
for (int i = 0; i < TUPLE_NUM; i++) {
tuple = new VTuple(5);
tuple.put(0, DatumFactory.createInt4(i));
tuple.put(1, DatumFactory.createInt8(i));
tuple.put(2, DatumFactory.createFloat8(i));
tuple.put(3, DatumFactory.createFloat4(i));
tuple.put(4, DatumFactory.createText("field_" + i));
appender.addTuple(tuple);
}
appender.close();
FileStatus status = fs.getFileStatus(tablePath);
long fileLen = status.getLen();
FileFragment tablet = new FileFragment("table1_1", status.getPath(), 0, fileLen);
SortSpec[] sortKeys = new SortSpec[2];
sortKeys[0] = new SortSpec(schema.getColumn("int"), true, false);
sortKeys[1] = new SortSpec(schema.getColumn("long"), true, false);
Schema keySchema = SchemaBuilder.builder()
.add(new Column("int", Type.INT4))
.add(new Column("long", Type.INT8))
.build();
BaseTupleComparator comp = new BaseTupleComparator(keySchema, sortKeys);
BSTIndex bst = new BSTIndex(conf);
BSTIndexWriter creater = bst.getIndexWriter(new Path(testDir, "testFindNextKeyValue_" + dataFormat + ".idx"),
BSTIndex.TWO_LEVEL_INDEX, keySchema, comp);
creater.setLoadNum(LOAD_NUM);
creater.init();
SeekableScanner scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
Tuple keyTuple;
long offset;
while (true) {
keyTuple = new VTuple(2);
offset = scanner.getNextOffset();
tuple = scanner.next();
if (tuple == null) break;
keyTuple.put(0, tuple.asDatum(0));
keyTuple.put(1, tuple.asDatum(1));
creater.write(keyTuple, offset);
}
creater.flush();
creater.close();
scanner.close();
BSTIndexReader reader = bst.getIndexReader(new Path(testDir, "testFindNextKeyValue_" + dataFormat + ".idx"),
keySchema, comp);
reader.init();
scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
Tuple result;
for (int i = 0; i < TUPLE_NUM - 1; i++) {
keyTuple = new VTuple(2);
keyTuple.put(0, DatumFactory.createInt4(i));
keyTuple.put(1, DatumFactory.createInt8(i));
long offsets = reader.find(keyTuple, true);
scanner.seek(offsets);
result = scanner.next();
assertTrue("[seek check " + (i + 1) + " ]",
(i + 1) == (result.getInt4(0)));
assertTrue("[seek check " + (i + 1) + " ]", (i + 1) == (result.getInt8(1)));
offsets = reader.next();
if (offsets == -1) {
continue;
}
scanner.seek(offsets);
result = scanner.next();
assertTrue("[seek check " + (i + 2) + " ]", (i + 2) == (result.getInt8(0)));
assertTrue("[seek check " + (i + 2) + " ]", (i + 2) == (result.getFloat8(1)));
}
reader.close();
scanner.close();
}
@Test
public void testFindNextKeyOmittedValue() throws IOException {
meta = CatalogUtil.newTableMeta(dataFormat, conf);
Path tablePath = new Path(testDir, "testFindNextKeyOmittedValue_" + dataFormat);
Appender appender = (((FileTablespace) TablespaceManager.getLocalFs()))
.getAppender(meta, schema, tablePath);
appender.init();
Tuple tuple;
for (int i = 0; i < TUPLE_NUM; i += 2) {
tuple = new VTuple(5);
tuple.put(0, DatumFactory.createInt4(i));
tuple.put(1, DatumFactory.createInt8(i));
tuple.put(2, DatumFactory.createFloat8(i));
tuple.put(3, DatumFactory.createFloat4(i));
tuple.put(4, DatumFactory.createText("field_" + i));
appender.addTuple(tuple);
}
appender.close();
FileStatus status = fs.getFileStatus(tablePath);
long fileLen = status.getLen();
FileFragment tablet = new FileFragment("table1_1", status.getPath(), 0, fileLen);
SortSpec[] sortKeys = new SortSpec[2];
sortKeys[0] = new SortSpec(schema.getColumn("int"), true, false);
sortKeys[1] = new SortSpec(schema.getColumn("long"), true, false);
Schema keySchema = SchemaBuilder.builder()
.add(new Column("int", Type.INT4))
.add(new Column("long", Type.INT8))
.build();
BaseTupleComparator comp = new BaseTupleComparator(keySchema, sortKeys);
BSTIndex bst = new BSTIndex(conf);
BSTIndexWriter creater = bst.getIndexWriter(new Path(testDir, "testFindNextKeyOmittedValue_" + dataFormat + ".idx"),
BSTIndex.TWO_LEVEL_INDEX, keySchema, comp);
creater.setLoadNum(LOAD_NUM);
creater.init();
SeekableScanner scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
Tuple keyTuple;
long offset;
while (true) {
keyTuple = new VTuple(2);
offset = scanner.getNextOffset();
tuple = scanner.next();
if (tuple == null) break;
keyTuple.put(0, tuple.asDatum(0));
keyTuple.put(1, tuple.asDatum(1));
creater.write(keyTuple, offset);
}
creater.flush();
creater.close();
scanner.close();
BSTIndexReader reader = bst.getIndexReader(new Path(testDir, "testFindNextKeyOmittedValue_" + dataFormat + ".idx"),
keySchema, comp);
reader.init();
scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
Tuple result;
for (int i = 1; i < TUPLE_NUM - 1; i += 2) {
keyTuple = new VTuple(2);
keyTuple.put(0, DatumFactory.createInt4(i));
keyTuple.put(1, DatumFactory.createInt8(i));
long offsets = reader.find(keyTuple, true);
scanner.seek(offsets);
result = scanner.next();
assertTrue("[seek check " + (i + 1) + " ]", (i + 1) == (result.getInt4(0)));
assertTrue("[seek check " + (i + 1) + " ]", (i + 1) == (result.getInt8(1)));
}
scanner.close();
}
@Test
public void testFindMinValue() throws IOException {
meta = CatalogUtil.newTableMeta(dataFormat, conf);
Path tablePath = new Path(testDir, "testFindMinValue" + dataFormat);
Appender appender = ((FileTablespace) TablespaceManager.getLocalFs())
.getAppender(meta, schema, tablePath);
appender.init();
Tuple tuple;
for (int i = 5; i < TUPLE_NUM + 5; i++) {
tuple = new VTuple(5);
tuple.put(0, DatumFactory.createInt4(i));
tuple.put(1, DatumFactory.createInt8(i));
tuple.put(2, DatumFactory.createFloat8(i));
tuple.put(3, DatumFactory.createFloat4(i));
tuple.put(4, DatumFactory.createText("field_" + i));
appender.addTuple(tuple);
}
appender.close();
FileStatus status = fs.getFileStatus(tablePath);
long fileLen = status.getLen();
FileFragment tablet = new FileFragment("table1_1", status.getPath(), 0, fileLen);
SortSpec[] sortKeys = new SortSpec[2];
sortKeys[0] = new SortSpec(schema.getColumn("long"), true, false);
sortKeys[1] = new SortSpec(schema.getColumn("double"), true, false);
Schema keySchema = SchemaBuilder.builder()
.add(new Column("long", Type.INT8))
.add(new Column("double", Type.FLOAT8))
.build();
BaseTupleComparator comp = new BaseTupleComparator(keySchema, sortKeys);
BSTIndex bst = new BSTIndex(conf);
BSTIndexWriter creater = bst.getIndexWriter(new Path(testDir, "testFindMinValue_" + dataFormat + ".idx"),
BSTIndex.TWO_LEVEL_INDEX, keySchema, comp);
creater.setLoadNum(LOAD_NUM);
creater.init();
SeekableScanner scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
Tuple keyTuple;
long offset;
while (true) {
keyTuple = new VTuple(2);
offset = scanner.getNextOffset();
tuple = scanner.next();
if (tuple == null) break;
keyTuple.put(0, tuple.asDatum(1));
keyTuple.put(1, tuple.asDatum(2));
creater.write(keyTuple, offset);
}
creater.flush();
creater.close();
scanner.close();
tuple = new VTuple(keySchema.size());
BSTIndexReader reader = bst.getIndexReader(new Path(testDir, "testFindMinValue_" + dataFormat + ".idx"),
keySchema, comp);
reader.init();
scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
tuple.put(0, DatumFactory.createInt8(0));
tuple.put(1, DatumFactory.createFloat8(0));
offset = reader.find(tuple);
assertEquals(-1, offset);
offset = reader.find(tuple, true);
assertTrue(offset >= 0);
scanner.seek(offset);
tuple = scanner.next();
assertEquals(5, tuple.getInt4(1));
assertEquals(5l, tuple.getInt8(2));
reader.close();
scanner.close();
}
@Test
public void testMinMax() throws IOException {
meta = CatalogUtil.newTableMeta(dataFormat, conf);
Path tablePath = new Path(testDir, "testMinMax_" + dataFormat);
Appender appender = ((FileTablespace) TablespaceManager.getLocalFs())
.getAppender(meta, schema, tablePath);
appender.init();
Tuple tuple;
for (int i = 5; i < TUPLE_NUM; i += 2) {
tuple = new VTuple(5);
tuple.put(0, DatumFactory.createInt4(i));
tuple.put(1, DatumFactory.createInt8(i));
tuple.put(2, DatumFactory.createFloat8(i));
tuple.put(3, DatumFactory.createFloat4(i));
tuple.put(4, DatumFactory.createText("field_" + i));
appender.addTuple(tuple);
}
appender.close();
FileStatus status = fs.getFileStatus(tablePath);
long fileLen = status.getLen();
FileFragment tablet = new FileFragment("table1_1", status.getPath(), 0, fileLen);
SortSpec[] sortKeys = new SortSpec[2];
sortKeys[0] = new SortSpec(schema.getColumn("int"), true, false);
sortKeys[1] = new SortSpec(schema.getColumn("long"), true, false);
Schema keySchema = SchemaBuilder.builder()
.add(new Column("int", Type.INT4))
.add(new Column("long", Type.INT8))
.build();
BaseTupleComparator comp = new BaseTupleComparator(keySchema, sortKeys);
BSTIndex bst = new BSTIndex(conf);
BSTIndexWriter creater = bst.getIndexWriter(new Path(testDir, "testMinMax_" + dataFormat + ".idx"),
BSTIndex.TWO_LEVEL_INDEX, keySchema, comp);
creater.setLoadNum(LOAD_NUM);
creater.init();
SeekableScanner scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
Tuple keyTuple;
long offset;
while (true) {
keyTuple = new VTuple(2);
offset = scanner.getNextOffset();
tuple = scanner.next();
if (tuple == null) break;
keyTuple.put(0, tuple.asDatum(0));
keyTuple.put(1, tuple.asDatum(1));
creater.write(keyTuple, offset);
}
creater.flush();
creater.close();
scanner.close();
BSTIndexReader reader = bst.getIndexReader(new Path(testDir, "testMinMax_" + dataFormat + ".idx"),
keySchema, comp);
reader.init();
Tuple min = reader.getFirstKey();
assertEquals(5, min.getInt4(0));
assertEquals(5l, min.getInt8(0));
Tuple max = reader.getLastKey();
assertEquals(TUPLE_NUM - 1, max.getInt4(0));
assertEquals(TUPLE_NUM - 1, max.getInt8(0));
reader.close();
}
/**
 * Worker that hammers a shared {@link BSTIndexReader} with random key lookups.
 * Used by testConcurrentAccess() to exercise the reader from several threads
 * at once; the spawning test checks {@link #isFailed()} after join().
 */
private class ConcurrentAccessor implements Runnable {
  final BSTIndexReader reader;
  final Random rnd = new Random(System.currentTimeMillis());
  // volatile: written by the worker thread, read by the test thread.
  // Thread.join() already establishes happens-before, but volatile keeps the
  // flag safe even if a caller polls it before joining.
  volatile boolean failed = false;
  ConcurrentAccessor(BSTIndexReader reader) {
    this.reader = reader;
  }
  public boolean isFailed() {
    return this.failed;
  }
  @Override
  public void run() {
    Tuple findKey = new VTuple(2);
    int keyVal;
    for (int i = 0; i < 10000; i++) {
      // Look up a random existing key; find() returns -1 when not found.
      keyVal = rnd.nextInt(10000);
      findKey.put(0, DatumFactory.createInt4(keyVal));
      findKey.put(1, DatumFactory.createInt8(keyVal));
      try {
        assertTrue(reader.find(findKey) != -1);
      } catch (Throwable t) {
        // BUGFIX: must catch Throwable, not Exception. assertTrue throws
        // AssertionError (an Error, not an Exception), so the previous
        // catch(Exception) let a failed lookup kill this thread silently,
        // leaving failed == false and allowing the test to pass spuriously.
        t.printStackTrace();
        this.failed = true;
      }
    }
  }
}
@Test
public void testConcurrentAccess() throws IOException, InterruptedException {
// Verifies that one BSTIndexReader instance can be shared by several threads
// issuing find() calls concurrently, with no failed or erroring lookups.
//
// Phase 1: write TUPLE_NUM rows (int4, int8, float8, float4, text) to a table.
meta = CatalogUtil.newTableMeta(dataFormat, conf);
Path tablePath = new Path(testDir, "testConcurrentAccess_" + dataFormat);
Appender appender = ((FileTablespace) TablespaceManager.getLocalFs())
.getAppender(meta, schema, tablePath);
appender.init();
Tuple tuple;
for (int i = 0; i < TUPLE_NUM; i++) {
tuple = new VTuple(5);
tuple.put(0, DatumFactory.createInt4(i));
tuple.put(1, DatumFactory.createInt8(i));
tuple.put(2, DatumFactory.createFloat8(i));
tuple.put(3, DatumFactory.createFloat4(i));
tuple.put(4, DatumFactory.createText("field_" + i));
appender.addTuple(tuple);
}
appender.close();
// Phase 2: build a two-level BST index over the (int, long) key columns,
// both ascending. Key tuples are paired with the row's file offset.
FileStatus status = fs.getFileStatus(tablePath);
long fileLen = status.getLen();
FileFragment tablet = new FileFragment("table1_1", status.getPath(), 0, fileLen);
SortSpec[] sortKeys = new SortSpec[2];
sortKeys[0] = new SortSpec(schema.getColumn("int"), true, false);
sortKeys[1] = new SortSpec(schema.getColumn("long"), true, false);
Schema keySchema = SchemaBuilder.builder()
.add(new Column("int", Type.INT4))
.add(new Column("long", Type.INT8))
.build();
BaseTupleComparator comp = new BaseTupleComparator(keySchema, sortKeys);
BSTIndex bst = new BSTIndex(conf);
BSTIndexWriter creater = bst.getIndexWriter(new Path(testDir, "testConcurrentAccess_" + dataFormat + ".idx"),
BSTIndex.TWO_LEVEL_INDEX, keySchema, comp);
creater.setLoadNum(LOAD_NUM);
creater.init();
SeekableScanner scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
Tuple keyTuple;
long offset;
while (true) {
keyTuple = new VTuple(2);
// getNextOffset() must be read before next(): it is the offset of the
// row the following next() call returns.
offset = scanner.getNextOffset();
tuple = scanner.next();
if (tuple == null) break;
keyTuple.put(0, tuple.asDatum(0));
keyTuple.put(1, tuple.asDatum(1));
creater.write(keyTuple, offset);
}
creater.flush();
creater.close();
scanner.close();
// Phase 3: open one reader and share it across 5 ConcurrentAccessor threads,
// each doing 10000 random lookups. Any lookup failure sets the accessor's
// failed flag, which is asserted after join().
BSTIndexReader reader = bst.getIndexReader(new Path(testDir, "testConcurrentAccess_" + dataFormat + ".idx"),
keySchema, comp);
reader.init();
Thread[] threads = new Thread[5];
ConcurrentAccessor[] accs = new ConcurrentAccessor[5];
for (int i = 0; i < threads.length; i++) {
accs[i] = new ConcurrentAccessor(reader);
threads[i] = new Thread(accs[i]);
threads[i].start();
}
for (int i = 0; i < threads.length; i++) {
threads[i].join();
assertFalse(accs[i].isFailed());
}
reader.close();
}
@Test
public void testFindValueDescOrder() throws IOException {
// Exercises find()/next() on an index whose keys (long, double — table
// columns 1 and 2) are sorted DESCENDING. Rows are appended from
// TUPLE_NUM-1 down to 0 so the physical order matches the sort order.
meta = CatalogUtil.newTableMeta(dataFormat, conf);
Path tablePath = new Path(testDir, "testFindValueDescOrder_" + dataFormat);
Appender appender = ((FileTablespace) TablespaceManager.getLocalFs())
.getAppender(meta, schema, tablePath);
appender.init();
Tuple tuple;
for (int i = (TUPLE_NUM - 1); i >= 0; i--) {
tuple = new VTuple(5);
tuple.put(0, DatumFactory.createInt4(i));
tuple.put(1, DatumFactory.createInt8(i));
tuple.put(2, DatumFactory.createFloat8(i));
tuple.put(3, DatumFactory.createFloat4(i));
tuple.put(4, DatumFactory.createText("field_" + i));
appender.addTuple(tuple);
}
appender.close();
FileStatus status = fs.getFileStatus(tablePath);
long fileLen = status.getLen();
FileFragment tablet = new FileFragment("table1_1", status.getPath(), 0, fileLen);
// Both sort keys descending (ascending == false).
SortSpec[] sortKeys = new SortSpec[2];
sortKeys[0] = new SortSpec(schema.getColumn("long"), false, false);
sortKeys[1] = new SortSpec(schema.getColumn("double"), false, false);
Schema keySchema = SchemaBuilder.builder()
.add(new Column("long", Type.INT8))
.add(new Column("double", Type.FLOAT8))
.build();
BaseTupleComparator comp = new BaseTupleComparator(keySchema, sortKeys);
BSTIndex bst = new BSTIndex(conf);
// NOTE(review): trailing boolean arg differs from the other tests here —
// presumably a "sorted input" hint to the writer; confirm against
// BSTIndexWriter's javadoc.
BSTIndexWriter creater = bst.getIndexWriter(new Path(testDir, "testFindValueDescOrder_" + dataFormat + ".idx"),
BSTIndex.TWO_LEVEL_INDEX, keySchema, comp, true);
creater.setLoadNum(LOAD_NUM);
creater.init();
SeekableScanner scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
Tuple keyTuple;
long offset;
while (true) {
keyTuple = new VTuple(2);
// Offset of the row returned by the following next() call.
offset = scanner.getNextOffset();
tuple = scanner.next();
if (tuple == null) break;
// Index key = (long, double) = table columns 1 and 2.
keyTuple.put(0, tuple.asDatum(1));
keyTuple.put(1, tuple.asDatum(2));
creater.write(keyTuple, offset);
}
creater.flush();
creater.close();
scanner.close();
tuple = new VTuple(keySchema.size());
BSTIndexReader reader = bst.getIndexReader(new Path(testDir, "testFindValueDescOrder_" + dataFormat + ".idx"),
keySchema, comp);
reader.init();
scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
for (int i = (TUPLE_NUM - 1); i > 0; i--) {
// NOTE(review): after the first iteration 'tuple' is the 5-column row
// returned by scanner.next(); only columns 0-1 are overwritten as the
// search key. Works because the comparator reads just the key columns,
// but a fresh 2-column VTuple per iteration would be less fragile.
tuple.put(0, DatumFactory.createInt8(i));
tuple.put(1, DatumFactory.createFloat8(i));
long offsets = reader.find(tuple);
scanner.seek(offsets);
tuple = scanner.next();
assertTrue("seek check [" + (i) + " ," + (tuple.getInt8(1)) + "]", (i) == (tuple.getInt8(1)));
assertTrue("seek check [" + (i) + " ," + (tuple.getFloat8(2)) + "]", (i) == (tuple.getFloat8(2)));
// next() advances in index (descending) order, so the following row
// holds value i-1; -1 means the end of the index was reached.
offsets = reader.next();
if (offsets == -1) {
continue;
}
scanner.seek(offsets);
tuple = scanner.next();
assertTrue("[seek check " + (i - 1) + " ]", (i - 1) == (tuple.getInt4(0)));
assertTrue("[seek check " + (i - 1) + " ]", (i - 1) == (tuple.getInt8(1)));
}
reader.close();
scanner.close();
}
@Test
public void testFindNextKeyValueDescOrder() throws IOException {
// Exercises find(key, true) — the "next key" lookup — on a DESCENDING
// (int, long) index: searching for key i should land on the row for i-1.
meta = CatalogUtil.newTableMeta(dataFormat, conf);
Path tablePath = new Path(testDir, "testFindNextKeyValueDescOrder_" + dataFormat);
Appender appender = ((FileTablespace) TablespaceManager.getLocalFs()).getAppender(meta, schema, tablePath);
appender.init();
Tuple tuple;
// Append rows in descending order to match the index sort order.
for (int i = (TUPLE_NUM - 1); i >= 0; i--) {
tuple = new VTuple(5);
tuple.put(0, DatumFactory.createInt4(i));
tuple.put(1, DatumFactory.createInt8(i));
tuple.put(2, DatumFactory.createFloat8(i));
tuple.put(3, DatumFactory.createFloat4(i));
tuple.put(4, DatumFactory.createText("field_" + i));
appender.addTuple(tuple);
}
appender.close();
FileStatus status = fs.getFileStatus(tablePath);
long fileLen = status.getLen();
FileFragment tablet = new FileFragment("table1_1", status.getPath(), 0, fileLen);
// Both sort keys descending (ascending == false).
SortSpec[] sortKeys = new SortSpec[2];
sortKeys[0] = new SortSpec(schema.getColumn("int"), false, false);
sortKeys[1] = new SortSpec(schema.getColumn("long"), false, false);
Schema keySchema = SchemaBuilder.builder()
.add(new Column("int", Type.INT4))
.add(new Column("long", Type.INT8))
.build();
BaseTupleComparator comp = new BaseTupleComparator(keySchema, sortKeys);
BSTIndex bst = new BSTIndex(conf);
BSTIndexWriter creater = bst.getIndexWriter(new Path(testDir,
"testFindNextKeyValueDescOrder_" + dataFormat + ".idx"), BSTIndex.TWO_LEVEL_INDEX, keySchema, comp);
creater.setLoadNum(LOAD_NUM);
creater.init();
SeekableScanner scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
Tuple keyTuple;
long offset;
while (true) {
keyTuple = new VTuple(2);
// Offset of the row returned by the following next() call.
offset = scanner.getNextOffset();
tuple = scanner.next();
if (tuple == null) break;
keyTuple.put(0, tuple.asDatum(0));
keyTuple.put(1, tuple.asDatum(1));
creater.write(keyTuple, offset);
}
creater.flush();
creater.close();
scanner.close();
BSTIndexReader reader = bst.getIndexReader(new Path(testDir, "testFindNextKeyValueDescOrder_" + dataFormat + ".idx"),
keySchema, comp);
reader.init();
// The reader must expose the same key schema and comparator it was built with.
assertEquals(keySchema, reader.getKeySchema());
assertEquals(comp, reader.getComparator());
scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
Tuple result;
for (int i = (TUPLE_NUM - 1); i > 0; i--) {
keyTuple = new VTuple(2);
keyTuple.put(0, DatumFactory.createInt4(i));
keyTuple.put(1, DatumFactory.createInt8(i));
// find(key, true): position at the entry AFTER key i, i.e. i-1 in
// descending order.
long offsets = reader.find(keyTuple, true);
scanner.seek(offsets);
result = scanner.next();
assertTrue("[seek check " + (i - 1) + " ]",
(i - 1) == (result.getInt4(0)));
assertTrue("[seek check " + (i - 1) + " ]", (i - 1) == (result.getInt8(1)));
// Advancing once more should reach i-2, unless the index is exhausted (-1).
offsets = reader.next();
if (offsets == -1) {
continue;
}
scanner.seek(offsets);
result = scanner.next();
// NOTE(review): these use getInt8(0)/getFloat8(1) although the columns are
// INT4/INT8 (the first pair of asserts above uses getInt4/getInt8).
// Presumably Datum widening makes both forms equivalent — confirm, and
// consider normalizing the accessors for consistency.
assertTrue("[seek check " + (i - 2) + " ]", (i - 2) == (result.getInt8(0)));
assertTrue("[seek check " + (i - 2) + " ]", (i - 2) == (result.getFloat8(1)));
}
reader.close();
scanner.close();
}
@Test
public void testFindValueASCOrder() throws IOException {
// Ascending-order counterpart of testFindValueDescOrder: builds an index on
// the (long, double) columns sorted ASCENDING and checks that find() lands
// on key i and that reader.next() then yields the row for i+1.
meta = CatalogUtil.newTableMeta(dataFormat, conf);
Path tablePath = new Path(testDir, "testFindValue_" + dataFormat);
Appender appender = ((FileTablespace) TablespaceManager.getLocalFs())
.getAppender(meta, schema, tablePath);
appender.init();
Tuple tuple;
// order by asc
for (int i = 0; i < TUPLE_NUM; i++) {
tuple = new VTuple(5);
tuple.put(0, DatumFactory.createInt4(i));
tuple.put(1, DatumFactory.createInt8(i));
tuple.put(2, DatumFactory.createFloat8(i));
tuple.put(3, DatumFactory.createFloat4(i));
tuple.put(4, DatumFactory.createText("field_" + i));
appender.addTuple(tuple);
}
appender.close();
FileStatus status = fs.getFileStatus(tablePath);
long fileLen = status.getLen();
FileFragment tablet = new FileFragment("table1_1", status.getPath(), 0, fileLen);
// Both sort keys ascending.
SortSpec[] sortKeys = new SortSpec[2];
sortKeys[0] = new SortSpec(schema.getColumn("long"), true, false);
sortKeys[1] = new SortSpec(schema.getColumn("double"), true, false);
Schema keySchema = SchemaBuilder.builder()
.add(new Column("long", Type.INT8))
.add(new Column("double", Type.FLOAT8))
.build();
BaseTupleComparator comp = new BaseTupleComparator(keySchema, sortKeys);
BSTIndex bst = new BSTIndex(conf);
BSTIndexWriter creater = bst.getIndexWriter(new Path(testDir, "testFindValue_" + dataFormat + ".idx"),
BSTIndex.TWO_LEVEL_INDEX,
keySchema, comp, true);
creater.setLoadNum(LOAD_NUM);
creater.init();
SeekableScanner scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
Tuple keyTuple;
long offset;
while (true) {
keyTuple = new VTuple(2);
// Offset of the row returned by the following next() call.
offset = scanner.getNextOffset();
tuple = scanner.next();
if (tuple == null) break;
// Index key = (long, double) = table columns 1 and 2.
keyTuple.put(0, tuple.asDatum(1));
keyTuple.put(1, tuple.asDatum(2));
creater.write(keyTuple, offset);
}
creater.flush();
creater.close();
scanner.close();
tuple = new VTuple(keySchema.size());
BSTIndexReader reader = bst.getIndexReader(new Path(testDir, "testFindValue_" + dataFormat + ".idx"), keySchema, comp);
reader.init();
scanner = OldStorageManager.getStorageManager(conf, meta.getDataFormat()).
getSeekableScanner(meta, schema, tablet, schema);
scanner.init();
for (int i = 0; i < TUPLE_NUM - 1; i++) {
// NOTE(review): after the first iteration 'tuple' is the 5-column scanned
// row, reused as the 2-column search key; only columns 0-1 are compared.
tuple.put(0, DatumFactory.createInt8(i));
tuple.put(1, DatumFactory.createFloat8(i));
long offsets = reader.find(tuple);
scanner.seek(offsets);
tuple = scanner.next();
assertTrue("seek check [" + (i) + " ," + (tuple.getInt8(1)) + "]", (i) == (tuple.getInt8(1)));
assertTrue("seek check [" + (i) + " ," + (tuple.getFloat8(2)) + "]", (i) == (tuple.getFloat8(2)));
// next() advances in ascending index order to key i+1; -1 means the
// end of the index was reached.
offsets = reader.next();
if (offsets == -1) {
continue;
}
scanner.seek(offsets);
tuple = scanner.next();
assertTrue("[seek check " + (i + 1) + " ]", (i + 1) == (tuple.getInt4(0)));
assertTrue("[seek check " + (i + 1) + " ]", (i + 1) == (tuple.getInt8(1)));
}
reader.close();
scanner.close();
}
}
|
googleads/google-ads-java | 37,514 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/enums/CampaignPrimaryStatusReasonEnum.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/enums/campaign_primary_status_reason.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.enums;
/**
* <pre>
* Container for enum describing possible campaign primary status reasons.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum}
*/
public final class CampaignPrimaryStatusReasonEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum)
CampaignPrimaryStatusReasonEnumOrBuilder {
private static final long serialVersionUID = 0L;
// Generated by protoc (see file header): this message declares no fields and
// exists only as a namespace container for the nested enum below.
// Use CampaignPrimaryStatusReasonEnum.newBuilder() to construct.
private CampaignPrimaryStatusReasonEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CampaignPrimaryStatusReasonEnum() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
// Hook used by the protobuf runtime to create instances reflectively.
return new CampaignPrimaryStatusReasonEnum();
}
// Returns the protobuf descriptor for this message type, looked up from the
// generated file-level proto class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v19_enums_CampaignPrimaryStatusReasonEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
// Binds the (empty) field accessor table to this class and its Builder.
return com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v19_enums_CampaignPrimaryStatusReasonEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum.class, com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum.Builder.class);
}
/**
* <pre>
* Enum describing the possible campaign primary status reasons. Provides
* insight into why a campaign is not serving or not serving optimally. These
* reasons are aggregated to determine an overall campaign primary status.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum.CampaignPrimaryStatusReason}
*/
public enum CampaignPrimaryStatusReason
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* The user-specified campaign status is removed.
* </pre>
*
* <code>CAMPAIGN_REMOVED = 2;</code>
*/
CAMPAIGN_REMOVED(2),
/**
* <pre>
* The user-specified campaign status is paused.
* </pre>
*
* <code>CAMPAIGN_PAUSED = 3;</code>
*/
CAMPAIGN_PAUSED(3),
/**
* <pre>
* The user-specified time for this campaign to start is in the future.
* </pre>
*
* <code>CAMPAIGN_PENDING = 4;</code>
*/
CAMPAIGN_PENDING(4),
/**
* <pre>
* The user-specified time for this campaign to end has passed.
* </pre>
*
* <code>CAMPAIGN_ENDED = 5;</code>
*/
CAMPAIGN_ENDED(5),
/**
* <pre>
* The campaign is a draft.
* </pre>
*
* <code>CAMPAIGN_DRAFT = 6;</code>
*/
CAMPAIGN_DRAFT(6),
/**
* <pre>
* The bidding strategy has incorrect user-specified settings.
* </pre>
*
* <code>BIDDING_STRATEGY_MISCONFIGURED = 7;</code>
*/
BIDDING_STRATEGY_MISCONFIGURED(7),
/**
* <pre>
* The bidding strategy is limited by user-specified settings such as lack
* of data or similar.
* </pre>
*
* <code>BIDDING_STRATEGY_LIMITED = 8;</code>
*/
BIDDING_STRATEGY_LIMITED(8),
/**
* <pre>
* The automated bidding system is adjusting to user-specified changes to
* the bidding strategy.
* </pre>
*
* <code>BIDDING_STRATEGY_LEARNING = 9;</code>
*/
BIDDING_STRATEGY_LEARNING(9),
/**
* <pre>
* Campaign could capture more conversion value by adjusting CPA/ROAS
* targets.
* </pre>
*
* <code>BIDDING_STRATEGY_CONSTRAINED = 10;</code>
*/
BIDDING_STRATEGY_CONSTRAINED(10),
/**
* <pre>
* The budget is limiting the campaign's ability to serve.
* </pre>
*
* <code>BUDGET_CONSTRAINED = 11;</code>
*/
BUDGET_CONSTRAINED(11),
/**
* <pre>
* The budget has incorrect user-specified settings.
* </pre>
*
* <code>BUDGET_MISCONFIGURED = 12;</code>
*/
BUDGET_MISCONFIGURED(12),
/**
* <pre>
* Campaign is not targeting all relevant queries.
* </pre>
*
* <code>SEARCH_VOLUME_LIMITED = 13;</code>
*/
SEARCH_VOLUME_LIMITED(13),
/**
* <pre>
* The user-specified ad group statuses are all paused.
* </pre>
*
* <code>AD_GROUPS_PAUSED = 14;</code>
*/
AD_GROUPS_PAUSED(14),
/**
* <pre>
* No eligible ad groups exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUPS = 15;</code>
*/
NO_AD_GROUPS(15),
/**
* <pre>
* The user-specified keyword statuses are all paused.
* </pre>
*
* <code>KEYWORDS_PAUSED = 16;</code>
*/
KEYWORDS_PAUSED(16),
/**
* <pre>
* No eligible keywords exist in this campaign.
* </pre>
*
* <code>NO_KEYWORDS = 17;</code>
*/
NO_KEYWORDS(17),
/**
* <pre>
* The user-specified ad group ad statuses are all paused.
* </pre>
*
* <code>AD_GROUP_ADS_PAUSED = 18;</code>
*/
AD_GROUP_ADS_PAUSED(18),
/**
* <pre>
* No eligible ad group ads exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUP_ADS = 19;</code>
*/
NO_AD_GROUP_ADS(19),
/**
* <pre>
* At least one ad in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ADS_LIMITED_BY_POLICY = 20;</code>
*/
HAS_ADS_LIMITED_BY_POLICY(20),
/**
* <pre>
* At least one ad in this campaign is disapproved.
* </pre>
*
* <code>HAS_ADS_DISAPPROVED = 21;</code>
*/
HAS_ADS_DISAPPROVED(21),
/**
* <pre>
* Most ads in this campaign are pending review.
* </pre>
*
* <code>MOST_ADS_UNDER_REVIEW = 22;</code>
*/
MOST_ADS_UNDER_REVIEW(22),
/**
* <pre>
* The campaign has a lead form goal, and the lead form extension is
* missing.
* </pre>
*
* <code>MISSING_LEAD_FORM_EXTENSION = 23;</code>
*/
MISSING_LEAD_FORM_EXTENSION(23),
/**
* <pre>
* The campaign has a call goal, and the call extension is missing.
* </pre>
*
* <code>MISSING_CALL_EXTENSION = 24;</code>
*/
MISSING_CALL_EXTENSION(24),
/**
* <pre>
* The lead form extension is under review.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_UNDER_REVIEW = 25;</code>
*/
LEAD_FORM_EXTENSION_UNDER_REVIEW(25),
/**
* <pre>
* The lead extension is disapproved.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_DISAPPROVED = 26;</code>
*/
LEAD_FORM_EXTENSION_DISAPPROVED(26),
/**
* <pre>
* The call extension is under review.
* </pre>
*
* <code>CALL_EXTENSION_UNDER_REVIEW = 27;</code>
*/
CALL_EXTENSION_UNDER_REVIEW(27),
/**
* <pre>
* The call extension is disapproved.
* </pre>
*
* <code>CALL_EXTENSION_DISAPPROVED = 28;</code>
*/
CALL_EXTENSION_DISAPPROVED(28),
/**
* <pre>
* No eligible mobile application ad group criteria exist in this campaign.
* </pre>
*
* <code>NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA = 29;</code>
*/
NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA(29),
/**
* <pre>
* The user-specified campaign group status is paused.
* </pre>
*
* <code>CAMPAIGN_GROUP_PAUSED = 30;</code>
*/
CAMPAIGN_GROUP_PAUSED(30),
/**
* <pre>
* The user-specified times of all group budgets associated with the parent
* campaign group has passed.
* </pre>
*
* <code>CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED = 31;</code>
*/
CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED(31),
/**
* <pre>
* The app associated with this ACi campaign is not released in the target
* countries of the campaign.
* </pre>
*
* <code>APP_NOT_RELEASED = 32;</code>
*/
APP_NOT_RELEASED(32),
/**
* <pre>
* The app associated with this ACi campaign is partially released in the
* target countries of the campaign.
* </pre>
*
* <code>APP_PARTIALLY_RELEASED = 33;</code>
*/
APP_PARTIALLY_RELEASED(33),
/**
* <pre>
* At least one asset group in this campaign is disapproved.
* </pre>
*
* <code>HAS_ASSET_GROUPS_DISAPPROVED = 34;</code>
*/
HAS_ASSET_GROUPS_DISAPPROVED(34),
/**
* <pre>
* At least one asset group in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ASSET_GROUPS_LIMITED_BY_POLICY = 35;</code>
*/
HAS_ASSET_GROUPS_LIMITED_BY_POLICY(35),
/**
* <pre>
* Most asset groups in this campaign are pending review.
* </pre>
*
* <code>MOST_ASSET_GROUPS_UNDER_REVIEW = 36;</code>
*/
MOST_ASSET_GROUPS_UNDER_REVIEW(36),
/**
* <pre>
* No eligible asset groups exist in this campaign.
* </pre>
*
* <code>NO_ASSET_GROUPS = 37;</code>
*/
NO_ASSET_GROUPS(37),
/**
* <pre>
* All asset groups in this campaign are paused.
* </pre>
*
* <code>ASSET_GROUPS_PAUSED = 38;</code>
*/
ASSET_GROUPS_PAUSED(38),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* The user-specified campaign status is removed.
* </pre>
*
* <code>CAMPAIGN_REMOVED = 2;</code>
*/
public static final int CAMPAIGN_REMOVED_VALUE = 2;
/**
* <pre>
* The user-specified campaign status is paused.
* </pre>
*
* <code>CAMPAIGN_PAUSED = 3;</code>
*/
public static final int CAMPAIGN_PAUSED_VALUE = 3;
/**
* <pre>
* The user-specified time for this campaign to start is in the future.
* </pre>
*
* <code>CAMPAIGN_PENDING = 4;</code>
*/
public static final int CAMPAIGN_PENDING_VALUE = 4;
/**
* <pre>
* The user-specified time for this campaign to end has passed.
* </pre>
*
* <code>CAMPAIGN_ENDED = 5;</code>
*/
public static final int CAMPAIGN_ENDED_VALUE = 5;
/**
* <pre>
* The campaign is a draft.
* </pre>
*
* <code>CAMPAIGN_DRAFT = 6;</code>
*/
public static final int CAMPAIGN_DRAFT_VALUE = 6;
/**
* <pre>
* The bidding strategy has incorrect user-specified settings.
* </pre>
*
* <code>BIDDING_STRATEGY_MISCONFIGURED = 7;</code>
*/
public static final int BIDDING_STRATEGY_MISCONFIGURED_VALUE = 7;
/**
* <pre>
* The bidding strategy is limited by user-specified settings such as lack
* of data or similar.
* </pre>
*
* <code>BIDDING_STRATEGY_LIMITED = 8;</code>
*/
public static final int BIDDING_STRATEGY_LIMITED_VALUE = 8;
/**
* <pre>
* The automated bidding system is adjusting to user-specified changes to
* the bidding strategy.
* </pre>
*
* <code>BIDDING_STRATEGY_LEARNING = 9;</code>
*/
public static final int BIDDING_STRATEGY_LEARNING_VALUE = 9;
/**
* <pre>
* Campaign could capture more conversion value by adjusting CPA/ROAS
* targets.
* </pre>
*
* <code>BIDDING_STRATEGY_CONSTRAINED = 10;</code>
*/
public static final int BIDDING_STRATEGY_CONSTRAINED_VALUE = 10;
/**
* <pre>
* The budget is limiting the campaign's ability to serve.
* </pre>
*
* <code>BUDGET_CONSTRAINED = 11;</code>
*/
public static final int BUDGET_CONSTRAINED_VALUE = 11;
/**
* <pre>
* The budget has incorrect user-specified settings.
* </pre>
*
* <code>BUDGET_MISCONFIGURED = 12;</code>
*/
public static final int BUDGET_MISCONFIGURED_VALUE = 12;
/**
* <pre>
* Campaign is not targeting all relevant queries.
* </pre>
*
* <code>SEARCH_VOLUME_LIMITED = 13;</code>
*/
public static final int SEARCH_VOLUME_LIMITED_VALUE = 13;
/**
* <pre>
* The user-specified ad group statuses are all paused.
* </pre>
*
* <code>AD_GROUPS_PAUSED = 14;</code>
*/
public static final int AD_GROUPS_PAUSED_VALUE = 14;
/**
* <pre>
* No eligible ad groups exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUPS = 15;</code>
*/
public static final int NO_AD_GROUPS_VALUE = 15;
/**
* <pre>
* The user-specified keyword statuses are all paused.
* </pre>
*
* <code>KEYWORDS_PAUSED = 16;</code>
*/
public static final int KEYWORDS_PAUSED_VALUE = 16;
/**
* <pre>
* No eligible keywords exist in this campaign.
* </pre>
*
* <code>NO_KEYWORDS = 17;</code>
*/
public static final int NO_KEYWORDS_VALUE = 17;
/**
* <pre>
* The user-specified ad group ad statuses are all paused.
* </pre>
*
* <code>AD_GROUP_ADS_PAUSED = 18;</code>
*/
public static final int AD_GROUP_ADS_PAUSED_VALUE = 18;
/**
* <pre>
* No eligible ad group ads exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUP_ADS = 19;</code>
*/
public static final int NO_AD_GROUP_ADS_VALUE = 19;
/**
* <pre>
* At least one ad in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ADS_LIMITED_BY_POLICY = 20;</code>
*/
public static final int HAS_ADS_LIMITED_BY_POLICY_VALUE = 20;
/**
* <pre>
* At least one ad in this campaign is disapproved.
* </pre>
*
* <code>HAS_ADS_DISAPPROVED = 21;</code>
*/
public static final int HAS_ADS_DISAPPROVED_VALUE = 21;
/**
* <pre>
* Most ads in this campaign are pending review.
* </pre>
*
* <code>MOST_ADS_UNDER_REVIEW = 22;</code>
*/
public static final int MOST_ADS_UNDER_REVIEW_VALUE = 22;
/**
* <pre>
* The campaign has a lead form goal, and the lead form extension is
* missing.
* </pre>
*
* <code>MISSING_LEAD_FORM_EXTENSION = 23;</code>
*/
public static final int MISSING_LEAD_FORM_EXTENSION_VALUE = 23;
/**
* <pre>
* The campaign has a call goal, and the call extension is missing.
* </pre>
*
* <code>MISSING_CALL_EXTENSION = 24;</code>
*/
public static final int MISSING_CALL_EXTENSION_VALUE = 24;
/**
* <pre>
* The lead form extension is under review.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_UNDER_REVIEW = 25;</code>
*/
public static final int LEAD_FORM_EXTENSION_UNDER_REVIEW_VALUE = 25;
/**
* <pre>
* The lead extension is disapproved.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_DISAPPROVED = 26;</code>
*/
public static final int LEAD_FORM_EXTENSION_DISAPPROVED_VALUE = 26;
/**
* <pre>
* The call extension is under review.
* </pre>
*
* <code>CALL_EXTENSION_UNDER_REVIEW = 27;</code>
*/
public static final int CALL_EXTENSION_UNDER_REVIEW_VALUE = 27;
/**
* <pre>
* The call extension is disapproved.
* </pre>
*
* <code>CALL_EXTENSION_DISAPPROVED = 28;</code>
*/
public static final int CALL_EXTENSION_DISAPPROVED_VALUE = 28;
/**
* <pre>
* No eligible mobile application ad group criteria exist in this campaign.
* </pre>
*
* <code>NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA = 29;</code>
*/
public static final int NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA_VALUE = 29;
/**
* <pre>
* The user-specified campaign group status is paused.
* </pre>
*
* <code>CAMPAIGN_GROUP_PAUSED = 30;</code>
*/
public static final int CAMPAIGN_GROUP_PAUSED_VALUE = 30;
/**
* <pre>
* The user-specified times of all group budgets associated with the parent
* campaign group has passed.
* </pre>
*
* <code>CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED = 31;</code>
*/
public static final int CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED_VALUE = 31;
/**
* <pre>
* The app associated with this ACi campaign is not released in the target
* countries of the campaign.
* </pre>
*
* <code>APP_NOT_RELEASED = 32;</code>
*/
public static final int APP_NOT_RELEASED_VALUE = 32;
/**
* <pre>
* The app associated with this ACi campaign is partially released in the
* target countries of the campaign.
* </pre>
*
* <code>APP_PARTIALLY_RELEASED = 33;</code>
*/
public static final int APP_PARTIALLY_RELEASED_VALUE = 33;
/**
* <pre>
* At least one asset group in this campaign is disapproved.
* </pre>
*
* <code>HAS_ASSET_GROUPS_DISAPPROVED = 34;</code>
*/
public static final int HAS_ASSET_GROUPS_DISAPPROVED_VALUE = 34;
/**
* <pre>
* At least one asset group in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ASSET_GROUPS_LIMITED_BY_POLICY = 35;</code>
*/
public static final int HAS_ASSET_GROUPS_LIMITED_BY_POLICY_VALUE = 35;
/**
* <pre>
* Most asset groups in this campaign are pending review.
* </pre>
*
* <code>MOST_ASSET_GROUPS_UNDER_REVIEW = 36;</code>
*/
public static final int MOST_ASSET_GROUPS_UNDER_REVIEW_VALUE = 36;
/**
* <pre>
* No eligible asset groups exist in this campaign.
* </pre>
*
* <code>NO_ASSET_GROUPS = 37;</code>
*/
public static final int NO_ASSET_GROUPS_VALUE = 37;
/**
* <pre>
* All asset groups in this campaign are paused.
* </pre>
*
* <code>ASSET_GROUPS_PAUSED = 38;</code>
*/
public static final int ASSET_GROUPS_PAUSED_VALUE = 38;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static CampaignPrimaryStatusReason valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static CampaignPrimaryStatusReason forNumber(int value) {
switch (value) {
case 0: return UNSPECIFIED;
case 1: return UNKNOWN;
case 2: return CAMPAIGN_REMOVED;
case 3: return CAMPAIGN_PAUSED;
case 4: return CAMPAIGN_PENDING;
case 5: return CAMPAIGN_ENDED;
case 6: return CAMPAIGN_DRAFT;
case 7: return BIDDING_STRATEGY_MISCONFIGURED;
case 8: return BIDDING_STRATEGY_LIMITED;
case 9: return BIDDING_STRATEGY_LEARNING;
case 10: return BIDDING_STRATEGY_CONSTRAINED;
case 11: return BUDGET_CONSTRAINED;
case 12: return BUDGET_MISCONFIGURED;
case 13: return SEARCH_VOLUME_LIMITED;
case 14: return AD_GROUPS_PAUSED;
case 15: return NO_AD_GROUPS;
case 16: return KEYWORDS_PAUSED;
case 17: return NO_KEYWORDS;
case 18: return AD_GROUP_ADS_PAUSED;
case 19: return NO_AD_GROUP_ADS;
case 20: return HAS_ADS_LIMITED_BY_POLICY;
case 21: return HAS_ADS_DISAPPROVED;
case 22: return MOST_ADS_UNDER_REVIEW;
case 23: return MISSING_LEAD_FORM_EXTENSION;
case 24: return MISSING_CALL_EXTENSION;
case 25: return LEAD_FORM_EXTENSION_UNDER_REVIEW;
case 26: return LEAD_FORM_EXTENSION_DISAPPROVED;
case 27: return CALL_EXTENSION_UNDER_REVIEW;
case 28: return CALL_EXTENSION_DISAPPROVED;
case 29: return NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA;
case 30: return CAMPAIGN_GROUP_PAUSED;
case 31: return CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED;
case 32: return APP_NOT_RELEASED;
case 33: return APP_PARTIALLY_RELEASED;
case 34: return HAS_ASSET_GROUPS_DISAPPROVED;
case 35: return HAS_ASSET_GROUPS_LIMITED_BY_POLICY;
case 36: return MOST_ASSET_GROUPS_UNDER_REVIEW;
case 37: return NO_ASSET_GROUPS;
case 38: return ASSET_GROUPS_PAUSED;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<CampaignPrimaryStatusReason>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
CampaignPrimaryStatusReason> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<CampaignPrimaryStatusReason>() {
public CampaignPrimaryStatusReason findValueByNumber(int number) {
return CampaignPrimaryStatusReason.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum.getDescriptor().getEnumTypes().get(0);
}
private static final CampaignPrimaryStatusReason[] VALUES = values();
public static CampaignPrimaryStatusReason valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private CampaignPrimaryStatusReason(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum.CampaignPrimaryStatusReason)
}
// Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No fields to validate in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
  // Serializes the message. Only unknown fields are written because this
  // message declares no fields of its own.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the serialized byte size;
  // only unknown fields contribute since no fields are declared.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: with no declared fields, two instances are equal
  // exactly when their unknown-field sets are equal.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum other = (com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum) obj;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixes the message descriptor's hash with the unknown fields'
  // hash; the result is memoized (0 is used as the "not computed" marker).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parsing entry points. The byte-based overloads delegate directly
  // to PARSER; the stream-based overloads go through the GeneratedMessageV3
  // helpers, which translate parser IOExceptions consistently. The
  // "Delimited" variants read a varint length prefix before the message.
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factories. All builders are derived from the default instance;
  // toBuilder() skips the mergeFrom step for the (empty) default instance.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  // Internal variant used by the runtime to create builders parented to an
  // enclosing builder (for nested-message change propagation).
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Container for enum describing possible campaign primary status reasons.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum)
com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnumOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v19_enums_CampaignPrimaryStatusReasonEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v19_enums_CampaignPrimaryStatusReasonEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum.class, com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum.Builder.class);
}
// Construct using com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v19_enums_CampaignPrimaryStatusReasonEnum_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum getDefaultInstanceForType() {
return com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum build() {
com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    // Builds without the required-field check of build(). The message has
    // no declared fields, so this just constructs from the builder state.
    @java.lang.Override
    public com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum buildPartial() {
      com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum result = new com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum(this);
      onBuilt();
      return result;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    // Dispatches to the type-specific merge when the argument is the same
    // generated type; otherwise falls back to the reflective merge in the
    // superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum) {
        return mergeFrom((com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Type-specific merge: with no declared fields, merging only carries
    // over the other message's unknown fields. Merging the default instance
    // is a no-op.
    public Builder mergeFrom(com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum other) {
      if (other == com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Wire-format merge. Reads tags until end of input (tag 0) or an
    // end-group tag; every tag is routed to parseUnknownField because this
    // message declares no fields. onChanged() fires even on parse failure
    // (finally) so parent builders observe partial state.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum)
  // Singleton default (empty) instance, created eagerly at class load and
  // shared by newBuilder()/getDefaultInstanceForType().
  private static final com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum();
  }
  public static com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser instance. Parsing delegates to the Builder's stream merge;
  // on failure the partially built message is attached to the thrown
  // InvalidProtocolBufferException for diagnostics.
  private static final com.google.protobuf.Parser<CampaignPrimaryStatusReasonEnum>
      PARSER = new com.google.protobuf.AbstractParser<CampaignPrimaryStatusReasonEnum>() {
    @java.lang.Override
    public CampaignPrimaryStatusReasonEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  // Accessors exposing the shared PARSER and the default instance.
  public static com.google.protobuf.Parser<CampaignPrimaryStatusReasonEnum> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CampaignPrimaryStatusReasonEnum> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v19.enums.CampaignPrimaryStatusReasonEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v20/enums/campaign_primary_status_reason.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.enums;
/**
* <pre>
* Container for enum describing possible campaign primary status reasons.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum}
*/
public final class CampaignPrimaryStatusReasonEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum)
CampaignPrimaryStatusReasonEnumOrBuilder {
private static final long serialVersionUID = 0L;
  // Constructors are private: instances are obtained via newBuilder() or
  // the static parseFrom methods.
  // Use CampaignPrimaryStatusReasonEnum.newBuilder() to construct.
  private CampaignPrimaryStatusReasonEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private CampaignPrimaryStatusReasonEnum() {
  }
  // Runtime hook used by GeneratedMessageV3 to create fresh instances
  // without going through a public constructor.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new CampaignPrimaryStatusReasonEnum();
  }
  // Message descriptor and reflection plumbing, backed by the statics
  // initialized in CampaignPrimaryStatusReasonProto.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v20_enums_CampaignPrimaryStatusReasonEnum_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v20_enums_CampaignPrimaryStatusReasonEnum_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum.class, com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum.Builder.class);
  }
/**
* <pre>
* Enum describing the possible campaign primary status reasons. Provides
* insight into why a campaign is not serving or not serving optimally. These
* reasons are aggregated to determine an overall campaign primary status.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum.CampaignPrimaryStatusReason}
*/
public enum CampaignPrimaryStatusReason
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* The user-specified campaign status is removed.
* </pre>
*
* <code>CAMPAIGN_REMOVED = 2;</code>
*/
CAMPAIGN_REMOVED(2),
/**
* <pre>
* The user-specified campaign status is paused.
* </pre>
*
* <code>CAMPAIGN_PAUSED = 3;</code>
*/
CAMPAIGN_PAUSED(3),
/**
* <pre>
* The user-specified time for this campaign to start is in the future.
* </pre>
*
* <code>CAMPAIGN_PENDING = 4;</code>
*/
CAMPAIGN_PENDING(4),
/**
* <pre>
* The user-specified time for this campaign to end has passed.
* </pre>
*
* <code>CAMPAIGN_ENDED = 5;</code>
*/
CAMPAIGN_ENDED(5),
/**
* <pre>
* The campaign is a draft.
* </pre>
*
* <code>CAMPAIGN_DRAFT = 6;</code>
*/
CAMPAIGN_DRAFT(6),
/**
* <pre>
* The bidding strategy has incorrect user-specified settings.
* </pre>
*
* <code>BIDDING_STRATEGY_MISCONFIGURED = 7;</code>
*/
BIDDING_STRATEGY_MISCONFIGURED(7),
/**
* <pre>
* The bidding strategy is limited by user-specified settings such as lack
* of data or similar.
* </pre>
*
* <code>BIDDING_STRATEGY_LIMITED = 8;</code>
*/
BIDDING_STRATEGY_LIMITED(8),
/**
* <pre>
* The automated bidding system is adjusting to user-specified changes to
* the bidding strategy.
* </pre>
*
* <code>BIDDING_STRATEGY_LEARNING = 9;</code>
*/
BIDDING_STRATEGY_LEARNING(9),
/**
* <pre>
* Campaign could capture more conversion value by adjusting CPA/ROAS
* targets.
* </pre>
*
* <code>BIDDING_STRATEGY_CONSTRAINED = 10;</code>
*/
BIDDING_STRATEGY_CONSTRAINED(10),
/**
* <pre>
* The budget is limiting the campaign's ability to serve.
* </pre>
*
* <code>BUDGET_CONSTRAINED = 11;</code>
*/
BUDGET_CONSTRAINED(11),
/**
* <pre>
* The budget has incorrect user-specified settings.
* </pre>
*
* <code>BUDGET_MISCONFIGURED = 12;</code>
*/
BUDGET_MISCONFIGURED(12),
/**
* <pre>
* Campaign is not targeting all relevant queries.
* </pre>
*
* <code>SEARCH_VOLUME_LIMITED = 13;</code>
*/
SEARCH_VOLUME_LIMITED(13),
/**
* <pre>
* The user-specified ad group statuses are all paused.
* </pre>
*
* <code>AD_GROUPS_PAUSED = 14;</code>
*/
AD_GROUPS_PAUSED(14),
/**
* <pre>
* No eligible ad groups exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUPS = 15;</code>
*/
NO_AD_GROUPS(15),
/**
* <pre>
* The user-specified keyword statuses are all paused.
* </pre>
*
* <code>KEYWORDS_PAUSED = 16;</code>
*/
KEYWORDS_PAUSED(16),
/**
* <pre>
* No eligible keywords exist in this campaign.
* </pre>
*
* <code>NO_KEYWORDS = 17;</code>
*/
NO_KEYWORDS(17),
/**
* <pre>
* The user-specified ad group ad statuses are all paused.
* </pre>
*
* <code>AD_GROUP_ADS_PAUSED = 18;</code>
*/
AD_GROUP_ADS_PAUSED(18),
/**
* <pre>
* No eligible ad group ads exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUP_ADS = 19;</code>
*/
NO_AD_GROUP_ADS(19),
/**
* <pre>
* At least one ad in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ADS_LIMITED_BY_POLICY = 20;</code>
*/
HAS_ADS_LIMITED_BY_POLICY(20),
/**
* <pre>
* At least one ad in this campaign is disapproved.
* </pre>
*
* <code>HAS_ADS_DISAPPROVED = 21;</code>
*/
HAS_ADS_DISAPPROVED(21),
/**
* <pre>
* Most ads in this campaign are pending review.
* </pre>
*
* <code>MOST_ADS_UNDER_REVIEW = 22;</code>
*/
MOST_ADS_UNDER_REVIEW(22),
/**
* <pre>
* The campaign has a lead form goal, and the lead form extension is
* missing.
* </pre>
*
* <code>MISSING_LEAD_FORM_EXTENSION = 23;</code>
*/
MISSING_LEAD_FORM_EXTENSION(23),
/**
* <pre>
* The campaign has a call goal, and the call extension is missing.
* </pre>
*
* <code>MISSING_CALL_EXTENSION = 24;</code>
*/
MISSING_CALL_EXTENSION(24),
/**
* <pre>
* The lead form extension is under review.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_UNDER_REVIEW = 25;</code>
*/
LEAD_FORM_EXTENSION_UNDER_REVIEW(25),
/**
* <pre>
* The lead extension is disapproved.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_DISAPPROVED = 26;</code>
*/
LEAD_FORM_EXTENSION_DISAPPROVED(26),
/**
* <pre>
* The call extension is under review.
* </pre>
*
* <code>CALL_EXTENSION_UNDER_REVIEW = 27;</code>
*/
CALL_EXTENSION_UNDER_REVIEW(27),
/**
* <pre>
* The call extension is disapproved.
* </pre>
*
* <code>CALL_EXTENSION_DISAPPROVED = 28;</code>
*/
CALL_EXTENSION_DISAPPROVED(28),
/**
* <pre>
* No eligible mobile application ad group criteria exist in this campaign.
* </pre>
*
* <code>NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA = 29;</code>
*/
NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA(29),
/**
* <pre>
* The user-specified campaign group status is paused.
* </pre>
*
* <code>CAMPAIGN_GROUP_PAUSED = 30;</code>
*/
CAMPAIGN_GROUP_PAUSED(30),
/**
* <pre>
* The user-specified times of all group budgets associated with the parent
* campaign group has passed.
* </pre>
*
* <code>CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED = 31;</code>
*/
CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED(31),
/**
* <pre>
* The app associated with this ACi campaign is not released in the target
* countries of the campaign.
* </pre>
*
* <code>APP_NOT_RELEASED = 32;</code>
*/
APP_NOT_RELEASED(32),
/**
* <pre>
* The app associated with this ACi campaign is partially released in the
* target countries of the campaign.
* </pre>
*
* <code>APP_PARTIALLY_RELEASED = 33;</code>
*/
APP_PARTIALLY_RELEASED(33),
/**
* <pre>
* At least one asset group in this campaign is disapproved.
* </pre>
*
* <code>HAS_ASSET_GROUPS_DISAPPROVED = 34;</code>
*/
HAS_ASSET_GROUPS_DISAPPROVED(34),
/**
* <pre>
* At least one asset group in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ASSET_GROUPS_LIMITED_BY_POLICY = 35;</code>
*/
HAS_ASSET_GROUPS_LIMITED_BY_POLICY(35),
/**
* <pre>
* Most asset groups in this campaign are pending review.
* </pre>
*
* <code>MOST_ASSET_GROUPS_UNDER_REVIEW = 36;</code>
*/
MOST_ASSET_GROUPS_UNDER_REVIEW(36),
/**
* <pre>
* No eligible asset groups exist in this campaign.
* </pre>
*
* <code>NO_ASSET_GROUPS = 37;</code>
*/
NO_ASSET_GROUPS(37),
/**
* <pre>
* All asset groups in this campaign are paused.
* </pre>
*
* <code>ASSET_GROUPS_PAUSED = 38;</code>
*/
ASSET_GROUPS_PAUSED(38),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* The user-specified campaign status is removed.
* </pre>
*
* <code>CAMPAIGN_REMOVED = 2;</code>
*/
public static final int CAMPAIGN_REMOVED_VALUE = 2;
/**
* <pre>
* The user-specified campaign status is paused.
* </pre>
*
* <code>CAMPAIGN_PAUSED = 3;</code>
*/
public static final int CAMPAIGN_PAUSED_VALUE = 3;
/**
* <pre>
* The user-specified time for this campaign to start is in the future.
* </pre>
*
* <code>CAMPAIGN_PENDING = 4;</code>
*/
public static final int CAMPAIGN_PENDING_VALUE = 4;
/**
* <pre>
* The user-specified time for this campaign to end has passed.
* </pre>
*
* <code>CAMPAIGN_ENDED = 5;</code>
*/
public static final int CAMPAIGN_ENDED_VALUE = 5;
/**
* <pre>
* The campaign is a draft.
* </pre>
*
* <code>CAMPAIGN_DRAFT = 6;</code>
*/
public static final int CAMPAIGN_DRAFT_VALUE = 6;
/**
* <pre>
* The bidding strategy has incorrect user-specified settings.
* </pre>
*
* <code>BIDDING_STRATEGY_MISCONFIGURED = 7;</code>
*/
public static final int BIDDING_STRATEGY_MISCONFIGURED_VALUE = 7;
/**
* <pre>
* The bidding strategy is limited by user-specified settings such as lack
* of data or similar.
* </pre>
*
* <code>BIDDING_STRATEGY_LIMITED = 8;</code>
*/
public static final int BIDDING_STRATEGY_LIMITED_VALUE = 8;
/**
* <pre>
* The automated bidding system is adjusting to user-specified changes to
* the bidding strategy.
* </pre>
*
* <code>BIDDING_STRATEGY_LEARNING = 9;</code>
*/
public static final int BIDDING_STRATEGY_LEARNING_VALUE = 9;
/**
* <pre>
* Campaign could capture more conversion value by adjusting CPA/ROAS
* targets.
* </pre>
*
* <code>BIDDING_STRATEGY_CONSTRAINED = 10;</code>
*/
public static final int BIDDING_STRATEGY_CONSTRAINED_VALUE = 10;
/**
* <pre>
* The budget is limiting the campaign's ability to serve.
* </pre>
*
* <code>BUDGET_CONSTRAINED = 11;</code>
*/
public static final int BUDGET_CONSTRAINED_VALUE = 11;
/**
* <pre>
* The budget has incorrect user-specified settings.
* </pre>
*
* <code>BUDGET_MISCONFIGURED = 12;</code>
*/
public static final int BUDGET_MISCONFIGURED_VALUE = 12;
/**
* <pre>
* Campaign is not targeting all relevant queries.
* </pre>
*
* <code>SEARCH_VOLUME_LIMITED = 13;</code>
*/
public static final int SEARCH_VOLUME_LIMITED_VALUE = 13;
/**
* <pre>
* The user-specified ad group statuses are all paused.
* </pre>
*
* <code>AD_GROUPS_PAUSED = 14;</code>
*/
public static final int AD_GROUPS_PAUSED_VALUE = 14;
/**
* <pre>
* No eligible ad groups exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUPS = 15;</code>
*/
public static final int NO_AD_GROUPS_VALUE = 15;
/**
* <pre>
* The user-specified keyword statuses are all paused.
* </pre>
*
* <code>KEYWORDS_PAUSED = 16;</code>
*/
public static final int KEYWORDS_PAUSED_VALUE = 16;
/**
* <pre>
* No eligible keywords exist in this campaign.
* </pre>
*
* <code>NO_KEYWORDS = 17;</code>
*/
public static final int NO_KEYWORDS_VALUE = 17;
/**
* <pre>
* The user-specified ad group ad statuses are all paused.
* </pre>
*
* <code>AD_GROUP_ADS_PAUSED = 18;</code>
*/
public static final int AD_GROUP_ADS_PAUSED_VALUE = 18;
/**
* <pre>
* No eligible ad group ads exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUP_ADS = 19;</code>
*/
public static final int NO_AD_GROUP_ADS_VALUE = 19;
/**
* <pre>
* At least one ad in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ADS_LIMITED_BY_POLICY = 20;</code>
*/
public static final int HAS_ADS_LIMITED_BY_POLICY_VALUE = 20;
/**
* <pre>
* At least one ad in this campaign is disapproved.
* </pre>
*
* <code>HAS_ADS_DISAPPROVED = 21;</code>
*/
public static final int HAS_ADS_DISAPPROVED_VALUE = 21;
/**
* <pre>
* Most ads in this campaign are pending review.
* </pre>
*
* <code>MOST_ADS_UNDER_REVIEW = 22;</code>
*/
public static final int MOST_ADS_UNDER_REVIEW_VALUE = 22;
/**
* <pre>
* The campaign has a lead form goal, and the lead form extension is
* missing.
* </pre>
*
* <code>MISSING_LEAD_FORM_EXTENSION = 23;</code>
*/
public static final int MISSING_LEAD_FORM_EXTENSION_VALUE = 23;
/**
* <pre>
* The campaign has a call goal, and the call extension is missing.
* </pre>
*
* <code>MISSING_CALL_EXTENSION = 24;</code>
*/
public static final int MISSING_CALL_EXTENSION_VALUE = 24;
/**
* <pre>
* The lead form extension is under review.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_UNDER_REVIEW = 25;</code>
*/
public static final int LEAD_FORM_EXTENSION_UNDER_REVIEW_VALUE = 25;
/**
* <pre>
* The lead extension is disapproved.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_DISAPPROVED = 26;</code>
*/
public static final int LEAD_FORM_EXTENSION_DISAPPROVED_VALUE = 26;
/**
* <pre>
* The call extension is under review.
* </pre>
*
* <code>CALL_EXTENSION_UNDER_REVIEW = 27;</code>
*/
public static final int CALL_EXTENSION_UNDER_REVIEW_VALUE = 27;
/**
* <pre>
* The call extension is disapproved.
* </pre>
*
* <code>CALL_EXTENSION_DISAPPROVED = 28;</code>
*/
public static final int CALL_EXTENSION_DISAPPROVED_VALUE = 28;
/**
* <pre>
* No eligible mobile application ad group criteria exist in this campaign.
* </pre>
*
* <code>NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA = 29;</code>
*/
public static final int NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA_VALUE = 29;
/**
* <pre>
* The user-specified campaign group status is paused.
* </pre>
*
* <code>CAMPAIGN_GROUP_PAUSED = 30;</code>
*/
public static final int CAMPAIGN_GROUP_PAUSED_VALUE = 30;
/**
* <pre>
* The user-specified times of all group budgets associated with the parent
* campaign group has passed.
* </pre>
*
* <code>CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED = 31;</code>
*/
public static final int CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED_VALUE = 31;
/**
* <pre>
* The app associated with this ACi campaign is not released in the target
* countries of the campaign.
* </pre>
*
* <code>APP_NOT_RELEASED = 32;</code>
*/
public static final int APP_NOT_RELEASED_VALUE = 32;
/**
* <pre>
* The app associated with this ACi campaign is partially released in the
* target countries of the campaign.
* </pre>
*
* <code>APP_PARTIALLY_RELEASED = 33;</code>
*/
public static final int APP_PARTIALLY_RELEASED_VALUE = 33;
/**
* <pre>
* At least one asset group in this campaign is disapproved.
* </pre>
*
* <code>HAS_ASSET_GROUPS_DISAPPROVED = 34;</code>
*/
public static final int HAS_ASSET_GROUPS_DISAPPROVED_VALUE = 34;
/**
* <pre>
* At least one asset group in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ASSET_GROUPS_LIMITED_BY_POLICY = 35;</code>
*/
public static final int HAS_ASSET_GROUPS_LIMITED_BY_POLICY_VALUE = 35;
/**
* <pre>
* Most asset groups in this campaign are pending review.
* </pre>
*
* <code>MOST_ASSET_GROUPS_UNDER_REVIEW = 36;</code>
*/
public static final int MOST_ASSET_GROUPS_UNDER_REVIEW_VALUE = 36;
/**
* <pre>
* No eligible asset groups exist in this campaign.
* </pre>
*
* <code>NO_ASSET_GROUPS = 37;</code>
*/
public static final int NO_ASSET_GROUPS_VALUE = 37;
/**
* <pre>
* All asset groups in this campaign are paused.
* </pre>
*
* <code>ASSET_GROUPS_PAUSED = 38;</code>
*/
public static final int ASSET_GROUPS_PAUSED_VALUE = 38;
    // Returns the proto wire number of this value. UNRECOGNIZED is a
    // client-side sentinel carrying no wire number, so it is rejected.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static CampaignPrimaryStatusReason valueOf(int value) {
return forNumber(value);
}
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or
     *     {@code null} if the number matches no known entry (callers such as
     *     {@code findValueByNumber} rely on this null contract).
     */
    public static CampaignPrimaryStatusReason forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 2: return CAMPAIGN_REMOVED;
        case 3: return CAMPAIGN_PAUSED;
        case 4: return CAMPAIGN_PENDING;
        case 5: return CAMPAIGN_ENDED;
        case 6: return CAMPAIGN_DRAFT;
        case 7: return BIDDING_STRATEGY_MISCONFIGURED;
        case 8: return BIDDING_STRATEGY_LIMITED;
        case 9: return BIDDING_STRATEGY_LEARNING;
        case 10: return BIDDING_STRATEGY_CONSTRAINED;
        case 11: return BUDGET_CONSTRAINED;
        case 12: return BUDGET_MISCONFIGURED;
        case 13: return SEARCH_VOLUME_LIMITED;
        case 14: return AD_GROUPS_PAUSED;
        case 15: return NO_AD_GROUPS;
        case 16: return KEYWORDS_PAUSED;
        case 17: return NO_KEYWORDS;
        case 18: return AD_GROUP_ADS_PAUSED;
        case 19: return NO_AD_GROUP_ADS;
        case 20: return HAS_ADS_LIMITED_BY_POLICY;
        case 21: return HAS_ADS_DISAPPROVED;
        case 22: return MOST_ADS_UNDER_REVIEW;
        case 23: return MISSING_LEAD_FORM_EXTENSION;
        case 24: return MISSING_CALL_EXTENSION;
        case 25: return LEAD_FORM_EXTENSION_UNDER_REVIEW;
        case 26: return LEAD_FORM_EXTENSION_DISAPPROVED;
        case 27: return CALL_EXTENSION_UNDER_REVIEW;
        case 28: return CALL_EXTENSION_DISAPPROVED;
        case 29: return NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA;
        case 30: return CAMPAIGN_GROUP_PAUSED;
        case 31: return CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED;
        case 32: return APP_NOT_RELEASED;
        case 33: return APP_PARTIALLY_RELEASED;
        case 34: return HAS_ASSET_GROUPS_DISAPPROVED;
        case 35: return HAS_ASSET_GROUPS_LIMITED_BY_POLICY;
        case 36: return MOST_ASSET_GROUPS_UNDER_REVIEW;
        case 37: return NO_ASSET_GROUPS;
        case 38: return ASSET_GROUPS_PAUSED;
        default: return null;
      }
    }
public static com.google.protobuf.Internal.EnumLiteMap<CampaignPrimaryStatusReason>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
CampaignPrimaryStatusReason> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<CampaignPrimaryStatusReason>() {
public CampaignPrimaryStatusReason findValueByNumber(int number) {
return CampaignPrimaryStatusReason.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum.getDescriptor().getEnumTypes().get(0);
}
private static final CampaignPrimaryStatusReason[] VALUES = values();
public static CampaignPrimaryStatusReason valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private CampaignPrimaryStatusReason(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum.CampaignPrimaryStatusReason)
}
// Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
/** This message type has no required fields, so it is always initialized; the answer is cached. */
@java.lang.Override
public final boolean isInitialized() {
  final byte cached = memoizedIsInitialized;
  if (cached == 1) {
    return true;
  }
  if (cached == 0) {
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
/** Serializes this message; only the unknown-field set can carry data for this empty message. */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getUnknownFields().writeTo(output);
}
/** Computes (and memoizes) the serialized byte size; -1 in the cache means "not yet computed". */
@java.lang.Override
public int getSerializedSize() {
  if (memoizedSize != -1) {
    return memoizedSize;
  }
  final int computed = getUnknownFields().getSerializedSize();
  memoizedSize = computed;
  return computed;
}
/**
 * Two instances are equal when they are the same type and carry identical
 * unknown-field sets (this message declares no regular fields).
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (this == obj) {
    return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum)) {
    return super.equals(obj);
  }
  final com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum that =
      (com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum) obj;
  return getUnknownFields().equals(that.getUnknownFields());
}
/**
 * Hash derived from the message descriptor and the unknown fields, memoized
 * after first computation (0 in the cache means "not yet computed").
 */
@java.lang.Override
public int hashCode() {
  int cached = memoizedHashCode;
  if (cached != 0) {
    return cached;
  }
  cached = 41;
  cached = (19 * cached) + getDescriptor().hashCode();
  cached = (29 * cached) + getUnknownFields().hashCode();
  memoizedHashCode = cached;
  return cached;
}
// ---------------------------------------------------------------------------
// Static parse entry points. Every overload delegates to PARSER; the
// stream-based variants route through GeneratedMessageV3 helpers so plain
// IOExceptions propagate to the caller instead of being wrapped.
// ---------------------------------------------------------------------------
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Fresh builder seeded from the canonical empty instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Fresh builder pre-populated with the prototype's contents.
public static Builder newBuilder(com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Skip the mergeFrom when converting the canonical empty instance.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * <pre>
 * Container for enum describing possible campaign primary status reasons.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum}
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum)
com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnumOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v20_enums_CampaignPrimaryStatusReasonEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v20_enums_CampaignPrimaryStatusReasonEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum.class, com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum.Builder.class);
}
// Construct using com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v20_enums_CampaignPrimaryStatusReasonEnum_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum getDefaultInstanceForType() {
return com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum.getDefaultInstance();
}
// Builds the message, throwing if it is not fully initialized
// (never the case here: this message type has no required fields).
@java.lang.Override
public com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum build() {
com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the initialization check; used internally by parsers so
// a partially-read message can be attached to a thrown exception.
@java.lang.Override
public com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum buildPartial() {
com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum result = new com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the type-specific merge when possible; otherwise falls
// back to the reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum) {
return mergeFrom((com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Only unknown fields can carry data for this empty message type.
public Builder mergeFrom(com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum other) {
if (other == com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Reads tags until end-of-message; every field is unknown here, so each
// tag is either stored as an unknown field or ends the message.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum)
// Canonical empty instance, created eagerly during class initialization.
private static final com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum();
}
public static com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser backing all parseFrom overloads. It parses through a Builder so
// that whatever was read before a failure can be attached to the exception.
private static final com.google.protobuf.Parser<CampaignPrimaryStatusReasonEnum>
PARSER = new com.google.protobuf.AbstractParser<CampaignPrimaryStatusReasonEnum>() {
@java.lang.Override
public CampaignPrimaryStatusReasonEnum parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Attach the partially parsed message before rethrowing.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures in the protobuf-specific exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CampaignPrimaryStatusReasonEnum> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CampaignPrimaryStatusReasonEnum> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v20.enums.CampaignPrimaryStatusReasonEnum getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ---------------------------------------------------------------------------
// NOTE(review): the two lines below were dataset-concatenation residue
// ("repo | size | path" metadata) fused with the next file's license opener.
// Converted to a comment so the file boundary stays documented and valid:
// next file is googleapis/google-api-java-client-services,
// clients/google-api-services-sheets/v4/1.26.0/com/google/api/services/sheets/v4/model/Request.java
// ---------------------------------------------------------------------------
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.sheets.v4.model;
/**
* A single kind of update to apply to a spreadsheet.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Google Sheets API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Request extends com.google.api.client.json.GenericJson {
/**
* Adds a new banded range
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddBandingRequest addBanding;
/**
* Adds a chart.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddChartRequest addChart;
/**
* Adds a new conditional format rule.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddConditionalFormatRuleRequest addConditionalFormatRule;
/**
* Creates a group over the specified range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddDimensionGroupRequest addDimensionGroup;
/**
* Adds a filter view.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddFilterViewRequest addFilterView;
/**
* Adds a named range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddNamedRangeRequest addNamedRange;
/**
* Adds a protected range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddProtectedRangeRequest addProtectedRange;
/**
* Adds a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddSheetRequest addSheet;
/**
* Appends cells after the last row with data in a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AppendCellsRequest appendCells;
/**
* Appends dimensions to the end of a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AppendDimensionRequest appendDimension;
/**
* Automatically fills in more data based on existing data.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AutoFillRequest autoFill;
/**
* Automatically resizes one or more dimensions based on the contents of the cells in that
* dimension.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AutoResizeDimensionsRequest autoResizeDimensions;
/**
* Clears the basic filter on a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClearBasicFilterRequest clearBasicFilter;
/**
* Copies data from one area and pastes it to another.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CopyPasteRequest copyPaste;
/**
* Creates new developer metadata
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CreateDeveloperMetadataRequest createDeveloperMetadata;
/**
* Cuts data from one area and pastes it to another.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CutPasteRequest cutPaste;
/**
* Removes a banded range
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteBandingRequest deleteBanding;
/**
* Deletes an existing conditional format rule.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteConditionalFormatRuleRequest deleteConditionalFormatRule;
/**
* Deletes developer metadata
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteDeveloperMetadataRequest deleteDeveloperMetadata;
/**
* Deletes rows or columns in a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteDimensionRequest deleteDimension;
/**
* Deletes a group over the specified range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteDimensionGroupRequest deleteDimensionGroup;
/**
* Deletes an embedded object (e.g, chart, image) in a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteEmbeddedObjectRequest deleteEmbeddedObject;
/**
* Deletes a filter view from a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteFilterViewRequest deleteFilterView;
/**
* Deletes a named range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteNamedRangeRequest deleteNamedRange;
/**
* Deletes a protected range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteProtectedRangeRequest deleteProtectedRange;
/**
* Deletes a range of cells from a sheet, shifting the remaining cells.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteRangeRequest deleteRange;
/**
* Deletes a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteSheetRequest deleteSheet;
/**
* Duplicates a filter view.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DuplicateFilterViewRequest duplicateFilterView;
/**
* Duplicates a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DuplicateSheetRequest duplicateSheet;
/**
* Finds and replaces occurrences of some text with other text.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private FindReplaceRequest findReplace;
/**
* Inserts new rows or columns in a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private InsertDimensionRequest insertDimension;
/**
* Inserts new cells in a sheet, shifting the existing cells.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private InsertRangeRequest insertRange;
/**
* Merges cells together.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private MergeCellsRequest mergeCells;
/**
* Moves rows or columns to another location in a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private MoveDimensionRequest moveDimension;
/**
* Pastes data (HTML or delimited) into a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private PasteDataRequest pasteData;
/**
* Randomizes the order of the rows in a range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private RandomizeRangeRequest randomizeRange;
/**
* Repeats a single cell across a range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private RepeatCellRequest repeatCell;
/**
* Sets the basic filter on a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private SetBasicFilterRequest setBasicFilter;
/**
* Sets data validation for one or more cells.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private SetDataValidationRequest setDataValidation;
/**
* Sorts data in a range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private SortRangeRequest sortRange;
/**
* Converts a column of text into many columns of text.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TextToColumnsRequest textToColumns;
/**
* Unmerges merged cells.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UnmergeCellsRequest unmergeCells;
/**
* Updates a banded range
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateBandingRequest updateBanding;
/**
* Updates the borders in a range of cells.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateBordersRequest updateBorders;
/**
* Updates many cells at once.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateCellsRequest updateCells;
/**
* Updates a chart's specifications.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateChartSpecRequest updateChartSpec;
/**
* Updates an existing conditional format rule.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateConditionalFormatRuleRequest updateConditionalFormatRule;
/**
* Updates an existing developer metadata entry
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateDeveloperMetadataRequest updateDeveloperMetadata;
/**
* Updates the state of the specified group.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateDimensionGroupRequest updateDimensionGroup;
/**
* Updates dimensions' properties.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateDimensionPropertiesRequest updateDimensionProperties;
/**
* Updates an embedded object's (e.g. chart, image) position.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateEmbeddedObjectPositionRequest updateEmbeddedObjectPosition;
/**
* Updates the properties of a filter view.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateFilterViewRequest updateFilterView;
/**
* Updates a named range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateNamedRangeRequest updateNamedRange;
/**
* Updates a protected range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateProtectedRangeRequest updateProtectedRange;
/**
* Updates a sheet's properties.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateSheetPropertiesRequest updateSheetProperties;
/**
* Updates the spreadsheet's properties.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateSpreadsheetPropertiesRequest updateSpreadsheetProperties;
/**
* Adds a new banded range
* @return value or {@code null} for none
*/
public AddBandingRequest getAddBanding() {
return addBanding;
}
/**
* Adds a new banded range
* @param addBanding addBanding or {@code null} for none
*/
public Request setAddBanding(AddBandingRequest addBanding) {
this.addBanding = addBanding;
return this;
}
/**
* Adds a chart.
* @return value or {@code null} for none
*/
public AddChartRequest getAddChart() {
return addChart;
}
/**
* Adds a chart.
* @param addChart addChart or {@code null} for none
*/
public Request setAddChart(AddChartRequest addChart) {
this.addChart = addChart;
return this;
}
/**
* Adds a new conditional format rule.
* @return value or {@code null} for none
*/
public AddConditionalFormatRuleRequest getAddConditionalFormatRule() {
return addConditionalFormatRule;
}
/**
* Adds a new conditional format rule.
* @param addConditionalFormatRule addConditionalFormatRule or {@code null} for none
*/
public Request setAddConditionalFormatRule(AddConditionalFormatRuleRequest addConditionalFormatRule) {
this.addConditionalFormatRule = addConditionalFormatRule;
return this;
}
/**
* Creates a group over the specified range.
* @return value or {@code null} for none
*/
public AddDimensionGroupRequest getAddDimensionGroup() {
return addDimensionGroup;
}
/**
* Creates a group over the specified range.
* @param addDimensionGroup addDimensionGroup or {@code null} for none
*/
public Request setAddDimensionGroup(AddDimensionGroupRequest addDimensionGroup) {
this.addDimensionGroup = addDimensionGroup;
return this;
}
/**
* Adds a filter view.
* @return value or {@code null} for none
*/
public AddFilterViewRequest getAddFilterView() {
return addFilterView;
}
/**
* Adds a filter view.
* @param addFilterView addFilterView or {@code null} for none
*/
public Request setAddFilterView(AddFilterViewRequest addFilterView) {
this.addFilterView = addFilterView;
return this;
}
/**
* Adds a named range.
* @return value or {@code null} for none
*/
public AddNamedRangeRequest getAddNamedRange() {
return addNamedRange;
}
/**
* Adds a named range.
* @param addNamedRange addNamedRange or {@code null} for none
*/
public Request setAddNamedRange(AddNamedRangeRequest addNamedRange) {
this.addNamedRange = addNamedRange;
return this;
}
/**
* Adds a protected range.
* @return value or {@code null} for none
*/
public AddProtectedRangeRequest getAddProtectedRange() {
return addProtectedRange;
}
/**
* Adds a protected range.
* @param addProtectedRange addProtectedRange or {@code null} for none
*/
public Request setAddProtectedRange(AddProtectedRangeRequest addProtectedRange) {
this.addProtectedRange = addProtectedRange;
return this;
}
/**
* Adds a sheet.
* @return value or {@code null} for none
*/
public AddSheetRequest getAddSheet() {
return addSheet;
}
/**
* Adds a sheet.
* @param addSheet addSheet or {@code null} for none
*/
public Request setAddSheet(AddSheetRequest addSheet) {
this.addSheet = addSheet;
return this;
}
/**
* Appends cells after the last row with data in a sheet.
* @return value or {@code null} for none
*/
public AppendCellsRequest getAppendCells() {
return appendCells;
}
/**
* Appends cells after the last row with data in a sheet.
* @param appendCells appendCells or {@code null} for none
*/
public Request setAppendCells(AppendCellsRequest appendCells) {
this.appendCells = appendCells;
return this;
}
/**
* Appends dimensions to the end of a sheet.
* @return value or {@code null} for none
*/
public AppendDimensionRequest getAppendDimension() {
return appendDimension;
}
/**
* Appends dimensions to the end of a sheet.
* @param appendDimension appendDimension or {@code null} for none
*/
public Request setAppendDimension(AppendDimensionRequest appendDimension) {
this.appendDimension = appendDimension;
return this;
}
/**
* Automatically fills in more data based on existing data.
* @return value or {@code null} for none
*/
public AutoFillRequest getAutoFill() {
return autoFill;
}
/**
* Automatically fills in more data based on existing data.
* @param autoFill autoFill or {@code null} for none
*/
public Request setAutoFill(AutoFillRequest autoFill) {
this.autoFill = autoFill;
return this;
}
/**
* Automatically resizes one or more dimensions based on the contents of the cells in that
* dimension.
* @return value or {@code null} for none
*/
public AutoResizeDimensionsRequest getAutoResizeDimensions() {
return autoResizeDimensions;
}
/**
* Automatically resizes one or more dimensions based on the contents of the cells in that
* dimension.
* @param autoResizeDimensions autoResizeDimensions or {@code null} for none
*/
public Request setAutoResizeDimensions(AutoResizeDimensionsRequest autoResizeDimensions) {
this.autoResizeDimensions = autoResizeDimensions;
return this;
}
/**
* Clears the basic filter on a sheet.
* @return value or {@code null} for none
*/
public ClearBasicFilterRequest getClearBasicFilter() {
return clearBasicFilter;
}
/**
* Clears the basic filter on a sheet.
* @param clearBasicFilter clearBasicFilter or {@code null} for none
*/
public Request setClearBasicFilter(ClearBasicFilterRequest clearBasicFilter) {
this.clearBasicFilter = clearBasicFilter;
return this;
}
/**
* Copies data from one area and pastes it to another.
* @return value or {@code null} for none
*/
public CopyPasteRequest getCopyPaste() {
return copyPaste;
}
/**
* Copies data from one area and pastes it to another.
* @param copyPaste copyPaste or {@code null} for none
*/
public Request setCopyPaste(CopyPasteRequest copyPaste) {
this.copyPaste = copyPaste;
return this;
}
/**
* Creates new developer metadata
* @return value or {@code null} for none
*/
public CreateDeveloperMetadataRequest getCreateDeveloperMetadata() {
return createDeveloperMetadata;
}
/**
* Creates new developer metadata
* @param createDeveloperMetadata createDeveloperMetadata or {@code null} for none
*/
public Request setCreateDeveloperMetadata(CreateDeveloperMetadataRequest createDeveloperMetadata) {
this.createDeveloperMetadata = createDeveloperMetadata;
return this;
}
/**
* Cuts data from one area and pastes it to another.
* @return value or {@code null} for none
*/
public CutPasteRequest getCutPaste() {
return cutPaste;
}
/**
* Cuts data from one area and pastes it to another.
* @param cutPaste cutPaste or {@code null} for none
*/
public Request setCutPaste(CutPasteRequest cutPaste) {
this.cutPaste = cutPaste;
return this;
}
/**
* Removes a banded range
* @return value or {@code null} for none
*/
public DeleteBandingRequest getDeleteBanding() {
return deleteBanding;
}
/**
* Removes a banded range
* @param deleteBanding deleteBanding or {@code null} for none
*/
public Request setDeleteBanding(DeleteBandingRequest deleteBanding) {
this.deleteBanding = deleteBanding;
return this;
}
/**
* Deletes an existing conditional format rule.
* @return value or {@code null} for none
*/
public DeleteConditionalFormatRuleRequest getDeleteConditionalFormatRule() {
return deleteConditionalFormatRule;
}
/**
* Deletes an existing conditional format rule.
* @param deleteConditionalFormatRule deleteConditionalFormatRule or {@code null} for none
*/
public Request setDeleteConditionalFormatRule(DeleteConditionalFormatRuleRequest deleteConditionalFormatRule) {
this.deleteConditionalFormatRule = deleteConditionalFormatRule;
return this;
}
/**
* Deletes developer metadata
* @return value or {@code null} for none
*/
public DeleteDeveloperMetadataRequest getDeleteDeveloperMetadata() {
return deleteDeveloperMetadata;
}
/**
* Deletes developer metadata
* @param deleteDeveloperMetadata deleteDeveloperMetadata or {@code null} for none
*/
public Request setDeleteDeveloperMetadata(DeleteDeveloperMetadataRequest deleteDeveloperMetadata) {
this.deleteDeveloperMetadata = deleteDeveloperMetadata;
return this;
}
/**
* Deletes rows or columns in a sheet.
* @return value or {@code null} for none
*/
public DeleteDimensionRequest getDeleteDimension() {
return deleteDimension;
}
/**
* Deletes rows or columns in a sheet.
* @param deleteDimension deleteDimension or {@code null} for none
*/
public Request setDeleteDimension(DeleteDimensionRequest deleteDimension) {
this.deleteDimension = deleteDimension;
return this;
}
/**
* Deletes a group over the specified range.
* @return value or {@code null} for none
*/
public DeleteDimensionGroupRequest getDeleteDimensionGroup() {
return deleteDimensionGroup;
}
/**
* Deletes a group over the specified range.
* @param deleteDimensionGroup deleteDimensionGroup or {@code null} for none
*/
public Request setDeleteDimensionGroup(DeleteDimensionGroupRequest deleteDimensionGroup) {
this.deleteDimensionGroup = deleteDimensionGroup;
return this;
}
/**
* Deletes an embedded object (e.g, chart, image) in a sheet.
* @return value or {@code null} for none
*/
public DeleteEmbeddedObjectRequest getDeleteEmbeddedObject() {
return deleteEmbeddedObject;
}
/**
* Deletes an embedded object (e.g, chart, image) in a sheet.
* @param deleteEmbeddedObject deleteEmbeddedObject or {@code null} for none
*/
public Request setDeleteEmbeddedObject(DeleteEmbeddedObjectRequest deleteEmbeddedObject) {
this.deleteEmbeddedObject = deleteEmbeddedObject;
return this;
}
/**
* Deletes a filter view from a sheet.
* @return value or {@code null} for none
*/
public DeleteFilterViewRequest getDeleteFilterView() {
return deleteFilterView;
}
/**
* Deletes a filter view from a sheet.
* @param deleteFilterView deleteFilterView or {@code null} for none
*/
public Request setDeleteFilterView(DeleteFilterViewRequest deleteFilterView) {
this.deleteFilterView = deleteFilterView;
return this;
}
/**
* Deletes a named range.
* @return value or {@code null} for none
*/
public DeleteNamedRangeRequest getDeleteNamedRange() {
return deleteNamedRange;
}
/**
* Deletes a named range.
* @param deleteNamedRange deleteNamedRange or {@code null} for none
*/
public Request setDeleteNamedRange(DeleteNamedRangeRequest deleteNamedRange) {
this.deleteNamedRange = deleteNamedRange;
return this;
}
/**
* Deletes a protected range.
* @return value or {@code null} for none
*/
public DeleteProtectedRangeRequest getDeleteProtectedRange() {
return deleteProtectedRange;
}
/**
* Deletes a protected range.
* @param deleteProtectedRange deleteProtectedRange or {@code null} for none
*/
public Request setDeleteProtectedRange(DeleteProtectedRangeRequest deleteProtectedRange) {
this.deleteProtectedRange = deleteProtectedRange;
return this;
}
/**
* Deletes a range of cells from a sheet, shifting the remaining cells.
* @return value or {@code null} for none
*/
public DeleteRangeRequest getDeleteRange() {
return deleteRange;
}
/**
* Deletes a range of cells from a sheet, shifting the remaining cells.
* @param deleteRange deleteRange or {@code null} for none
*/
public Request setDeleteRange(DeleteRangeRequest deleteRange) {
this.deleteRange = deleteRange;
return this;
}
/**
* Deletes a sheet.
* @return value or {@code null} for none
*/
public DeleteSheetRequest getDeleteSheet() {
return deleteSheet;
}
/**
* Deletes a sheet.
* @param deleteSheet deleteSheet or {@code null} for none
*/
public Request setDeleteSheet(DeleteSheetRequest deleteSheet) {
this.deleteSheet = deleteSheet;
return this;
}
/**
* Duplicates a filter view.
* @return value or {@code null} for none
*/
public DuplicateFilterViewRequest getDuplicateFilterView() {
return duplicateFilterView;
}
/**
* Duplicates a filter view.
* @param duplicateFilterView duplicateFilterView or {@code null} for none
*/
public Request setDuplicateFilterView(DuplicateFilterViewRequest duplicateFilterView) {
this.duplicateFilterView = duplicateFilterView;
return this;
}
/**
* Duplicates a sheet.
* @return value or {@code null} for none
*/
public DuplicateSheetRequest getDuplicateSheet() {
return duplicateSheet;
}
/**
* Duplicates a sheet.
* @param duplicateSheet duplicateSheet or {@code null} for none
*/
public Request setDuplicateSheet(DuplicateSheetRequest duplicateSheet) {
this.duplicateSheet = duplicateSheet;
return this;
}
/**
* Finds and replaces occurrences of some text with other text.
* @return value or {@code null} for none
*/
public FindReplaceRequest getFindReplace() {
return findReplace;
}
/**
* Finds and replaces occurrences of some text with other text.
* @param findReplace findReplace or {@code null} for none
*/
public Request setFindReplace(FindReplaceRequest findReplace) {
this.findReplace = findReplace;
return this;
}
/**
* Inserts new rows or columns in a sheet.
* @return value or {@code null} for none
*/
public InsertDimensionRequest getInsertDimension() {
return insertDimension;
}
/**
* Inserts new rows or columns in a sheet.
* @param insertDimension insertDimension or {@code null} for none
*/
public Request setInsertDimension(InsertDimensionRequest insertDimension) {
this.insertDimension = insertDimension;
return this;
}
/**
* Inserts new cells in a sheet, shifting the existing cells.
* @return value or {@code null} for none
*/
public InsertRangeRequest getInsertRange() {
return insertRange;
}
/**
* Inserts new cells in a sheet, shifting the existing cells.
* @param insertRange insertRange or {@code null} for none
*/
public Request setInsertRange(InsertRangeRequest insertRange) {
this.insertRange = insertRange;
return this;
}
/**
* Merges cells together.
* @return value or {@code null} for none
*/
public MergeCellsRequest getMergeCells() {
return mergeCells;
}
/**
* Merges cells together.
* @param mergeCells mergeCells or {@code null} for none
*/
public Request setMergeCells(MergeCellsRequest mergeCells) {
this.mergeCells = mergeCells;
return this;
}
/**
* Moves rows or columns to another location in a sheet.
* @return value or {@code null} for none
*/
public MoveDimensionRequest getMoveDimension() {
return moveDimension;
}
/**
* Moves rows or columns to another location in a sheet.
* @param moveDimension moveDimension or {@code null} for none
*/
public Request setMoveDimension(MoveDimensionRequest moveDimension) {
this.moveDimension = moveDimension;
return this;
}
/**
* Pastes data (HTML or delimited) into a sheet.
* @return value or {@code null} for none
*/
public PasteDataRequest getPasteData() {
return pasteData;
}
/**
* Pastes data (HTML or delimited) into a sheet.
* @param pasteData pasteData or {@code null} for none
*/
public Request setPasteData(PasteDataRequest pasteData) {
this.pasteData = pasteData;
return this;
}
/**
* Randomizes the order of the rows in a range.
* @return value or {@code null} for none
*/
public RandomizeRangeRequest getRandomizeRange() {
return randomizeRange;
}
/**
* Randomizes the order of the rows in a range.
* @param randomizeRange randomizeRange or {@code null} for none
*/
public Request setRandomizeRange(RandomizeRangeRequest randomizeRange) {
this.randomizeRange = randomizeRange;
return this;
}
/**
* Repeats a single cell across a range.
* @return value or {@code null} for none
*/
public RepeatCellRequest getRepeatCell() {
return repeatCell;
}
/**
* Repeats a single cell across a range.
* @param repeatCell repeatCell or {@code null} for none
*/
public Request setRepeatCell(RepeatCellRequest repeatCell) {
this.repeatCell = repeatCell;
return this;
}
/**
* Sets the basic filter on a sheet.
* @return value or {@code null} for none
*/
public SetBasicFilterRequest getSetBasicFilter() {
return setBasicFilter;
}
/**
* Sets the basic filter on a sheet.
* @param setBasicFilter setBasicFilter or {@code null} for none
*/
public Request setSetBasicFilter(SetBasicFilterRequest setBasicFilter) {
this.setBasicFilter = setBasicFilter;
return this;
}
/**
* Sets data validation for one or more cells.
* @return value or {@code null} for none
*/
public SetDataValidationRequest getSetDataValidation() {
return setDataValidation;
}
/**
* Sets data validation for one or more cells.
* @param setDataValidation setDataValidation or {@code null} for none
*/
public Request setSetDataValidation(SetDataValidationRequest setDataValidation) {
this.setDataValidation = setDataValidation;
return this;
}
/**
* Sorts data in a range.
* @return value or {@code null} for none
*/
public SortRangeRequest getSortRange() {
return sortRange;
}
/**
* Sorts data in a range.
* @param sortRange sortRange or {@code null} for none
*/
public Request setSortRange(SortRangeRequest sortRange) {
this.sortRange = sortRange;
return this;
}
/**
* Converts a column of text into many columns of text.
* @return value or {@code null} for none
*/
public TextToColumnsRequest getTextToColumns() {
return textToColumns;
}
/**
* Converts a column of text into many columns of text.
* @param textToColumns textToColumns or {@code null} for none
*/
public Request setTextToColumns(TextToColumnsRequest textToColumns) {
this.textToColumns = textToColumns;
return this;
}
/**
* Unmerges merged cells.
* @return value or {@code null} for none
*/
public UnmergeCellsRequest getUnmergeCells() {
return unmergeCells;
}
/**
* Unmerges merged cells.
* @param unmergeCells unmergeCells or {@code null} for none
*/
public Request setUnmergeCells(UnmergeCellsRequest unmergeCells) {
this.unmergeCells = unmergeCells;
return this;
}
/**
* Updates a banded range
* @return value or {@code null} for none
*/
public UpdateBandingRequest getUpdateBanding() {
return updateBanding;
}
/**
* Updates a banded range
* @param updateBanding updateBanding or {@code null} for none
*/
public Request setUpdateBanding(UpdateBandingRequest updateBanding) {
this.updateBanding = updateBanding;
return this;
}
/**
* Updates the borders in a range of cells.
* @return value or {@code null} for none
*/
public UpdateBordersRequest getUpdateBorders() {
return updateBorders;
}
/**
* Updates the borders in a range of cells.
* @param updateBorders updateBorders or {@code null} for none
*/
public Request setUpdateBorders(UpdateBordersRequest updateBorders) {
this.updateBorders = updateBorders;
return this;
}
/**
* Updates many cells at once.
* @return value or {@code null} for none
*/
public UpdateCellsRequest getUpdateCells() {
return updateCells;
}
/**
* Updates many cells at once.
* @param updateCells updateCells or {@code null} for none
*/
public Request setUpdateCells(UpdateCellsRequest updateCells) {
this.updateCells = updateCells;
return this;
}
/**
* Updates a chart's specifications.
* @return value or {@code null} for none
*/
public UpdateChartSpecRequest getUpdateChartSpec() {
return updateChartSpec;
}
/**
* Updates a chart's specifications.
* @param updateChartSpec updateChartSpec or {@code null} for none
*/
public Request setUpdateChartSpec(UpdateChartSpecRequest updateChartSpec) {
this.updateChartSpec = updateChartSpec;
return this;
}
/**
* Updates an existing conditional format rule.
* @return value or {@code null} for none
*/
public UpdateConditionalFormatRuleRequest getUpdateConditionalFormatRule() {
return updateConditionalFormatRule;
}
/**
* Updates an existing conditional format rule.
* @param updateConditionalFormatRule updateConditionalFormatRule or {@code null} for none
*/
public Request setUpdateConditionalFormatRule(UpdateConditionalFormatRuleRequest updateConditionalFormatRule) {
this.updateConditionalFormatRule = updateConditionalFormatRule;
return this;
}
/**
* Updates an existing developer metadata entry
* @return value or {@code null} for none
*/
public UpdateDeveloperMetadataRequest getUpdateDeveloperMetadata() {
return updateDeveloperMetadata;
}
/**
* Updates an existing developer metadata entry
* @param updateDeveloperMetadata updateDeveloperMetadata or {@code null} for none
*/
public Request setUpdateDeveloperMetadata(UpdateDeveloperMetadataRequest updateDeveloperMetadata) {
this.updateDeveloperMetadata = updateDeveloperMetadata;
return this;
}
/**
* Updates the state of the specified group.
* @return value or {@code null} for none
*/
public UpdateDimensionGroupRequest getUpdateDimensionGroup() {
return updateDimensionGroup;
}
/**
* Updates the state of the specified group.
* @param updateDimensionGroup updateDimensionGroup or {@code null} for none
*/
public Request setUpdateDimensionGroup(UpdateDimensionGroupRequest updateDimensionGroup) {
this.updateDimensionGroup = updateDimensionGroup;
return this;
}
/**
* Updates dimensions' properties.
* @return value or {@code null} for none
*/
public UpdateDimensionPropertiesRequest getUpdateDimensionProperties() {
return updateDimensionProperties;
}
/**
* Updates dimensions' properties.
* @param updateDimensionProperties updateDimensionProperties or {@code null} for none
*/
public Request setUpdateDimensionProperties(UpdateDimensionPropertiesRequest updateDimensionProperties) {
this.updateDimensionProperties = updateDimensionProperties;
return this;
}
/**
* Updates an embedded object's (e.g. chart, image) position.
* @return value or {@code null} for none
*/
public UpdateEmbeddedObjectPositionRequest getUpdateEmbeddedObjectPosition() {
return updateEmbeddedObjectPosition;
}
/**
* Updates an embedded object's (e.g. chart, image) position.
* @param updateEmbeddedObjectPosition updateEmbeddedObjectPosition or {@code null} for none
*/
public Request setUpdateEmbeddedObjectPosition(UpdateEmbeddedObjectPositionRequest updateEmbeddedObjectPosition) {
this.updateEmbeddedObjectPosition = updateEmbeddedObjectPosition;
return this;
}
/**
* Updates the properties of a filter view.
* @return value or {@code null} for none
*/
public UpdateFilterViewRequest getUpdateFilterView() {
return updateFilterView;
}
/**
* Updates the properties of a filter view.
* @param updateFilterView updateFilterView or {@code null} for none
*/
public Request setUpdateFilterView(UpdateFilterViewRequest updateFilterView) {
this.updateFilterView = updateFilterView;
return this;
}
/**
* Updates a named range.
* @return value or {@code null} for none
*/
public UpdateNamedRangeRequest getUpdateNamedRange() {
return updateNamedRange;
}
/**
* Updates a named range.
* @param updateNamedRange updateNamedRange or {@code null} for none
*/
public Request setUpdateNamedRange(UpdateNamedRangeRequest updateNamedRange) {
this.updateNamedRange = updateNamedRange;
return this;
}
/**
* Updates a protected range.
* @return value or {@code null} for none
*/
public UpdateProtectedRangeRequest getUpdateProtectedRange() {
return updateProtectedRange;
}
/**
* Updates a protected range.
* @param updateProtectedRange updateProtectedRange or {@code null} for none
*/
public Request setUpdateProtectedRange(UpdateProtectedRangeRequest updateProtectedRange) {
this.updateProtectedRange = updateProtectedRange;
return this;
}
/**
* Updates a sheet's properties.
* @return value or {@code null} for none
*/
public UpdateSheetPropertiesRequest getUpdateSheetProperties() {
return updateSheetProperties;
}
/**
* Updates a sheet's properties.
* @param updateSheetProperties updateSheetProperties or {@code null} for none
*/
public Request setUpdateSheetProperties(UpdateSheetPropertiesRequest updateSheetProperties) {
this.updateSheetProperties = updateSheetProperties;
return this;
}
/**
* Updates the spreadsheet's properties.
* @return value or {@code null} for none
*/
public UpdateSpreadsheetPropertiesRequest getUpdateSpreadsheetProperties() {
return updateSpreadsheetProperties;
}
/**
* Updates the spreadsheet's properties.
* @param updateSpreadsheetProperties updateSpreadsheetProperties or {@code null} for none
*/
public Request setUpdateSpreadsheetProperties(UpdateSpreadsheetPropertiesRequest updateSpreadsheetProperties) {
this.updateSpreadsheetProperties = updateSpreadsheetProperties;
return this;
}
  /**
   * Sets a field by JSON name.
   *
   * <p>Overridden only to narrow the return type (the cast to {@code Request}
   * is visible below) so that generic field assignment can be chained fluently.
   */
  @Override
  public Request set(String fieldName, Object value) {
    return (Request) super.set(fieldName, value);
  }
  /**
   * Makes a copy of this request.
   *
   * <p>Overridden only to narrow the return type to {@code Request}; the copy
   * itself is delegated entirely to {@code super.clone()}.
   */
  @Override
  public Request clone() {
    return (Request) super.clone();
  }
}
|
googleapis/google-api-java-client-services | 37,297 | clients/google-api-services-sheets/v4/1.27.0/com/google/api/services/sheets/v4/model/Request.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.sheets.v4.model;
/**
* A single kind of update to apply to a spreadsheet.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Google Sheets API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Request extends com.google.api.client.json.GenericJson {
/**
* Adds a new banded range
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddBandingRequest addBanding;
/**
* Adds a chart.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddChartRequest addChart;
/**
* Adds a new conditional format rule.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddConditionalFormatRuleRequest addConditionalFormatRule;
/**
* Creates a group over the specified range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddDimensionGroupRequest addDimensionGroup;
/**
* Adds a filter view.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddFilterViewRequest addFilterView;
/**
* Adds a named range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddNamedRangeRequest addNamedRange;
/**
* Adds a protected range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddProtectedRangeRequest addProtectedRange;
/**
* Adds a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AddSheetRequest addSheet;
/**
* Appends cells after the last row with data in a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AppendCellsRequest appendCells;
/**
* Appends dimensions to the end of a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AppendDimensionRequest appendDimension;
/**
* Automatically fills in more data based on existing data.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AutoFillRequest autoFill;
/**
* Automatically resizes one or more dimensions based on the contents of the cells in that
* dimension.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AutoResizeDimensionsRequest autoResizeDimensions;
/**
* Clears the basic filter on a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClearBasicFilterRequest clearBasicFilter;
/**
* Copies data from one area and pastes it to another.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CopyPasteRequest copyPaste;
/**
* Creates new developer metadata
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CreateDeveloperMetadataRequest createDeveloperMetadata;
/**
* Cuts data from one area and pastes it to another.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CutPasteRequest cutPaste;
/**
* Removes a banded range
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteBandingRequest deleteBanding;
/**
* Deletes an existing conditional format rule.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteConditionalFormatRuleRequest deleteConditionalFormatRule;
/**
* Deletes developer metadata
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteDeveloperMetadataRequest deleteDeveloperMetadata;
/**
* Deletes rows or columns in a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteDimensionRequest deleteDimension;
/**
* Deletes a group over the specified range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteDimensionGroupRequest deleteDimensionGroup;
/**
* Deletes an embedded object (e.g, chart, image) in a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteEmbeddedObjectRequest deleteEmbeddedObject;
/**
* Deletes a filter view from a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteFilterViewRequest deleteFilterView;
/**
* Deletes a named range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteNamedRangeRequest deleteNamedRange;
/**
* Deletes a protected range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteProtectedRangeRequest deleteProtectedRange;
/**
* Deletes a range of cells from a sheet, shifting the remaining cells.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteRangeRequest deleteRange;
/**
* Deletes a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeleteSheetRequest deleteSheet;
/**
* Duplicates a filter view.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DuplicateFilterViewRequest duplicateFilterView;
/**
* Duplicates a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DuplicateSheetRequest duplicateSheet;
/**
* Finds and replaces occurrences of some text with other text.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private FindReplaceRequest findReplace;
/**
* Inserts new rows or columns in a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private InsertDimensionRequest insertDimension;
/**
* Inserts new cells in a sheet, shifting the existing cells.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private InsertRangeRequest insertRange;
/**
* Merges cells together.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private MergeCellsRequest mergeCells;
/**
* Moves rows or columns to another location in a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private MoveDimensionRequest moveDimension;
/**
* Pastes data (HTML or delimited) into a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private PasteDataRequest pasteData;
/**
* Randomizes the order of the rows in a range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private RandomizeRangeRequest randomizeRange;
/**
* Repeats a single cell across a range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private RepeatCellRequest repeatCell;
/**
* Sets the basic filter on a sheet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private SetBasicFilterRequest setBasicFilter;
/**
* Sets data validation for one or more cells.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private SetDataValidationRequest setDataValidation;
/**
* Sorts data in a range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private SortRangeRequest sortRange;
/**
* Converts a column of text into many columns of text.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TextToColumnsRequest textToColumns;
/**
* Unmerges merged cells.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UnmergeCellsRequest unmergeCells;
/**
* Updates a banded range
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateBandingRequest updateBanding;
/**
* Updates the borders in a range of cells.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateBordersRequest updateBorders;
/**
* Updates many cells at once.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateCellsRequest updateCells;
/**
* Updates a chart's specifications.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateChartSpecRequest updateChartSpec;
/**
* Updates an existing conditional format rule.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateConditionalFormatRuleRequest updateConditionalFormatRule;
/**
* Updates an existing developer metadata entry
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateDeveloperMetadataRequest updateDeveloperMetadata;
/**
* Updates the state of the specified group.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateDimensionGroupRequest updateDimensionGroup;
/**
* Updates dimensions' properties.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateDimensionPropertiesRequest updateDimensionProperties;
/**
* Updates an embedded object's (e.g. chart, image) position.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateEmbeddedObjectPositionRequest updateEmbeddedObjectPosition;
/**
* Updates the properties of a filter view.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateFilterViewRequest updateFilterView;
/**
* Updates a named range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateNamedRangeRequest updateNamedRange;
/**
* Updates a protected range.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateProtectedRangeRequest updateProtectedRange;
/**
* Updates a sheet's properties.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateSheetPropertiesRequest updateSheetProperties;
/**
* Updates the spreadsheet's properties.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private UpdateSpreadsheetPropertiesRequest updateSpreadsheetProperties;
/**
* Adds a new banded range
* @return value or {@code null} for none
*/
public AddBandingRequest getAddBanding() {
return addBanding;
}
/**
* Adds a new banded range
* @param addBanding addBanding or {@code null} for none
*/
public Request setAddBanding(AddBandingRequest addBanding) {
this.addBanding = addBanding;
return this;
}
/**
* Adds a chart.
* @return value or {@code null} for none
*/
public AddChartRequest getAddChart() {
return addChart;
}
/**
* Adds a chart.
* @param addChart addChart or {@code null} for none
*/
public Request setAddChart(AddChartRequest addChart) {
this.addChart = addChart;
return this;
}
/**
* Adds a new conditional format rule.
* @return value or {@code null} for none
*/
public AddConditionalFormatRuleRequest getAddConditionalFormatRule() {
return addConditionalFormatRule;
}
/**
* Adds a new conditional format rule.
* @param addConditionalFormatRule addConditionalFormatRule or {@code null} for none
*/
public Request setAddConditionalFormatRule(AddConditionalFormatRuleRequest addConditionalFormatRule) {
this.addConditionalFormatRule = addConditionalFormatRule;
return this;
}
/**
* Creates a group over the specified range.
* @return value or {@code null} for none
*/
public AddDimensionGroupRequest getAddDimensionGroup() {
return addDimensionGroup;
}
/**
* Creates a group over the specified range.
* @param addDimensionGroup addDimensionGroup or {@code null} for none
*/
public Request setAddDimensionGroup(AddDimensionGroupRequest addDimensionGroup) {
this.addDimensionGroup = addDimensionGroup;
return this;
}
/**
* Adds a filter view.
* @return value or {@code null} for none
*/
public AddFilterViewRequest getAddFilterView() {
return addFilterView;
}
/**
* Adds a filter view.
* @param addFilterView addFilterView or {@code null} for none
*/
public Request setAddFilterView(AddFilterViewRequest addFilterView) {
this.addFilterView = addFilterView;
return this;
}
/**
* Adds a named range.
* @return value or {@code null} for none
*/
public AddNamedRangeRequest getAddNamedRange() {
return addNamedRange;
}
/**
* Adds a named range.
* @param addNamedRange addNamedRange or {@code null} for none
*/
public Request setAddNamedRange(AddNamedRangeRequest addNamedRange) {
this.addNamedRange = addNamedRange;
return this;
}
/**
* Adds a protected range.
* @return value or {@code null} for none
*/
public AddProtectedRangeRequest getAddProtectedRange() {
return addProtectedRange;
}
/**
* Adds a protected range.
* @param addProtectedRange addProtectedRange or {@code null} for none
*/
public Request setAddProtectedRange(AddProtectedRangeRequest addProtectedRange) {
this.addProtectedRange = addProtectedRange;
return this;
}
/**
* Adds a sheet.
* @return value or {@code null} for none
*/
public AddSheetRequest getAddSheet() {
return addSheet;
}
/**
* Adds a sheet.
* @param addSheet addSheet or {@code null} for none
*/
public Request setAddSheet(AddSheetRequest addSheet) {
this.addSheet = addSheet;
return this;
}
/**
* Appends cells after the last row with data in a sheet.
* @return value or {@code null} for none
*/
public AppendCellsRequest getAppendCells() {
return appendCells;
}
/**
* Appends cells after the last row with data in a sheet.
* @param appendCells appendCells or {@code null} for none
*/
public Request setAppendCells(AppendCellsRequest appendCells) {
this.appendCells = appendCells;
return this;
}
/**
* Appends dimensions to the end of a sheet.
* @return value or {@code null} for none
*/
public AppendDimensionRequest getAppendDimension() {
return appendDimension;
}
/**
* Appends dimensions to the end of a sheet.
* @param appendDimension appendDimension or {@code null} for none
*/
public Request setAppendDimension(AppendDimensionRequest appendDimension) {
this.appendDimension = appendDimension;
return this;
}
/**
* Automatically fills in more data based on existing data.
* @return value or {@code null} for none
*/
public AutoFillRequest getAutoFill() {
return autoFill;
}
/**
* Automatically fills in more data based on existing data.
* @param autoFill autoFill or {@code null} for none
*/
public Request setAutoFill(AutoFillRequest autoFill) {
this.autoFill = autoFill;
return this;
}
/**
* Automatically resizes one or more dimensions based on the contents of the cells in that
* dimension.
* @return value or {@code null} for none
*/
public AutoResizeDimensionsRequest getAutoResizeDimensions() {
return autoResizeDimensions;
}
/**
* Automatically resizes one or more dimensions based on the contents of the cells in that
* dimension.
* @param autoResizeDimensions autoResizeDimensions or {@code null} for none
*/
public Request setAutoResizeDimensions(AutoResizeDimensionsRequest autoResizeDimensions) {
this.autoResizeDimensions = autoResizeDimensions;
return this;
}
/**
* Clears the basic filter on a sheet.
* @return value or {@code null} for none
*/
public ClearBasicFilterRequest getClearBasicFilter() {
return clearBasicFilter;
}
/**
* Clears the basic filter on a sheet.
* @param clearBasicFilter clearBasicFilter or {@code null} for none
*/
public Request setClearBasicFilter(ClearBasicFilterRequest clearBasicFilter) {
this.clearBasicFilter = clearBasicFilter;
return this;
}
/**
* Copies data from one area and pastes it to another.
* @return value or {@code null} for none
*/
public CopyPasteRequest getCopyPaste() {
return copyPaste;
}
/**
* Copies data from one area and pastes it to another.
* @param copyPaste copyPaste or {@code null} for none
*/
public Request setCopyPaste(CopyPasteRequest copyPaste) {
  this.copyPaste = copyPaste;
  return this;
}
/**
 * Creates new developer metadata.
 * @return value or {@code null} for none
 */
public CreateDeveloperMetadataRequest getCreateDeveloperMetadata() {
  return createDeveloperMetadata;
}
/**
 * Creates new developer metadata.
 * @param createDeveloperMetadata createDeveloperMetadata or {@code null} for none
 */
public Request setCreateDeveloperMetadata(CreateDeveloperMetadataRequest createDeveloperMetadata) {
  this.createDeveloperMetadata = createDeveloperMetadata;
  return this;
}
/**
 * Cuts data from one area and pastes it to another.
 * @return value or {@code null} for none
 */
public CutPasteRequest getCutPaste() {
  return cutPaste;
}
/**
 * Cuts data from one area and pastes it to another.
 * @param cutPaste cutPaste or {@code null} for none
 */
public Request setCutPaste(CutPasteRequest cutPaste) {
  this.cutPaste = cutPaste;
  return this;
}
/**
 * Removes a banded range.
 * @return value or {@code null} for none
 */
public DeleteBandingRequest getDeleteBanding() {
  return deleteBanding;
}
/**
 * Removes a banded range.
 * @param deleteBanding deleteBanding or {@code null} for none
 */
public Request setDeleteBanding(DeleteBandingRequest deleteBanding) {
  this.deleteBanding = deleteBanding;
  return this;
}
/**
 * Deletes an existing conditional format rule.
 * @return value or {@code null} for none
 */
public DeleteConditionalFormatRuleRequest getDeleteConditionalFormatRule() {
  return deleteConditionalFormatRule;
}
/**
 * Deletes an existing conditional format rule.
 * @param deleteConditionalFormatRule deleteConditionalFormatRule or {@code null} for none
 */
public Request setDeleteConditionalFormatRule(DeleteConditionalFormatRuleRequest deleteConditionalFormatRule) {
  this.deleteConditionalFormatRule = deleteConditionalFormatRule;
  return this;
}
/**
 * Deletes developer metadata.
 * @return value or {@code null} for none
 */
public DeleteDeveloperMetadataRequest getDeleteDeveloperMetadata() {
  return deleteDeveloperMetadata;
}
/**
 * Deletes developer metadata.
 * @param deleteDeveloperMetadata deleteDeveloperMetadata or {@code null} for none
 */
public Request setDeleteDeveloperMetadata(DeleteDeveloperMetadataRequest deleteDeveloperMetadata) {
  this.deleteDeveloperMetadata = deleteDeveloperMetadata;
  return this;
}
/**
 * Deletes rows or columns in a sheet.
 * @return value or {@code null} for none
 */
public DeleteDimensionRequest getDeleteDimension() {
  return deleteDimension;
}
/**
 * Deletes rows or columns in a sheet.
 * @param deleteDimension deleteDimension or {@code null} for none
 */
public Request setDeleteDimension(DeleteDimensionRequest deleteDimension) {
  this.deleteDimension = deleteDimension;
  return this;
}
/**
 * Deletes a group over the specified range.
 * @return value or {@code null} for none
 */
public DeleteDimensionGroupRequest getDeleteDimensionGroup() {
  return deleteDimensionGroup;
}
/**
 * Deletes a group over the specified range.
 * @param deleteDimensionGroup deleteDimensionGroup or {@code null} for none
 */
public Request setDeleteDimensionGroup(DeleteDimensionGroupRequest deleteDimensionGroup) {
  this.deleteDimensionGroup = deleteDimensionGroup;
  return this;
}
/**
 * Deletes an embedded object (e.g., chart, image) in a sheet.
 * @return value or {@code null} for none
 */
public DeleteEmbeddedObjectRequest getDeleteEmbeddedObject() {
  return deleteEmbeddedObject;
}
/**
 * Deletes an embedded object (e.g., chart, image) in a sheet.
 * @param deleteEmbeddedObject deleteEmbeddedObject or {@code null} for none
 */
public Request setDeleteEmbeddedObject(DeleteEmbeddedObjectRequest deleteEmbeddedObject) {
  this.deleteEmbeddedObject = deleteEmbeddedObject;
  return this;
}
/**
 * Deletes a filter view from a sheet.
 * @return value or {@code null} for none
 */
public DeleteFilterViewRequest getDeleteFilterView() {
  return deleteFilterView;
}
/**
 * Deletes a filter view from a sheet.
 * @param deleteFilterView deleteFilterView or {@code null} for none
 */
public Request setDeleteFilterView(DeleteFilterViewRequest deleteFilterView) {
  this.deleteFilterView = deleteFilterView;
  return this;
}
/**
 * Deletes a named range.
 * @return value or {@code null} for none
 */
public DeleteNamedRangeRequest getDeleteNamedRange() {
  return deleteNamedRange;
}
/**
 * Deletes a named range.
 * @param deleteNamedRange deleteNamedRange or {@code null} for none
 */
public Request setDeleteNamedRange(DeleteNamedRangeRequest deleteNamedRange) {
  this.deleteNamedRange = deleteNamedRange;
  return this;
}
/**
 * Deletes a protected range.
 * @return value or {@code null} for none
 */
public DeleteProtectedRangeRequest getDeleteProtectedRange() {
  return deleteProtectedRange;
}
/**
 * Deletes a protected range.
 * @param deleteProtectedRange deleteProtectedRange or {@code null} for none
 */
public Request setDeleteProtectedRange(DeleteProtectedRangeRequest deleteProtectedRange) {
  this.deleteProtectedRange = deleteProtectedRange;
  return this;
}
/**
 * Deletes a range of cells from a sheet, shifting the remaining cells.
 * @return value or {@code null} for none
 */
public DeleteRangeRequest getDeleteRange() {
  return deleteRange;
}
/**
 * Deletes a range of cells from a sheet, shifting the remaining cells.
 * @param deleteRange deleteRange or {@code null} for none
 */
public Request setDeleteRange(DeleteRangeRequest deleteRange) {
  this.deleteRange = deleteRange;
  return this;
}
/**
 * Deletes a sheet.
 * @return value or {@code null} for none
 */
public DeleteSheetRequest getDeleteSheet() {
  return deleteSheet;
}
/**
 * Deletes a sheet.
 * @param deleteSheet deleteSheet or {@code null} for none
 */
public Request setDeleteSheet(DeleteSheetRequest deleteSheet) {
  this.deleteSheet = deleteSheet;
  return this;
}
/**
 * Duplicates a filter view.
 * @return value or {@code null} for none
 */
public DuplicateFilterViewRequest getDuplicateFilterView() {
  return duplicateFilterView;
}
/**
 * Duplicates a filter view.
 * @param duplicateFilterView duplicateFilterView or {@code null} for none
 */
public Request setDuplicateFilterView(DuplicateFilterViewRequest duplicateFilterView) {
  this.duplicateFilterView = duplicateFilterView;
  return this;
}
/**
 * Duplicates a sheet.
 * @return value or {@code null} for none
 */
public DuplicateSheetRequest getDuplicateSheet() {
  return duplicateSheet;
}
/**
 * Duplicates a sheet.
 * @param duplicateSheet duplicateSheet or {@code null} for none
 */
public Request setDuplicateSheet(DuplicateSheetRequest duplicateSheet) {
  this.duplicateSheet = duplicateSheet;
  return this;
}
/**
 * Finds and replaces occurrences of some text with other text.
 * @return value or {@code null} for none
 */
public FindReplaceRequest getFindReplace() {
  return findReplace;
}
/**
 * Finds and replaces occurrences of some text with other text.
 * @param findReplace findReplace or {@code null} for none
 */
public Request setFindReplace(FindReplaceRequest findReplace) {
  this.findReplace = findReplace;
  return this;
}
/**
 * Inserts new rows or columns in a sheet.
 * @return value or {@code null} for none
 */
public InsertDimensionRequest getInsertDimension() {
  return insertDimension;
}
/**
 * Inserts new rows or columns in a sheet.
 * @param insertDimension insertDimension or {@code null} for none
 */
public Request setInsertDimension(InsertDimensionRequest insertDimension) {
  this.insertDimension = insertDimension;
  return this;
}
/**
 * Inserts new cells in a sheet, shifting the existing cells.
 * @return value or {@code null} for none
 */
public InsertRangeRequest getInsertRange() {
  return insertRange;
}
/**
 * Inserts new cells in a sheet, shifting the existing cells.
 * @param insertRange insertRange or {@code null} for none
 */
public Request setInsertRange(InsertRangeRequest insertRange) {
  this.insertRange = insertRange;
  return this;
}
/**
 * Merges cells together.
 * @return value or {@code null} for none
 */
public MergeCellsRequest getMergeCells() {
  return mergeCells;
}
/**
 * Merges cells together.
 * @param mergeCells mergeCells or {@code null} for none
 */
public Request setMergeCells(MergeCellsRequest mergeCells) {
  this.mergeCells = mergeCells;
  return this;
}
/**
 * Moves rows or columns to another location in a sheet.
 * @return value or {@code null} for none
 */
public MoveDimensionRequest getMoveDimension() {
  return moveDimension;
}
/**
 * Moves rows or columns to another location in a sheet.
 * @param moveDimension moveDimension or {@code null} for none
 */
public Request setMoveDimension(MoveDimensionRequest moveDimension) {
  this.moveDimension = moveDimension;
  return this;
}
/**
 * Pastes data (HTML or delimited) into a sheet.
 * @return value or {@code null} for none
 */
public PasteDataRequest getPasteData() {
  return pasteData;
}
/**
 * Pastes data (HTML or delimited) into a sheet.
 * @param pasteData pasteData or {@code null} for none
 */
public Request setPasteData(PasteDataRequest pasteData) {
  this.pasteData = pasteData;
  return this;
}
/**
 * Randomizes the order of the rows in a range.
 * @return value or {@code null} for none
 */
public RandomizeRangeRequest getRandomizeRange() {
  return randomizeRange;
}
/**
 * Randomizes the order of the rows in a range.
 * @param randomizeRange randomizeRange or {@code null} for none
 */
public Request setRandomizeRange(RandomizeRangeRequest randomizeRange) {
  this.randomizeRange = randomizeRange;
  return this;
}
/**
 * Repeats a single cell across a range.
 * @return value or {@code null} for none
 */
public RepeatCellRequest getRepeatCell() {
  return repeatCell;
}
/**
 * Repeats a single cell across a range.
 * @param repeatCell repeatCell or {@code null} for none
 */
public Request setRepeatCell(RepeatCellRequest repeatCell) {
  this.repeatCell = repeatCell;
  return this;
}
/**
 * Sets the basic filter on a sheet.
 * @return value or {@code null} for none
 */
public SetBasicFilterRequest getSetBasicFilter() {
  return setBasicFilter;
}
/**
 * Sets the basic filter on a sheet.
 * @param setBasicFilter setBasicFilter or {@code null} for none
 */
public Request setSetBasicFilter(SetBasicFilterRequest setBasicFilter) {
  this.setBasicFilter = setBasicFilter;
  return this;
}
/**
 * Sets data validation for one or more cells.
 * @return value or {@code null} for none
 */
public SetDataValidationRequest getSetDataValidation() {
  return setDataValidation;
}
/**
 * Sets data validation for one or more cells.
 * @param setDataValidation setDataValidation or {@code null} for none
 */
public Request setSetDataValidation(SetDataValidationRequest setDataValidation) {
  this.setDataValidation = setDataValidation;
  return this;
}
/**
 * Sorts data in a range.
 * @return value or {@code null} for none
 */
public SortRangeRequest getSortRange() {
  return sortRange;
}
/**
 * Sorts data in a range.
 * @param sortRange sortRange or {@code null} for none
 */
public Request setSortRange(SortRangeRequest sortRange) {
  this.sortRange = sortRange;
  return this;
}
/**
 * Converts a column of text into many columns of text.
 * @return value or {@code null} for none
 */
public TextToColumnsRequest getTextToColumns() {
  return textToColumns;
}
/**
 * Converts a column of text into many columns of text.
 * @param textToColumns textToColumns or {@code null} for none
 */
public Request setTextToColumns(TextToColumnsRequest textToColumns) {
  this.textToColumns = textToColumns;
  return this;
}
/**
 * Unmerges merged cells.
 * @return value or {@code null} for none
 */
public UnmergeCellsRequest getUnmergeCells() {
  return unmergeCells;
}
/**
 * Unmerges merged cells.
 * @param unmergeCells unmergeCells or {@code null} for none
 */
public Request setUnmergeCells(UnmergeCellsRequest unmergeCells) {
  this.unmergeCells = unmergeCells;
  return this;
}
/**
 * Updates a banded range.
 * @return value or {@code null} for none
 */
public UpdateBandingRequest getUpdateBanding() {
  return updateBanding;
}
/**
 * Updates a banded range.
 * @param updateBanding updateBanding or {@code null} for none
 */
public Request setUpdateBanding(UpdateBandingRequest updateBanding) {
  this.updateBanding = updateBanding;
  return this;
}
/**
 * Updates the borders in a range of cells.
 * @return value or {@code null} for none
 */
public UpdateBordersRequest getUpdateBorders() {
  return updateBorders;
}
/**
 * Updates the borders in a range of cells.
 * @param updateBorders updateBorders or {@code null} for none
 */
public Request setUpdateBorders(UpdateBordersRequest updateBorders) {
  this.updateBorders = updateBorders;
  return this;
}
/**
 * Updates many cells at once.
 * @return value or {@code null} for none
 */
public UpdateCellsRequest getUpdateCells() {
  return updateCells;
}
/**
 * Updates many cells at once.
 * @param updateCells updateCells or {@code null} for none
 */
public Request setUpdateCells(UpdateCellsRequest updateCells) {
  this.updateCells = updateCells;
  return this;
}
/**
 * Updates a chart's specifications.
 * @return value or {@code null} for none
 */
public UpdateChartSpecRequest getUpdateChartSpec() {
  return updateChartSpec;
}
/**
 * Updates a chart's specifications.
 * @param updateChartSpec updateChartSpec or {@code null} for none
 */
public Request setUpdateChartSpec(UpdateChartSpecRequest updateChartSpec) {
  this.updateChartSpec = updateChartSpec;
  return this;
}
/**
 * Updates an existing conditional format rule.
 * @return value or {@code null} for none
 */
public UpdateConditionalFormatRuleRequest getUpdateConditionalFormatRule() {
  return updateConditionalFormatRule;
}
/**
 * Updates an existing conditional format rule.
 * @param updateConditionalFormatRule updateConditionalFormatRule or {@code null} for none
 */
public Request setUpdateConditionalFormatRule(UpdateConditionalFormatRuleRequest updateConditionalFormatRule) {
  this.updateConditionalFormatRule = updateConditionalFormatRule;
  return this;
}
/**
 * Updates an existing developer metadata entry.
 * @return value or {@code null} for none
 */
public UpdateDeveloperMetadataRequest getUpdateDeveloperMetadata() {
  return updateDeveloperMetadata;
}
/**
 * Updates an existing developer metadata entry.
 * @param updateDeveloperMetadata updateDeveloperMetadata or {@code null} for none
 */
public Request setUpdateDeveloperMetadata(UpdateDeveloperMetadataRequest updateDeveloperMetadata) {
  this.updateDeveloperMetadata = updateDeveloperMetadata;
  return this;
}
/**
 * Updates the state of the specified group.
 * @return value or {@code null} for none
 */
public UpdateDimensionGroupRequest getUpdateDimensionGroup() {
  return updateDimensionGroup;
}
/**
 * Updates the state of the specified group.
 * @param updateDimensionGroup updateDimensionGroup or {@code null} for none
 */
public Request setUpdateDimensionGroup(UpdateDimensionGroupRequest updateDimensionGroup) {
  this.updateDimensionGroup = updateDimensionGroup;
  return this;
}
/**
 * Updates dimensions' properties.
 * @return value or {@code null} for none
 */
public UpdateDimensionPropertiesRequest getUpdateDimensionProperties() {
  return updateDimensionProperties;
}
/**
 * Updates dimensions' properties.
 * @param updateDimensionProperties updateDimensionProperties or {@code null} for none
 */
public Request setUpdateDimensionProperties(UpdateDimensionPropertiesRequest updateDimensionProperties) {
  this.updateDimensionProperties = updateDimensionProperties;
  return this;
}
/**
 * Updates an embedded object's (e.g. chart, image) position.
 * @return value or {@code null} for none
 */
public UpdateEmbeddedObjectPositionRequest getUpdateEmbeddedObjectPosition() {
  return updateEmbeddedObjectPosition;
}
/**
 * Updates an embedded object's (e.g. chart, image) position.
 * @param updateEmbeddedObjectPosition updateEmbeddedObjectPosition or {@code null} for none
 */
public Request setUpdateEmbeddedObjectPosition(UpdateEmbeddedObjectPositionRequest updateEmbeddedObjectPosition) {
  this.updateEmbeddedObjectPosition = updateEmbeddedObjectPosition;
  return this;
}
/**
 * Updates the properties of a filter view.
 * @return value or {@code null} for none
 */
public UpdateFilterViewRequest getUpdateFilterView() {
  return updateFilterView;
}
/**
 * Updates the properties of a filter view.
 * @param updateFilterView updateFilterView or {@code null} for none
 */
public Request setUpdateFilterView(UpdateFilterViewRequest updateFilterView) {
  this.updateFilterView = updateFilterView;
  return this;
}
/**
 * Updates a named range.
 * @return value or {@code null} for none
 */
public UpdateNamedRangeRequest getUpdateNamedRange() {
  return updateNamedRange;
}
/**
 * Updates a named range.
 * @param updateNamedRange updateNamedRange or {@code null} for none
 */
public Request setUpdateNamedRange(UpdateNamedRangeRequest updateNamedRange) {
  this.updateNamedRange = updateNamedRange;
  return this;
}
/**
 * Updates a protected range.
 * @return value or {@code null} for none
 */
public UpdateProtectedRangeRequest getUpdateProtectedRange() {
  return updateProtectedRange;
}
/**
 * Updates a protected range.
 * @param updateProtectedRange updateProtectedRange or {@code null} for none
 */
public Request setUpdateProtectedRange(UpdateProtectedRangeRequest updateProtectedRange) {
  this.updateProtectedRange = updateProtectedRange;
  return this;
}
/**
 * Updates a sheet's properties.
 * @return value or {@code null} for none
 */
public UpdateSheetPropertiesRequest getUpdateSheetProperties() {
  return updateSheetProperties;
}
/**
 * Updates a sheet's properties.
 * @param updateSheetProperties updateSheetProperties or {@code null} for none
 */
public Request setUpdateSheetProperties(UpdateSheetPropertiesRequest updateSheetProperties) {
  this.updateSheetProperties = updateSheetProperties;
  return this;
}
/**
 * Updates the spreadsheet's properties.
 * @return value or {@code null} for none
 */
public UpdateSpreadsheetPropertiesRequest getUpdateSpreadsheetProperties() {
  return updateSpreadsheetProperties;
}
/**
 * Updates the spreadsheet's properties.
 * @param updateSpreadsheetProperties updateSpreadsheetProperties or {@code null} for none
 */
public Request setUpdateSpreadsheetProperties(UpdateSpreadsheetPropertiesRequest updateSpreadsheetProperties) {
  this.updateSpreadsheetProperties = updateSpreadsheetProperties;
  return this;
}
// Covariant override: narrows the return type so generic field assignment
// can be chained fluently as a Request.
@Override
public Request set(String fieldName, Object value) {
  return (Request) super.set(fieldName, value);
}
// Covariant override: returns the copy typed as Request instead of the
// superclass type.
@Override
public Request clone() {
  return (Request) super.clone();
}
}
|
apache/jackrabbit | 37,697 | jackrabbit-jcr-commons/src/main/java/org/apache/jackrabbit/value/ValueHelper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.value;
import static javax.jcr.PropertyType.BINARY;
import static javax.jcr.PropertyType.BOOLEAN;
import static javax.jcr.PropertyType.DATE;
import static javax.jcr.PropertyType.DECIMAL;
import static javax.jcr.PropertyType.DOUBLE;
import static javax.jcr.PropertyType.LONG;
import static javax.jcr.PropertyType.NAME;
import static javax.jcr.PropertyType.PATH;
import static javax.jcr.PropertyType.REFERENCE;
import static javax.jcr.PropertyType.STRING;
import static javax.jcr.PropertyType.UNDEFINED;
import static javax.jcr.PropertyType.WEAKREFERENCE;
import org.apache.jackrabbit.util.Base64;
import org.apache.jackrabbit.util.Text;
import org.apache.jackrabbit.util.TransientFileFactory;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.Value;
import javax.jcr.ValueFormatException;
import javax.jcr.ValueFactory;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.FilterInputStream;
import java.io.OutputStream;
import java.io.BufferedOutputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* The <code>ValueHelper</code> class provides several <code>Value</code>
* related utility methods.
*/
public class ValueHelper {
/**
 * Private constructor: {@code ValueHelper} is a static utility class and
 * must not be instantiated.
 */
private ValueHelper() {
}
/**
 * Table of supported property-type conversions, keyed by source type; the
 * mapped set holds the target types the key type may be converted to.
 * STRING and BINARY sources are handled separately in
 * {@link #isSupportedConversion(int, int)} (they convert to any type) and
 * therefore do not appear as keys here. See JSR 283, section 3.6.4
 * "Property Type Conversion".
 */
private static final Map<Integer, Set<Integer>> SUPPORTED_CONVERSIONS = new HashMap<Integer, Set<Integer>>();
static {
    SUPPORTED_CONVERSIONS.put(DATE, immutableSetOf(STRING, BINARY, DOUBLE, DECIMAL, LONG));
    SUPPORTED_CONVERSIONS.put(DOUBLE, immutableSetOf(STRING, BINARY, DECIMAL, DATE, LONG));
    SUPPORTED_CONVERSIONS.put(DECIMAL, immutableSetOf(STRING, BINARY, DOUBLE, DATE, LONG));
    SUPPORTED_CONVERSIONS.put(LONG, immutableSetOf(STRING, BINARY, DECIMAL, DATE, DOUBLE));
    SUPPORTED_CONVERSIONS.put(BOOLEAN, immutableSetOf(STRING, BINARY));
    SUPPORTED_CONVERSIONS.put(NAME, immutableSetOf(STRING, BINARY, PATH, PropertyType.URI));
    SUPPORTED_CONVERSIONS.put(PATH, immutableSetOf(STRING, BINARY, NAME, PropertyType.URI));
    SUPPORTED_CONVERSIONS.put(PropertyType.URI, immutableSetOf(STRING, BINARY, NAME, PATH));
    SUPPORTED_CONVERSIONS.put(REFERENCE, immutableSetOf(STRING, BINARY, WEAKREFERENCE));
    SUPPORTED_CONVERSIONS.put(WEAKREFERENCE, immutableSetOf(STRING, BINARY, REFERENCE));
}
/**
 * Builds an unmodifiable {@code Set<Integer>} from the given property-type
 * constants.
 *
 * @param types the property type constants to include
 * @return an unmodifiable set containing the given types
 */
private static Set<Integer> immutableSetOf(int... types) {
    Set<Integer> collected = new HashSet<Integer>(types.length * 2);
    for (int i = 0; i < types.length; i++) {
        collected.add(types[i]);
    }
    return Collections.unmodifiableSet(collected);
}
/**
 * Determines whether a value of type {@code fromType} can be converted to
 * {@code toType} per the JSR 283 conversion rules: identity conversions
 * and conversions from STRING or BINARY are always supported; everything
 * else is looked up in the static conversion table.
 *
 * @param fromType the source property type
 * @param toType   the target property type
 * @return {@code true} if the conversion is supported
 */
public static boolean isSupportedConversion(int fromType, int toType) {
    if (fromType == toType || fromType == STRING || fromType == BINARY) {
        return true;
    }
    Set<Integer> targets = SUPPORTED_CONVERSIONS.get(fromType);
    return targets != null && targets.contains(toType);
}
/**
 * Verifies that converting from {@code fromType} to {@code toType} is
 * supported, throwing if it is not.
 *
 * @param fromType the source property type
 * @param toType   the target property type
 * @throws ValueFormatException if the conversion is not supported
 */
public static void checkSupportedConversion(int fromType, int toType) throws ValueFormatException {
    if (isSupportedConversion(fromType, toType)) {
        return;
    }
    throw new ValueFormatException("Unsupported conversion from '"
            + PropertyType.nameFromValue(fromType) + "' to '"
            + PropertyType.nameFromValue(toType) + '\'');
}
/**
 * Creates a value of the given target type from the given string.
 *
 * @param srcValue   the source string; may be {@code null}
 * @param targetType the type of the value to create
 * @param factory    the factory used to create the value
 * @return the created value, or {@code null} if {@code srcValue} is
 *         {@code null}
 * @throws ValueFormatException     if the string cannot be converted to
 *                                  the target type
 * @throws IllegalArgumentException if {@code targetType} is not a valid
 *                                  property type constant
 * @see #convert(Value, int, ValueFactory)
 */
public static Value convert(String srcValue, int targetType, ValueFactory factory)
        throws ValueFormatException, IllegalArgumentException {
    return srcValue == null ? null : factory.createValue(srcValue, targetType);
}
/**
 * Creates a value of the given target type from the given stream: the
 * stream is first wrapped in a BINARY value and then converted.
 *
 * @param srcValue   the source stream; may be {@code null}
 * @param targetType the type of the value to create
 * @param factory    the factory used to create the value
 * @return the created value, or {@code null} if {@code srcValue} is
 *         {@code null}
 * @throws ValueFormatException     if the stream contents cannot be
 *                                  converted to the target type
 * @throws IllegalArgumentException if {@code targetType} is not a valid
 *                                  property type constant
 */
public static Value convert(InputStream srcValue, int targetType, ValueFactory factory)
        throws ValueFormatException, IllegalArgumentException {
    return srcValue == null
            ? null
            : convert(factory.createValue(srcValue), targetType, factory);
}
/**
 * Creates an array of values of the given target type from the given
 * strings, converting element-wise via
 * {@link #convert(String, int, ValueFactory)}.
 *
 * @param srcValues  the source strings; may be {@code null}, and may
 *                   contain {@code null} elements (mapped to {@code null})
 * @param targetType the type of the values to create
 * @param factory    the factory used to create the values
 * @return the created values, or {@code null} if {@code srcValues} is
 *         {@code null}
 * @throws ValueFormatException     if a string cannot be converted
 * @throws IllegalArgumentException if {@code targetType} is not a valid
 *                                  property type constant
 * @see #convert(Value, int, ValueFactory)
 */
public static Value[] convert(String[] srcValues, int targetType, ValueFactory factory)
        throws ValueFormatException, IllegalArgumentException {
    if (srcValues == null) {
        return null;
    }
    Value[] converted = new Value[srcValues.length];
    for (int idx = 0; idx < srcValues.length; idx++) {
        converted[idx] = convert(srcValues[idx], targetType, factory);
    }
    return converted;
}
/**
 * Creates an array of values of the given target type from the given
 * streams, converting element-wise via
 * {@link #convert(InputStream, int, ValueFactory)}.
 *
 * @param srcValues  the source streams; may be {@code null}, and may
 *                   contain {@code null} elements (mapped to {@code null})
 * @param targetType the type of the values to create
 * @param factory    the factory used to create the values
 * @return the created values, or {@code null} if {@code srcValues} is
 *         {@code null}
 * @throws ValueFormatException     if a stream cannot be converted
 * @throws IllegalArgumentException if {@code targetType} is not a valid
 *                                  property type constant
 * @see #convert(Value, int, ValueFactory)
 */
public static Value[] convert(InputStream[] srcValues, int targetType,
                              ValueFactory factory)
        throws ValueFormatException, IllegalArgumentException {
    if (srcValues == null) {
        return null;
    }
    Value[] converted = new Value[srcValues.length];
    for (int idx = 0; idx < srcValues.length; idx++) {
        converted[idx] = convert(srcValues[idx], targetType, factory);
    }
    return converted;
}
/**
 * Converts the given values to values of the given target type, converting
 * element-wise via {@link #convert(Value, int, ValueFactory)}. All non-null
 * source values must share a single type.
 *
 * @param srcValues  the values to convert; may be {@code null}, and may
 *                   contain {@code null} elements (mapped to {@code null})
 * @param targetType the type to convert to
 * @param factory    the factory used to create the converted values
 * @return the converted values, or {@code null} if {@code srcValues} is
 *         {@code null}
 * @throws ValueFormatException     if a value cannot be converted, or the
 *                                  source values are of mixed types
 * @throws IllegalArgumentException if {@code targetType} is not a valid
 *                                  property type constant
 * @see #convert(Value, int, ValueFactory)
 */
public static Value[] convert(Value[] srcValues, int targetType,
                              ValueFactory factory)
        throws ValueFormatException, IllegalArgumentException {
    if (srcValues == null) {
        return null;
    }
    Value[] converted = new Value[srcValues.length];
    int detectedType = PropertyType.UNDEFINED;
    for (int idx = 0; idx < srcValues.length; idx++) {
        Value src = srcValues[idx];
        if (src == null) {
            converted[idx] = null;
            continue;
        }
        // enforce that all non-null source values share one type
        if (detectedType == PropertyType.UNDEFINED) {
            detectedType = src.getType();
        } else if (detectedType != src.getType()) {
            throw new ValueFormatException("inhomogeneous type of values");
        }
        converted[idx] = convert(src, targetType, factory);
    }
    return converted;
}
/**
 * Converts the given value to a value of the specified target type.
 * The conversion is performed according to the rules described in
 * "3.6.4 Property Type Conversion" in the JSR 283 specification.
 *
 * @param srcValue   the value to convert; {@code null} yields {@code null}
 * @param targetType the {@link PropertyType} constant to convert to
 * @param factory    the factory used to create the converted value
 * @return the converted value, the original value if its type already
 *         equals {@code targetType}, or {@code null} if {@code srcValue}
 *         is {@code null}
 * @throws ValueFormatException     if the conversion fails or is not
 *                                  supported for the given type pair
 * @throws IllegalStateException    if the source value is in a state that
 *                                  prevents reading it (see {@link Value})
 * @throws IllegalArgumentException if {@code targetType} or the source
 *                                  value's type is not a valid property
 *                                  type constant
 */
public static Value convert(Value srcValue, int targetType, ValueFactory factory)
        throws ValueFormatException, IllegalStateException,
        IllegalArgumentException {
    if (srcValue == null) {
        return null;
    }
    Value val;
    int srcType = srcValue.getType();
    if (srcType == targetType) {
        // no conversion needed, return original value
        return srcValue;
    }
    switch (targetType) {
        case PropertyType.STRING:
            // convert to STRING
            try {
                val = factory.createValue(srcValue.getString());
            } catch (RepositoryException re) {
                throw new ValueFormatException("conversion failed: "
                        + PropertyType.nameFromValue(srcType) + " to "
                        + PropertyType.nameFromValue(targetType), re);
            }
            break;
        case PropertyType.BINARY:
            // convert to BINARY
            try {
                val = factory.createValue(srcValue.getBinary());
            } catch (RepositoryException re) {
                throw new ValueFormatException("conversion failed: "
                        + PropertyType.nameFromValue(srcType) + " to "
                        + PropertyType.nameFromValue(targetType), re);
            }
            break;
        case PropertyType.BOOLEAN:
            // convert to BOOLEAN
            try {
                val = factory.createValue(srcValue.getBoolean());
            } catch (RepositoryException re) {
                throw new ValueFormatException("conversion failed: "
                        + PropertyType.nameFromValue(srcType) + " to "
                        + PropertyType.nameFromValue(targetType), re);
            }
            break;
        case PropertyType.DATE:
            // convert to DATE
            try {
                val = factory.createValue(srcValue.getDate());
            } catch (RepositoryException re) {
                throw new ValueFormatException("conversion failed: "
                        + PropertyType.nameFromValue(srcType) + " to "
                        + PropertyType.nameFromValue(targetType), re);
            }
            break;
        case PropertyType.DOUBLE:
            // convert to DOUBLE
            try {
                val = factory.createValue(srcValue.getDouble());
            } catch (RepositoryException re) {
                throw new ValueFormatException("conversion failed: "
                        + PropertyType.nameFromValue(srcType) + " to "
                        + PropertyType.nameFromValue(targetType), re);
            }
            break;
        case PropertyType.LONG:
            // convert to LONG
            try {
                val = factory.createValue(srcValue.getLong());
            } catch (RepositoryException re) {
                throw new ValueFormatException("conversion failed: "
                        + PropertyType.nameFromValue(srcType) + " to "
                        + PropertyType.nameFromValue(targetType), re);
            }
            break;
        case PropertyType.DECIMAL:
            // convert to DECIMAL
            try {
                val = factory.createValue(srcValue.getDecimal());
            } catch (RepositoryException re) {
                throw new ValueFormatException("conversion failed: "
                        + PropertyType.nameFromValue(srcType) + " to "
                        + PropertyType.nameFromValue(targetType), re);
            }
            break;
        case PropertyType.PATH:
            // convert to PATH
            switch (srcType) {
                case PropertyType.PATH:
                    // no conversion needed, return original value
                    // (redundant code, just here for the sake of clarity)
                    return srcValue;
                case PropertyType.BINARY:
                case PropertyType.STRING:
                case PropertyType.NAME: // a name is always also a relative path
                    // try conversion via string
                    String path;
                    try {
                        // get string value
                        path = srcValue.getString();
                    } catch (RepositoryException re) {
                        // should never happen
                        throw new ValueFormatException("failed to convert source value to PATH value",
                                re);
                    }
                    // the following call will throw ValueFormatException
                    // if path is not a valid PATH
                    val = factory.createValue(path, targetType);
                    break;
                case PropertyType.URI:
                    URI uri;
                    try {
                        uri = URI.create(srcValue.getString());
                    } catch (RepositoryException re) {
                        // should never happen
                        throw new ValueFormatException("failed to convert source value to PATH value",
                                re);
                    }
                    if (uri.isAbsolute()) {
                        // uri contains scheme...
                        throw new ValueFormatException("failed to convert URI value to PATH value");
                    }
                    String p = uri.getPath();
                    if (p.startsWith("./")) {
                        // strip the "./" prefix that is added when a
                        // relative path is converted to a URI
                        // (JSR 283, 3.6.4.8/3.6.4.9)
                        p = p.substring(2);
                    }
                    // the following call will throw ValueFormatException
                    // if p is not a valid PATH
                    val = factory.createValue(p, targetType);
                    break;
                case PropertyType.BOOLEAN:
                case PropertyType.DATE:
                case PropertyType.DOUBLE:
                case PropertyType.DECIMAL:
                case PropertyType.LONG:
                case PropertyType.REFERENCE:
                case PropertyType.WEAKREFERENCE:
                    // unsupported source types for PATH
                    throw new ValueFormatException("conversion failed: "
                            + PropertyType.nameFromValue(srcType) + " to "
                            + PropertyType.nameFromValue(targetType));
                default:
                    throw new IllegalArgumentException("not a valid type constant: " + srcType);
            }
            break;
        case PropertyType.NAME:
            // convert to NAME
            switch (srcType) {
                case PropertyType.NAME:
                    // no conversion needed, return original value
                    // (redundant code, just here for the sake of clarity)
                    return srcValue;
                case PropertyType.BINARY:
                case PropertyType.STRING:
                case PropertyType.PATH: // path might be a name (relative path of length 1)
                    // try conversion via string
                    String name;
                    try {
                        // get string value
                        name = srcValue.getString();
                    } catch (RepositoryException re) {
                        // should never happen
                        throw new ValueFormatException("failed to convert source value to NAME value",
                                re);
                    }
                    // the following call will throw ValueFormatException
                    // if name is not a valid NAME
                    val = factory.createValue(name, targetType);
                    break;
                case PropertyType.URI:
                    URI uri;
                    try {
                        uri = URI.create(srcValue.getString());
                    } catch (RepositoryException re) {
                        // should never happen
                        throw new ValueFormatException("failed to convert source value to NAME value",
                                re);
                    }
                    if (uri.isAbsolute()) {
                        // uri contains scheme...
                        throw new ValueFormatException("failed to convert URI value to NAME value");
                    }
                    String p = uri.getPath();
                    if (p.startsWith("./")) {
                        // strip the "./" prefix that is added when a name
                        // is converted to a URI (JSR 283, 3.6.4.8)
                        p = p.substring(2);
                    }
                    // the following call will throw ValueFormatException
                    // if p is not a valid NAME
                    val = factory.createValue(p, targetType);
                    break;
                case PropertyType.BOOLEAN:
                case PropertyType.DATE:
                case PropertyType.DOUBLE:
                case PropertyType.DECIMAL:
                case PropertyType.LONG:
                case PropertyType.REFERENCE:
                case PropertyType.WEAKREFERENCE:
                    // unsupported source types for NAME
                    throw new ValueFormatException("conversion failed: "
                            + PropertyType.nameFromValue(srcType) + " to "
                            + PropertyType.nameFromValue(targetType));
                default:
                    throw new IllegalArgumentException("not a valid type constant: " + srcType);
            }
            break;
        case PropertyType.REFERENCE:
            // convert to REFERENCE
            switch (srcType) {
                case PropertyType.REFERENCE:
                    // no conversion needed, return original value
                    // (redundant code, just here for the sake of clarity)
                    return srcValue;
                case PropertyType.BINARY:
                case PropertyType.STRING:
                case PropertyType.WEAKREFERENCE:
                    // try conversion via string
                    String uuid;
                    try {
                        // get string value
                        uuid = srcValue.getString();
                    } catch (RepositoryException re) {
                        // should never happen
                        throw new ValueFormatException("failed to convert source value to REFERENCE value", re);
                    }
                    val = factory.createValue(uuid, targetType);
                    break;
                case PropertyType.BOOLEAN:
                case PropertyType.DATE:
                case PropertyType.DOUBLE:
                case PropertyType.LONG:
                case PropertyType.DECIMAL:
                case PropertyType.PATH:
                case PropertyType.URI:
                case PropertyType.NAME:
                    // unsupported source types for REFERENCE
                    throw new ValueFormatException("conversion failed: "
                            + PropertyType.nameFromValue(srcType) + " to "
                            + PropertyType.nameFromValue(targetType));
                default:
                    throw new IllegalArgumentException("not a valid type constant: " + srcType);
            }
            break;
        case PropertyType.WEAKREFERENCE:
            // convert to WEAKREFERENCE
            switch (srcType) {
                case PropertyType.WEAKREFERENCE:
                    // no conversion needed, return original value
                    // (redundant code, just here for the sake of clarity)
                    return srcValue;
                case PropertyType.BINARY:
                case PropertyType.STRING:
                case PropertyType.REFERENCE:
                    // try conversion via string
                    String uuid;
                    try {
                        // get string value
                        uuid = srcValue.getString();
                    } catch (RepositoryException re) {
                        // should never happen
                        throw new ValueFormatException("failed to convert source value to WEAKREFERENCE value", re);
                    }
                    val = factory.createValue(uuid, targetType);
                    break;
                case PropertyType.BOOLEAN:
                case PropertyType.DATE:
                case PropertyType.DOUBLE:
                case PropertyType.LONG:
                case PropertyType.DECIMAL:
                case PropertyType.URI:
                case PropertyType.PATH:
                case PropertyType.NAME:
                    // unsupported source types for WEAKREFERENCE
                    throw new ValueFormatException("conversion failed: "
                            + PropertyType.nameFromValue(srcType) + " to "
                            + PropertyType.nameFromValue(targetType));
                default:
                    throw new IllegalArgumentException("not a valid type constant: " + srcType);
            }
            break;
        case PropertyType.URI:
            // convert to URI
            switch (srcType) {
                case PropertyType.URI:
                    // no conversion needed, return original value
                    // (redundant code, just here for the sake of clarity)
                    return srcValue;
                case PropertyType.BINARY:
                case PropertyType.STRING:
                    // try conversion via string
                    String uuid;
                    try {
                        // get string value
                        uuid = srcValue.getString();
                    } catch (RepositoryException re) {
                        // should never happen
                        throw new ValueFormatException("failed to convert source value to URI value", re);
                    }
                    val = factory.createValue(uuid, targetType);
                    break;
                case PropertyType.NAME:
                    String name;
                    try {
                        // get string value
                        name = srcValue.getString();
                    } catch (RepositoryException re) {
                        // should never happen
                        throw new ValueFormatException("failed to convert source value to URI value", re);
                    }
                    // prefix name with "./" (jsr 283 spec 3.6.4.8)
                    val = factory.createValue("./" + name, targetType);
                    break;
                case PropertyType.PATH:
                    String path;
                    try {
                        // get string value
                        path = srcValue.getString();
                    } catch (RepositoryException re) {
                        // should never happen
                        throw new ValueFormatException("failed to convert source value to URI value", re);
                    }
                    if (!path.startsWith("/")) {
                        // prefix non-absolute path with "./" (jsr 283 spec 3.6.4.9)
                        path = "./" + path;
                    }
                    val = factory.createValue(path, targetType);
                    break;
                case PropertyType.BOOLEAN:
                case PropertyType.DATE:
                case PropertyType.DOUBLE:
                case PropertyType.LONG:
                case PropertyType.DECIMAL:
                case PropertyType.REFERENCE:
                case PropertyType.WEAKREFERENCE:
                    // unsupported source types for URI
                    throw new ValueFormatException("conversion failed: "
                            + PropertyType.nameFromValue(srcType) + " to "
                            + PropertyType.nameFromValue(targetType));
                default:
                    throw new IllegalArgumentException("not a valid type constant: " + srcType);
            }
            break;
        default:
            throw new IllegalArgumentException("not a valid type constant: " + targetType);
    }
    return val;
}
/**
*
* @param srcValue
* @param factory
* @throws IllegalStateException
*/
public static Value copy(Value srcValue, ValueFactory factory)
throws IllegalStateException {
if (srcValue == null) {
return null;
}
Value newVal = null;
try {
switch (srcValue.getType()) {
case PropertyType.BINARY:
newVal = factory.createValue(srcValue.getStream());
break;
case PropertyType.BOOLEAN:
newVal = factory.createValue(srcValue.getBoolean());
break;
case PropertyType.DATE:
newVal = factory.createValue(srcValue.getDate());
break;
case PropertyType.DOUBLE:
newVal = factory.createValue(srcValue.getDouble());
break;
case PropertyType.LONG:
newVal = factory.createValue(srcValue.getLong());
break;
case PropertyType.DECIMAL:
newVal = factory.createValue(srcValue.getDecimal());
break;
case PropertyType.PATH:
case PropertyType.NAME:
case PropertyType.REFERENCE:
case PropertyType.WEAKREFERENCE:
case PropertyType.URI:
newVal = factory.createValue(srcValue.getString(), srcValue.getType());
break;
case PropertyType.STRING:
newVal = factory.createValue(srcValue.getString());
break;
}
} catch (RepositoryException re) {
// should never get here
}
return newVal;
}
/**
* @param srcValues
* @param factory
* @throws IllegalStateException
*/
public static Value[] copy(Value[] srcValues, ValueFactory factory)
throws IllegalStateException {
if (srcValues == null) {
return null;
}
Value[] newValues = new Value[srcValues.length];
for (int i = 0; i < srcValues.length; i++) {
newValues[i] = copy(srcValues[i], factory);
}
return newValues;
}
/**
* Serializes the given value to a <code>String</code>. The serialization
* format is the same as used by Document & System View XML, i.e.
* binary values will be Base64-encoded whereas for all others
* <code>{@link Value#getString()}</code> will be used.
*
* @param value the value to be serialized
* @param encodeBlanks if <code>true</code> space characters will be encoded
* as <code>"_x0020_"</code> within he output string.
* @return a string representation of the given value.
* @throws IllegalStateException if the given value is in an illegal state
* @throws RepositoryException if an error occured during the serialization.
*/
public static String serialize(Value value, boolean encodeBlanks)
throws IllegalStateException, RepositoryException {
StringWriter writer = new StringWriter();
try {
serialize(value, encodeBlanks, false, writer);
} catch (IOException ioe) {
throw new RepositoryException("failed to serialize value",
ioe);
}
return writer.toString();
}
    /**
     * Outputs the serialized value to a <code>Writer</code>. The serialization
     * format is the same as used by Document &amp; System View XML, i.e.
     * binary values will be Base64-encoded whereas for all others
     * <code>{@link Value#getString()}</code> will be used for serialization.
     *
     * @param value the value to be serialized
     * @param encodeBlanks if <code>true</code> space characters will be encoded
     *                     as <code>"_x0020_"</code> within the output string.
     * @param enforceBase64 if <code>true</code>, base64 encoding will always be used
     * @param writer writer to output the encoded data
     * @throws IllegalStateException if the given value is in an illegal state
     * @throws IOException if an i/o error occurred during the
     *                     serialization
     * @throws RepositoryException if an error occurred during the serialization.
     */
    public static void serialize(Value value, boolean encodeBlanks, boolean enforceBase64,
                                 Writer writer)
            throws IllegalStateException, IOException, RepositoryException {
        if (value.getType() == PropertyType.BINARY) {
            // binary data, base64 encoding required;
            // the encodeBlanks flag can be ignored since base64-encoded
            // data cannot contain space characters
            InputStream in = value.getStream();
            try {
                Base64.encode(in, writer);
                // no need to close StringWriter
                //writer.close();
            } finally {
                // always close the source stream, swallowing a failure on
                // close so it cannot mask an exception from the encoding above
                try {
                    in.close();
                } catch (IOException e) {
                    // ignore
                }
            }
        } else {
            String textVal = value.getString();
            if (enforceBase64) {
                // caller requested base64 regardless of type: encode the
                // UTF-8 bytes of the string representation
                byte bytes[] = textVal.getBytes(StandardCharsets.UTF_8);
                Base64.encode(bytes, 0, bytes.length, writer);
            }
            else {
                if (encodeBlanks) {
                    // encode blanks in string
                    textVal = Text.replace(textVal, " ", "_x0020_");
                }
                writer.write(textVal);
            }
        }
    }
    /**
     * Deserializes the given string to a <code>Value</code> of the given type.
     *
     * @param value string to be deserialized
     * @param type type of value
     * @param decodeBlanks if <code>true</code> <code>"_x0020_"</code>
     *                     character sequences will be decoded to single space
     *                     characters each.
     * @param factory ValueFactory used to build the <code>Value</code> object.
     * @return the deserialized <code>Value</code>
     * @throws ValueFormatException if the string data is not of the required
     *                              format
     * @throws RepositoryException if an error occurred during the
     *                             deserialization.
     */
    public static Value deserialize(String value, int type, boolean decodeBlanks,
                                    ValueFactory factory)
            throws ValueFormatException, RepositoryException {
        if (type == PropertyType.BINARY) {
            // base64 encoded binary value;
            // the encodeBlanks flag can be ignored since base64-encoded
            // data cannot contain encoded space characters
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            try {
                Base64.decode(value, baos);
                // no need to close ByteArrayOutputStream
                //baos.close();
            } catch (IOException ioe) {
                throw new RepositoryException("failed to decode binary value",
                        ioe);
            }
            // NOTE: for performance reasons the BinaryValue is created directly
            // from the byte-array. This is inconsistent with the other calls,
            // that delegate the value creation to the ValueFactory.
            return new BinaryValue(baos.toByteArray());
        } else {
            if (decodeBlanks) {
                // decode encoded blanks in value
                value = Text.replace(value, "_x0020_", " ");
            }
            // convert the plain string to the requested property type
            return convert(value, type, factory);
        }
    }
    /**
     * Deserializes the string data read from the given reader to a
     * <code>Value</code> of the given type.
     *
     * @param reader reader for the string data to be deserialized
     * @param type type of value
     * @param decodeBlanks if <code>true</code> <code>"_x0020_"</code>
     *                     character sequences will be decoded to single space
     *                     characters each.
     * @param factory ValueFactory used to build the <code>Value</code> object.
     * @return the deserialized <code>Value</code>
     * @throws IOException if an i/o error occurred during the
     *                     serialization
     * @throws ValueFormatException if the string data is not of the required
     *                              format
     * @throws RepositoryException if an error occurred during the
     *                             deserialization.
     */
    public static Value deserialize(Reader reader, int type,
                                    boolean decodeBlanks, ValueFactory factory)
            throws IOException, ValueFormatException, RepositoryException {
        if (type == PropertyType.BINARY) {
            // base64 encoded binary value;
            // the encodeBlanks flag can be ignored since base64-encoded
            // data cannot contain encoded space characters

            // decode to a temp file instead of buffering in memory, since the
            // binary payload may be arbitrarily large
            TransientFileFactory fileFactory = TransientFileFactory.getInstance();
            final File tmpFile = fileFactory.createTransientFile("bin", null, null);
            OutputStream out = new BufferedOutputStream(new FileOutputStream(tmpFile));
            try {
                Base64.decode(reader, out);
            } finally {
                out.close();
            }
            // create an InputStream that keeps a hard reference to the temp file
            // in order to prevent its automatic deletion once the associated
            // File object is reclaimed by the garbage collector;
            // pass InputStream wrapper to ValueFactory, that creates a BinaryValue.
            return factory.createValue(new FilterInputStream(new FileInputStream(tmpFile)) {
                public void close() throws IOException {
                    in.close();
                    // temp file can now safely be removed
                    tmpFile.delete();
                }
            });
            /*
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            Base64.decode(reader, baos);
            // no need to close ByteArrayOutputStream
            //baos.close();
            return new BinaryValue(baos.toByteArray());
            */
        } else {
            // non-binary: slurp the whole character stream into a string
            char[] chunk = new char[8192];
            int read;
            StringBuilder buf = new StringBuilder();
            while ((read = reader.read(chunk)) > -1) {
                buf.append(chunk, 0, read);
            }
            String value = buf.toString();
            if (decodeBlanks) {
                // decode encoded blanks in value
                value = Text.replace(value, "_x0020_", " ");
            }
            // convert the plain string to the requested property type
            return convert(value, type, factory);
        }
    }
/**
* Determine the {@link javax.jcr.PropertyType} of the passed values if all are of
* the same type.
*
* @param values array of values of the same type
* @return {@link javax.jcr.PropertyType#UNDEFINED} if {@code values} is empty,
* {@code values[0].getType()} otherwise.
* @throws javax.jcr.ValueFormatException if not all {@code values} are of the same type
*/
public static int getType(Value[] values) throws ValueFormatException {
int type = UNDEFINED;
for (Value value : values) {
if (value != null) {
if (type == UNDEFINED) {
type = value.getType();
} else if (value.getType() != type) {
throw new ValueFormatException(
"All values of a multi-valued property must be of the same type");
}
}
}
return type;
}
}
|
googleapis/google-cloud-java | 37,431 | java-service-management/proto-google-cloud-service-management-v1/src/main/java/com/google/api/servicemanagement/v1/ListServiceRolloutsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/servicemanagement/v1/servicemanager.proto
// Protobuf Java Version: 3.25.8
package com.google.api.servicemanagement.v1;
/**
*
*
* <pre>
* Request message for 'ListServiceRollouts'
* </pre>
*
* Protobuf type {@code google.api.servicemanagement.v1.ListServiceRolloutsRequest}
*/
public final class ListServiceRolloutsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.servicemanagement.v1.ListServiceRolloutsRequest)
ListServiceRolloutsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListServiceRolloutsRequest.newBuilder() to construct.
private ListServiceRolloutsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListServiceRolloutsRequest() {
serviceName_ = "";
pageToken_ = "";
filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListServiceRolloutsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.api.servicemanagement.v1.ServiceManagerProto
.internal_static_google_api_servicemanagement_v1_ListServiceRolloutsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.servicemanagement.v1.ServiceManagerProto
.internal_static_google_api_servicemanagement_v1_ListServiceRolloutsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.servicemanagement.v1.ListServiceRolloutsRequest.class,
com.google.api.servicemanagement.v1.ListServiceRolloutsRequest.Builder.class);
}
public static final int SERVICE_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object serviceName_ = "";
/**
*
*
* <pre>
* Required. The name of the service. See the
* [overview](https://cloud.google.com/service-management/overview) for naming
* requirements. For example: `example.googleapis.com`.
* </pre>
*
* <code>string service_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The serviceName.
*/
@java.lang.Override
public java.lang.String getServiceName() {
java.lang.Object ref = serviceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
serviceName_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the service. See the
* [overview](https://cloud.google.com/service-management/overview) for naming
* requirements. For example: `example.googleapis.com`.
* </pre>
*
* <code>string service_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for serviceName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getServiceNameBytes() {
java.lang.Object ref = serviceName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
serviceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The token of the page to retrieve.
* </pre>
*
* <code>string page_token = 2;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The token of the page to retrieve.
* </pre>
*
* <code>string page_token = 2;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The max number of items to include in the response list. Page size is 50
* if not specified. Maximum value is 100.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Required. Use `filter` to return subset of rollouts.
* The following filters are supported:
*
* -- By [status]
* [google.api.servicemanagement.v1.Rollout.RolloutStatus]. For example,
* `filter='status=SUCCESS'`
*
* -- By [strategy]
* [google.api.servicemanagement.v1.Rollout.strategy]. For example,
* `filter='strategy=TrafficPercentStrategy'`
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Use `filter` to return subset of rollouts.
* The following filters are supported:
*
* -- By [status]
* [google.api.servicemanagement.v1.Rollout.RolloutStatus]. For example,
* `filter='status=SUCCESS'`
*
* -- By [strategy]
* [google.api.servicemanagement.v1.Rollout.strategy]. For example,
* `filter='strategy=TrafficPercentStrategy'`
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
  /**
   * Always initialized: this message declares no proto2-style required
   * fields, so the memoized flag is simply set and {@code true} returned.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output}. Fields are written in
   * ascending field-number order (1..4); a field whose value is still the
   * empty string / zero is omitted, and any unknown fields are appended last.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, serviceName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pageToken_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes the serialized byte size of this message and memoizes it in
   * {@code memoizedSize}. Mirrors {@link #writeTo}: only fields with a
   * non-default value (plus unknown fields) contribute to the total.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, serviceName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pageToken_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Field-by-field equality over all four declared fields plus the unknown
   * field set; non-{@code ListServiceRolloutsRequest} arguments fall back to
   * {@code super.equals}.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.api.servicemanagement.v1.ListServiceRolloutsRequest)) {
      return super.equals(obj);
    }
    com.google.api.servicemanagement.v1.ListServiceRolloutsRequest other =
        (com.google.api.servicemanagement.v1.ListServiceRolloutsRequest) obj;
    if (!getServiceName().equals(other.getServiceName())) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash over the descriptor, every declared field (keyed by its field
   * number), and the unknown fields — consistent with {@link #equals}. The
   * result is memoized in {@code memoizedHashCode}.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + SERVICE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getServiceName().hashCode();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.api.servicemanagement.v1.ListServiceRolloutsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for 'ListServiceRollouts'
* </pre>
*
* Protobuf type {@code google.api.servicemanagement.v1.ListServiceRolloutsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.api.servicemanagement.v1.ListServiceRolloutsRequest)
com.google.api.servicemanagement.v1.ListServiceRolloutsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.api.servicemanagement.v1.ServiceManagerProto
.internal_static_google_api_servicemanagement_v1_ListServiceRolloutsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.servicemanagement.v1.ServiceManagerProto
.internal_static_google_api_servicemanagement_v1_ListServiceRolloutsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.servicemanagement.v1.ListServiceRolloutsRequest.class,
com.google.api.servicemanagement.v1.ListServiceRolloutsRequest.Builder.class);
}
// Construct using com.google.api.servicemanagement.v1.ListServiceRolloutsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
serviceName_ = "";
pageToken_ = "";
pageSize_ = 0;
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.api.servicemanagement.v1.ServiceManagerProto
.internal_static_google_api_servicemanagement_v1_ListServiceRolloutsRequest_descriptor;
}
@java.lang.Override
public com.google.api.servicemanagement.v1.ListServiceRolloutsRequest
getDefaultInstanceForType() {
return com.google.api.servicemanagement.v1.ListServiceRolloutsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.api.servicemanagement.v1.ListServiceRolloutsRequest build() {
com.google.api.servicemanagement.v1.ListServiceRolloutsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.api.servicemanagement.v1.ListServiceRolloutsRequest buildPartial() {
com.google.api.servicemanagement.v1.ListServiceRolloutsRequest result =
new com.google.api.servicemanagement.v1.ListServiceRolloutsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.api.servicemanagement.v1.ListServiceRolloutsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.serviceName_ = serviceName_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.api.servicemanagement.v1.ListServiceRolloutsRequest) {
return mergeFrom((com.google.api.servicemanagement.v1.ListServiceRolloutsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    /**
     * Merges the non-default fields of {@code other} into this builder.
     * Each copied field is marked as set in {@code bitField0_}; unknown
     * fields are merged as well. Merging the default instance is a no-op.
     */
    public Builder mergeFrom(com.google.api.servicemanagement.v1.ListServiceRolloutsRequest other) {
      if (other
          == com.google.api.servicemanagement.v1.ListServiceRolloutsRequest.getDefaultInstance())
        return this;
      if (!other.getServiceName().isEmpty()) {
        serviceName_ = other.serviceName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    /**
     * Parses fields from {@code input}, dispatching on each wire tag
     * (tag = field number &lt;&lt; 3 | wire type: 10/18/34 are
     * length-delimited strings, 24 is a varint) and recording presence in
     * {@code bitField0_}. Unrecognized tags are preserved via
     * {@code parseUnknownField}; tag 0 or an end-group tag ends the loop.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                serviceName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    // Lazily-decoded field: holds either a String or a ByteString (see getters).
    private java.lang.Object serviceName_ = "";

    /**
     * Required. The name of the service. See the
     * [overview](https://cloud.google.com/service-management/overview) for naming
     * requirements. For example: `example.googleapis.com`.
     *
     * <code>string service_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The serviceName.
     */
    public java.lang.String getServiceName() {
      java.lang.Object ref = serviceName_;
      // serviceName_ may still hold the wire-format ByteString; decode once and
      // cache the String so later reads skip the UTF-8 conversion.
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        serviceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Required. The name of the service (see {@link #getServiceName()}).
     *
     * <code>string service_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for serviceName.
     */
    public com.google.protobuf.ByteString getServiceNameBytes() {
      java.lang.Object ref = serviceName_;
      // Mirror of getServiceName(): encode once and cache the ByteString form.
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        serviceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the required service name (e.g. `example.googleapis.com`).
     *
     * <code>string service_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The serviceName to set; must not be null.
     * @return This builder for chaining.
     */
    public Builder setServiceName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      serviceName_ = value;
      // Bit 0 of bitField0_ marks service_name as explicitly set.
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Resets service_name to its default (empty string) and clears its
     * has-bit.
     *
     * <code>string service_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearServiceName() {
      serviceName_ = getDefaultInstance().getServiceName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * Sets service_name from raw bytes, validating that they are well-formed
     * UTF-8 (proto3 strings must be valid UTF-8).
     *
     * <code>string service_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for serviceName to set; must not be null.
     * @return This builder for chaining.
     */
    public Builder setServiceNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      serviceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Lazily-decoded field: holds either a String or a ByteString (see getters).
    private java.lang.Object pageToken_ = "";

    /**
     * The token of the page to retrieve.
     *
     * <code>string page_token = 2;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      // Decode the wire-format ByteString on first access and cache the String.
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * The token of the page to retrieve, as UTF-8 bytes.
     *
     * <code>string page_token = 2;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      // Encode once and cache the ByteString form for later serialization.
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the page token.
     *
     * <code>string page_token = 2;</code>
     *
     * @param value The pageToken to set; must not be null.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      // Bit 1 of bitField0_ marks page_token as explicitly set.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Resets page_token to its default (empty string) and clears its has-bit.
     *
     * <code>string page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * Sets page_token from raw bytes, validating UTF-8 well-formedness.
     *
     * <code>string page_token = 2;</code>
     *
     * @param value The bytes for pageToken to set; must not be null.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private int pageSize_;

    /**
     * The max number of items to include in the response list. Page size is 50
     * if not specified. Maximum value is 100.
     *
     * <code>int32 page_size = 3;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     * Sets the max number of items per page (server default 50, max 100 —
     * per the proto comment; the server enforces the range).
     *
     * <code>int32 page_size = 3;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {

      pageSize_ = value;
      // Bit 2 of bitField0_ marks page_size as explicitly set.
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * Resets page_size to its default (0, i.e. "unspecified") and clears its
     * has-bit.
     *
     * <code>int32 page_size = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    // Lazily-decoded field: holds either a String or a ByteString (see getters).
    private java.lang.Object filter_ = "";

    /**
     * Required. Use `filter` to return a subset of rollouts. Supported filters
     * (per the proto definition):
     * by status, e.g. `filter='status=SUCCESS'`;
     * by strategy, e.g. `filter='strategy=TrafficPercentStrategy'`.
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      // Decode the wire-format ByteString on first access and cache the String.
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * The rollout filter (see {@link #getFilter()}), as UTF-8 bytes.
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      // Encode once and cache the ByteString form for later serialization.
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the required rollout filter, e.g. `status=SUCCESS` or
     * `strategy=TrafficPercentStrategy`.
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The filter to set; must not be null.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      // Bit 3 of bitField0_ marks filter as explicitly set.
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     * Resets filter to its default (empty string) and clears its has-bit.
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }

    /**
     * Sets filter from raw bytes, validating UTF-8 well-formedness.
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for filter to set; must not be null.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    // Final pass-throughs to the generated superclass; declared here so the
    // return type is this concrete Builder (fluent chaining keeps working).
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.api.servicemanagement.v1.ListServiceRolloutsRequest)
}
// @@protoc_insertion_point(class_scope:google.api.servicemanagement.v1.ListServiceRolloutsRequest)
  // Singleton default instance: all fields at their proto3 defaults.
  // Assigned in a static initializer rather than inline, matching protoc output.
  private static final com.google.api.servicemanagement.v1.ListServiceRolloutsRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.api.servicemanagement.v1.ListServiceRolloutsRequest();
  }

  /** Returns the shared immutable default instance of this message. */
  public static com.google.api.servicemanagement.v1.ListServiceRolloutsRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared stateless parser. Parsing is delegated to a fresh Builder; any
  // partially-parsed message is attached to the thrown exception so callers
  // can inspect what was read before the failure.
  private static final com.google.protobuf.Parser<ListServiceRolloutsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListServiceRolloutsRequest>() {
        @java.lang.Override
        public ListServiceRolloutsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type expected
            // by callers of parsePartialFrom.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<ListServiceRolloutsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListServiceRolloutsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.api.servicemanagement.v1.ListServiceRolloutsRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/commerce/consumer/procurement/v1/license_management_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.commerce.consumer.procurement.v1;
/**
*
*
* <pre>
* Request message for updating a license pool.
* </pre>
*
* Protobuf type {@code google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest}
*/
public final class UpdateLicensePoolRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest)
UpdateLicensePoolRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateLicensePoolRequest.newBuilder() to construct; constructors are
  // private so all instances go through the builder or the default instance.
  private UpdateLicensePoolRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateLicensePoolRequest() {}

  // Reflection hook used by the protobuf runtime to allocate instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateLicensePoolRequest();
  }
  /** Returns the proto descriptor for this message type (reflection support). */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.commerce.consumer.procurement.v1.LicenseManagementServiceProto
        .internal_static_google_cloud_commerce_consumer_procurement_v1_UpdateLicensePoolRequest_descriptor;
  }

  // Maps descriptor fields to the generated accessors for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.commerce.consumer.procurement.v1.LicenseManagementServiceProto
        .internal_static_google_cloud_commerce_consumer_procurement_v1_UpdateLicensePoolRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest.class,
            com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest.Builder
                .class);
  }
  private int bitField0_;
  public static final int LICENSE_POOL_FIELD_NUMBER = 1;
  private com.google.cloud.commerce.consumer.procurement.v1.LicensePool licensePool_;

  /**
   * Required. The license pool to update. The pool's name field identifies it:
   * `billingAccounts/{billing_account}/orders/{order}/licensePool`.
   *
   * <code>.google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1
   * [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return Whether the licensePool field is set.
   */
  @java.lang.Override
  public boolean hasLicensePool() {
    // Bit 0 of bitField0_ tracks explicit presence of license_pool.
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   * Required. The license pool to update (see {@link #hasLicensePool()}).
   * Returns the type's default instance when the field is unset — never null.
   *
   * <code>.google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1
   * [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The licensePool.
   */
  @java.lang.Override
  public com.google.cloud.commerce.consumer.procurement.v1.LicensePool getLicensePool() {
    return licensePool_ == null
        ? com.google.cloud.commerce.consumer.procurement.v1.LicensePool.getDefaultInstance()
        : licensePool_;
  }

  /**
   * OrBuilder view of license_pool; on an immutable message this is the same
   * object {@link #getLicensePool()} returns.
   *
   * <code>.google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1
   * [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.cloud.commerce.consumer.procurement.v1.LicensePoolOrBuilder
      getLicensePoolOrBuilder() {
    return licensePool_ == null
        ? com.google.cloud.commerce.consumer.procurement.v1.LicensePool.getDefaultInstance()
        : licensePool_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;

  /**
   * Required. The list of fields to update.
   *
   * <code>.google.protobuf.FieldMask update_mask = 2
   * [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    // Bit 1 of bitField0_ tracks explicit presence of update_mask.
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   * Required. The list of fields to update. Returns FieldMask's default
   * instance when unset — never null.
   *
   * <code>.google.protobuf.FieldMask update_mask = 2
   * [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  /**
   * OrBuilder view of update_mask; same object as {@link #getUpdateMask()} on
   * an immutable message.
   *
   * <code>.google.protobuf.FieldMask update_mask = 2
   * [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  // No required proto2 fields here, so every instance is initialized; the
  // memo short-circuits repeated checks.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only fields whose presence bit is set, in field-number order,
  // then any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getLicensePool());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size of the message, memoized in memoizedSize
  // (-1 means "not yet computed"; safe because the message is immutable).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getLicensePool());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality: presence bits must match, then set fields must be
  // equal, and unknown fields must match too.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest other =
        (com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest) obj;

    if (hasLicensePool() != other.hasLicensePool()) return false;
    if (hasLicensePool()) {
      if (!getLicensePool().equals(other.getLicensePool())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixes the descriptor, each set field keyed by its field number, and
  // the unknown fields; memoized (0 means "not yet computed"). Consistent
  // with equals() above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasLicensePool()) {
      hash = (37 * hash) + LICENSE_POOL_FIELD_NUMBER;
      hash = (53 * hash) + getLicensePool().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. The in-memory overloads
  // (ByteBuffer/ByteString/byte[]) delegate to PARSER directly; the stream
  // overloads route through GeneratedMessageV3 helpers that translate
  // IOExceptions; "Delimited" variants read a varint length prefix first.
  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns a fresh builder with all fields unset. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a builder pre-populated from {@code prototype}. */
  public static Builder newBuilder(
      com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // The default instance shortcuts to an empty builder; any other instance
  // merges itself into a new builder.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for updating a license pool.
* </pre>
*
* Protobuf type {@code google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest)
com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequestOrBuilder {
    /** Returns the proto descriptor for this message type (reflection support). */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.commerce.consumer.procurement.v1.LicenseManagementServiceProto
          .internal_static_google_cloud_commerce_consumer_procurement_v1_UpdateLicensePoolRequest_descriptor;
    }

    // Maps descriptor fields to the generated accessors for reflective access.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.commerce.consumer.procurement.v1.LicenseManagementServiceProto
          .internal_static_google_cloud_commerce_consumer_procurement_v1_UpdateLicensePoolRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest.class,
              com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest.Builder
                  .class);
    }

    // Construct using
    // com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates the nested-message field builders when the runtime is
    // configured to always use field builders (alwaysUseFieldBuilders).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getLicensePoolFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    // Resets every field and presence bit, disposing any nested field
    // builders so they detach from this builder.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      licensePool_ = null;
      if (licensePoolBuilder_ != null) {
        licensePoolBuilder_.dispose();
        licensePoolBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.commerce.consumer.procurement.v1.LicenseManagementServiceProto
          .internal_static_google_cloud_commerce_consumer_procurement_v1_UpdateLicensePoolRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
        getDefaultInstanceForType() {
      return com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
          .getDefaultInstance();
    }

    // build() = buildPartial() plus an initialization check (no required
    // fields here, so the check always passes).
    @java.lang.Override
    public com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest build() {
      com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
        buildPartial() {
      com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest result =
          new com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each set field (from the builder or its nested field builder)
    // into the result, translating builder presence bits to message bits.
    private void buildPartial0(
        com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.licensePool_ =
            licensePoolBuilder_ == null ? licensePool_ : licensePoolBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Pass-throughs to the generated superclass; declared here so the return
    // type is this concrete Builder (fluent chaining keeps working).
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the typed merge when possible; otherwise falls back to
    // the reflective field-by-field merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest) {
        return mergeFrom(
            (com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges only the fields that are set on `other`; the default instance
    // merges as a no-op.
    public Builder mergeFrom(
        com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest other) {
      if (other
          == com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
              .getDefaultInstance()) return this;
      if (other.hasLicensePool()) {
        mergeLicensePool(other.getLicensePool());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    // No required fields, so a builder is always "initialized".
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge loop: reads tags until EOF (tag 0) or an end-group
    // tag, dispatching known field numbers (10 = field 1 message,
    // 18 = field 2 message) and preserving everything else as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getLicensePoolFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even when parsing aborts partway through.
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    private com.google.cloud.commerce.consumer.procurement.v1.LicensePool licensePool_;
    // Lazily-created nested builder; when non-null it owns the field value
    // and licensePool_ is ignored.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.commerce.consumer.procurement.v1.LicensePool,
            com.google.cloud.commerce.consumer.procurement.v1.LicensePool.Builder,
            com.google.cloud.commerce.consumer.procurement.v1.LicensePoolOrBuilder>
        licensePoolBuilder_;

    /**
     * Required. The license pool to update. Its name field identifies it:
     * `billingAccounts/{billing_account}/orders/{order}/licensePool`.
     *
     * <code>.google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1
     * [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return Whether the licensePool field is set.
     */
    public boolean hasLicensePool() {
      // Bit 0 of bitField0_ tracks explicit presence of license_pool.
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     * Required. The license pool to update. Returns the type's default
     * instance when unset — never null.
     *
     * <code>.google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1
     * [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The licensePool.
     */
    public com.google.cloud.commerce.consumer.procurement.v1.LicensePool getLicensePool() {
      // Prefer the nested builder's message when one exists.
      if (licensePoolBuilder_ == null) {
        return licensePool_ == null
            ? com.google.cloud.commerce.consumer.procurement.v1.LicensePool.getDefaultInstance()
            : licensePool_;
      } else {
        return licensePoolBuilder_.getMessage();
      }
    }
    /**
     * Sets the required license pool to update, replacing any existing value.
     *
     * <code>.google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1
     * [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The licensePool to set; must not be null.
     * @return This builder for chaining.
     */
    public Builder setLicensePool(
        com.google.cloud.commerce.consumer.procurement.v1.LicensePool value) {
      if (licensePoolBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        licensePool_ = value;
      } else {
        // A nested builder exists; route the value through it.
        licensePoolBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Sets license_pool from a builder; the builder is built immediately, so
     * later mutations to it do not affect this request.
     *
     * <code>.google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1
     * [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param builderForValue Builder whose built message becomes the value.
     * @return This builder for chaining.
     */
    public Builder setLicensePool(
        com.google.cloud.commerce.consumer.procurement.v1.LicensePool.Builder builderForValue) {
      if (licensePoolBuilder_ == null) {
        licensePool_ = builderForValue.build();
      } else {
        licensePoolBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Merges {@code value} into license_pool: if a non-default value is
     * already present it is field-wise merged, otherwise {@code value}
     * replaces it outright.
     *
     * <code>.google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1
     * [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The licensePool to merge in.
     * @return This builder for chaining.
     */
    public Builder mergeLicensePool(
        com.google.cloud.commerce.consumer.procurement.v1.LicensePool value) {
      if (licensePoolBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && licensePool_ != null
            && licensePool_
                != com.google.cloud.commerce.consumer.procurement.v1.LicensePool
                    .getDefaultInstance()) {
          getLicensePoolBuilder().mergeFrom(value);
        } else {
          licensePool_ = value;
        }
      } else {
        licensePoolBuilder_.mergeFrom(value);
      }
      // Only flag presence when a value actually ended up stored.
      if (licensePool_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The license pool to update.
*
* The license pool's name field is used to identify the license pool to
* update. Format:
* `billingAccounts/{billing_account}/orders/{order}/licensePool`.
* </pre>
*
* <code>
* .google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearLicensePool() {
bitField0_ = (bitField0_ & ~0x00000001);
licensePool_ = null;
if (licensePoolBuilder_ != null) {
licensePoolBuilder_.dispose();
licensePoolBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The license pool to update.
*
* The license pool's name field is used to identify the license pool to
* update. Format:
* `billingAccounts/{billing_account}/orders/{order}/licensePool`.
* </pre>
*
* <code>
* .google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.commerce.consumer.procurement.v1.LicensePool.Builder
getLicensePoolBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getLicensePoolFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The license pool to update.
*
* The license pool's name field is used to identify the license pool to
* update. Format:
* `billingAccounts/{billing_account}/orders/{order}/licensePool`.
* </pre>
*
* <code>
* .google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.commerce.consumer.procurement.v1.LicensePoolOrBuilder
getLicensePoolOrBuilder() {
if (licensePoolBuilder_ != null) {
return licensePoolBuilder_.getMessageOrBuilder();
} else {
return licensePool_ == null
? com.google.cloud.commerce.consumer.procurement.v1.LicensePool.getDefaultInstance()
: licensePool_;
}
}
/**
*
*
* <pre>
* Required. The license pool to update.
*
* The license pool's name field is used to identify the license pool to
* update. Format:
* `billingAccounts/{billing_account}/orders/{order}/licensePool`.
* </pre>
*
* <code>
* .google.cloud.commerce.consumer.procurement.v1.LicensePool license_pool = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.commerce.consumer.procurement.v1.LicensePool,
com.google.cloud.commerce.consumer.procurement.v1.LicensePool.Builder,
com.google.cloud.commerce.consumer.procurement.v1.LicensePoolOrBuilder>
getLicensePoolFieldBuilder() {
if (licensePoolBuilder_ == null) {
licensePoolBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.commerce.consumer.procurement.v1.LicensePool,
com.google.cloud.commerce.consumer.procurement.v1.LicensePool.Builder,
com.google.cloud.commerce.consumer.procurement.v1.LicensePoolOrBuilder>(
getLicensePool(), getParentForChildren(), isClean());
licensePool_ = null;
}
return licensePoolBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest)
private static final com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest();
}
public static com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateLicensePoolRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateLicensePoolRequest>() {
@java.lang.Override
public UpdateLicensePoolRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateLicensePoolRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateLicensePoolRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.commerce.consumer.procurement.v1.UpdateLicensePoolRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
hibernate/hibernate-orm | 34,985 | hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DerbyLegacyDialect.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.community.dialect;
import jakarta.persistence.TemporalType;
import jakarta.persistence.Timeout;
import org.hibernate.LockOptions;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.community.dialect.function.DerbyLpadEmulation;
import org.hibernate.community.dialect.function.DerbyRpadEmulation;
import org.hibernate.community.dialect.pagination.DerbyLimitHandler;
import org.hibernate.community.dialect.sequence.DerbySequenceSupport;
import org.hibernate.community.dialect.sequence.SequenceInformationExtractorDerbyDatabaseImpl;
import org.hibernate.community.dialect.temptable.DerbyLocalTemporaryTableStrategy;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.DmlTargetColumnQualifierSupport;
import org.hibernate.dialect.NationalizationSupport;
import org.hibernate.dialect.RowLockStrategy;
import org.hibernate.dialect.function.CaseLeastGreatestEmulation;
import org.hibernate.dialect.function.CastingConcatFunction;
import org.hibernate.dialect.function.ChrLiteralEmulation;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.CountFunction;
import org.hibernate.dialect.function.InsertSubstringOverlayEmulation;
import org.hibernate.dialect.identity.DB2IdentityColumnSupport;
import org.hibernate.dialect.identity.IdentityColumnSupport;
import org.hibernate.dialect.lock.internal.LockingSupportSimple;
import org.hibernate.dialect.lock.spi.LockingSupport;
import org.hibernate.dialect.pagination.AbstractLimitHandler;
import org.hibernate.dialect.pagination.LimitHandler;
import org.hibernate.dialect.sequence.SequenceSupport;
import org.hibernate.dialect.temptable.TemporaryTableKind;
import org.hibernate.dialect.temptable.TemporaryTableStrategy;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.exception.ConstraintViolationException;
import org.hibernate.exception.LockTimeoutException;
import org.hibernate.exception.spi.SQLExceptionConversionDelegate;
import org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtractor;
import org.hibernate.exception.spi.ViolatedConstraintNameExtractor;
import org.hibernate.internal.util.JdbcExceptionHelper;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.query.common.TemporalUnit;
import org.hibernate.query.sqm.CastType;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.sqm.mutation.internal.temptable.LocalTemporaryTableInsertStrategy;
import org.hibernate.query.sqm.mutation.internal.temptable.LocalTemporaryTableMutationStrategy;
import org.hibernate.query.sqm.mutation.spi.BeforeUseAction;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableInsertStrategy;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableMutationStrategy;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.SqlAstTranslatorFactory;
import org.hibernate.sql.ast.internal.PessimisticLockKind;
import org.hibernate.sql.ast.spi.LockingClauseStrategy;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.spi.StandardSqlAstTranslatorFactory;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorNoOpImpl;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
import org.hibernate.type.JavaObjectType;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.java.BigDecimalJavaType;
import org.hibernate.type.descriptor.jdbc.ObjectNullResolvingJdbcType;
import org.hibernate.type.descriptor.jdbc.SmallIntJdbcType;
import org.hibernate.type.descriptor.jdbc.TimestampJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.CapacityDependentDdlType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.sql.Types;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.BLOB;
import static org.hibernate.type.SqlTypes.BOOLEAN;
import static org.hibernate.type.SqlTypes.CHAR;
import static org.hibernate.type.SqlTypes.CLOB;
import static org.hibernate.type.SqlTypes.DECIMAL;
import static org.hibernate.type.SqlTypes.LONG32NVARCHAR;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
import static org.hibernate.type.SqlTypes.NCHAR;
import static org.hibernate.type.SqlTypes.NCLOB;
import static org.hibernate.type.SqlTypes.NUMERIC;
import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.TIME;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TIME_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TINYINT;
import static org.hibernate.type.SqlTypes.VARBINARY;
import static org.hibernate.type.SqlTypes.VARCHAR;
/**
* A {@linkplain Dialect SQL dialect} for Apache Derby.
*
* @author Simon Johnston
* @author Gavin King
*
*/
public class DerbyLegacyDialect extends Dialect {
// KNOWN LIMITATIONS:
// no support for nationalized data (nchar, nvarchar, nclob)
// * limited set of fields for extract()
// (no 'day of xxxx', nor 'week of xxxx')
// * no support for format()
// * pad() can only pad with blanks
// * can't cast String to Binary
// * can't select a parameter unless wrapped
// in a cast or function call
private final LimitHandler limitHandler = getVersion().isBefore( 10, 5 )
? AbstractLimitHandler.NO_LIMIT
: new DerbyLimitHandler( getVersion().isSameOrAfter( 10, 6 ) );
public DerbyLegacyDialect() {
this( DatabaseVersion.make( 10, 0 ) );
}
public DerbyLegacyDialect(DatabaseVersion version) {
super(version);
}
public DerbyLegacyDialect(DialectResolutionInfo info) {
super(info);
}
@Override
protected String columnType(int sqlTypeCode) {
return switch ( sqlTypeCode ) {
case BOOLEAN -> getVersion().isBefore( 10, 7 ) ? "smallint" : super.columnType( sqlTypeCode );
//no tinyint
case TINYINT -> "smallint";
// HHH-12827: map them both to the same type to avoid problems with schema update
// Note that 31 is the maximum precision Derby supports
case NUMERIC -> columnType( DECIMAL );
case VARBINARY -> "varchar($l) for bit data";
case NCHAR -> columnType( CHAR );
case NVARCHAR -> columnType( VARCHAR );
case BLOB -> "blob";
case CLOB, NCLOB -> "clob";
case TIME, TIME_WITH_TIMEZONE -> "time";
case TIMESTAMP, TIMESTAMP_WITH_TIMEZONE -> "timestamp";
default -> super.columnType( sqlTypeCode );
};
}
@Override
protected void registerColumnTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
super.registerColumnTypes( typeContributions, serviceRegistry );
final DdlTypeRegistry ddlTypeRegistry = typeContributions.getTypeConfiguration().getDdlTypeRegistry();
int varcharDdlTypeCapacity = 32_672;
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
VARBINARY,
isLob( LONG32VARBINARY )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32VARBINARY ),
columnType( VARBINARY ),
this
)
.withTypeCapacity( varcharDdlTypeCapacity, columnType( VARBINARY ) )
.build()
);
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
VARCHAR,
isLob( LONG32VARCHAR )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32VARCHAR ),
columnType( VARCHAR ),
this
)
.withTypeCapacity( varcharDdlTypeCapacity, columnType( VARCHAR ) )
.build()
);
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
NVARCHAR,
isLob( LONG32NVARCHAR )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32VARCHAR ),
columnType( NVARCHAR ),
this
)
.withTypeCapacity( varcharDdlTypeCapacity, columnType( NVARCHAR ) )
.build()
);
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
BINARY,
isLob( LONG32VARBINARY )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32VARBINARY ),
columnType( VARBINARY ),
this
)
.withTypeCapacity( 254, "char($l) for bit data" )
.withTypeCapacity( varcharDdlTypeCapacity, columnType( VARBINARY ) )
.build()
);
// This is the maximum size for the CHAR datatype on Derby
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
CHAR,
isLob( LONG32VARCHAR )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32VARCHAR ),
columnType( CHAR ),
this
)
.withTypeCapacity( 254, columnType( CHAR ) )
.withTypeCapacity( getMaxVarcharLength(), columnType( VARCHAR ) )
.build()
);
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
NCHAR,
isLob( LONG32NVARCHAR )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32NVARCHAR ),
columnType( NCHAR ),
this
)
.withTypeCapacity( 254, columnType( NCHAR ) )
.withTypeCapacity( getMaxVarcharLength(), columnType( NVARCHAR ) )
.build()
);
}
@Override
public int getMaxVarcharLength() {
return 32_672;
}
@Override
public int getMaxVarcharCapacity() {
return 32_700;
}
@Override
public int getDefaultDecimalPrecision() {
//this is the maximum allowed in Derby
return 31;
}
@Override
public int getPreferredSqlTypeCodeForBoolean() {
return getVersion().isBefore( 10, 7 )
? Types.SMALLINT
: Types.BOOLEAN;
}
@Override
public NationalizationSupport getNationalizationSupport() {
return NationalizationSupport.IMPLICIT;
}
@Override
public int getDefaultStatementBatchSize() {
return 15;
}
@Override
public int getFloatPrecision() {
return 23;
}
@Override
public int getDoublePrecision() {
return 52;
}
@Override
public int getDefaultTimestampPrecision() {
return 9;
}
@Override
public void initializeFunctionRegistry(FunctionContributions functionContributions) {
super.initializeFunctionRegistry(functionContributions);
final BasicTypeRegistry basicTypeRegistry = functionContributions.getTypeConfiguration().getBasicTypeRegistry();
final BasicType<String> stringType = basicTypeRegistry.resolve( StandardBasicTypes.STRING );
final DdlTypeRegistry ddlTypeRegistry = functionContributions.getTypeConfiguration().getDdlTypeRegistry();
final CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
// Derby needs an actual argument type for aggregates like SUM, AVG, MIN, MAX to determine the result type
functionFactory.aggregates( this, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
functionContributions.getFunctionRegistry().register(
"count",
new CountFunction(
this,
functionContributions.getTypeConfiguration(),
SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER,
"||",
ddlTypeRegistry.getDescriptor( VARCHAR )
.getCastTypeName( Size.nil(), stringType, ddlTypeRegistry ),
true
)
);
// AVG by default uses the input type, so we possibly need to cast the argument type, hence a special function
functionFactory.avg_castingNonDoubleArguments( this, SqlAstNodeRenderingMode.DEFAULT );
// Note that Derby does not have chr() / ascii() functions.
// It does have a function named char(), but it's really a
// sort of to_char() function.
// We register an emulation instead, that can at least translate integer literals
functionContributions.getFunctionRegistry().register(
"chr",
new ChrLiteralEmulation( functionContributions.getTypeConfiguration() )
);
functionFactory.concat_pipeOperator();
functionFactory.cot();
functionFactory.degrees();
functionFactory.radians();
functionFactory.log10();
functionFactory.sinh();
functionFactory.cosh();
functionFactory.tanh();
functionFactory.pi();
functionFactory.rand();
functionFactory.trim1();
functionFactory.hourMinuteSecond();
functionFactory.yearMonthDay();
functionFactory.varPopSamp();
functionFactory.stddevPopSamp();
functionFactory.substring_substr();
functionFactory.leftRight_substrLength();
functionFactory.characterLength_length( SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
functionFactory.power_expLn();
functionFactory.round_floor();
functionFactory.trunc_floor();
functionFactory.octetLength_pattern( "length(?1)", SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
functionFactory.bitLength_pattern( "length(?1)*8", SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
functionContributions.getFunctionRegistry().register(
"concat",
new CastingConcatFunction(
this,
"||",
true,
SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER,
functionContributions.getTypeConfiguration()
)
);
//no way I can see to pad with anything other than spaces
functionContributions.getFunctionRegistry().register( "lpad", new DerbyLpadEmulation( functionContributions.getTypeConfiguration() ) );
functionContributions.getFunctionRegistry().register( "rpad", new DerbyRpadEmulation( functionContributions.getTypeConfiguration() ) );
functionContributions.getFunctionRegistry().register( "least", new CaseLeastGreatestEmulation( true ) );
functionContributions.getFunctionRegistry().register( "greatest", new CaseLeastGreatestEmulation( false ) );
functionContributions.getFunctionRegistry().register( "overlay", new InsertSubstringOverlayEmulation( functionContributions.getTypeConfiguration(), true ) );
}
@Override
public SqlAstTranslatorFactory getSqlAstTranslatorFactory() {
return new StandardSqlAstTranslatorFactory() {
@Override
protected <T extends JdbcOperation> SqlAstTranslator<T> buildTranslator(
SessionFactoryImplementor sessionFactory, Statement statement) {
return new DerbyLegacySqlAstTranslator<>( sessionFactory, statement );
}
};
}
/**
* Derby doesn't have an extract() function, and has
* no functions at all for calendaring, but we can
* emulate the most basic functionality of extract()
* using the functions it does have.
*
* The only supported {@link TemporalUnit}s are:
* {@link TemporalUnit#YEAR},
* {@link TemporalUnit#MONTH}
* {@link TemporalUnit#DAY},
* {@link TemporalUnit#HOUR},
* {@link TemporalUnit#MINUTE},
* {@link TemporalUnit#SECOND} (along with
* {@link TemporalUnit#NANOSECOND},
* {@link TemporalUnit#DATE}, and
* {@link TemporalUnit#TIME}, which are desugared
* by the parser).
*/
@Override
public String extractPattern(TemporalUnit unit) {
return switch (unit) {
case DAY_OF_MONTH -> "day(?2)";
case DAY_OF_YEAR -> "({fn timestampdiff(sql_tsi_day,date(char(year(?2),4)||'-01-01'),?2)}+1)";
// Use the approach as outlined here: https://stackoverflow.com/questions/36357013/day-of-week-from-seconds-since-epoch
case DAY_OF_WEEK -> "(mod(mod({fn timestampdiff(sql_tsi_day,{d '1970-01-01'},?2)}+4,7)+7,7)+1)";
// Use the approach as outlined here: https://www.sqlservercentral.com/articles/a-simple-formula-to-calculate-the-iso-week-number
// In SQL Server terms this is (DATEPART(dy,DATEADD(dd,DATEDIFF(dd,'17530101',@SomeDate)/7*7,'17530104'))+6)/7
case WEEK -> "(({fn timestampdiff(sql_tsi_day,date(char(year(?2),4)||'-01-01'),{fn timestampadd(sql_tsi_day,{fn timestampdiff(sql_tsi_day,{d '1753-01-01'},?2)}/7*7,{d '1753-01-04'})})}+7)/7)";
case QUARTER -> "((month(?2)+2)/3)";
case EPOCH -> "{fn timestampdiff(sql_tsi_second,{ts '1970-01-01 00:00:00'},?2)}";
default -> "?1(?2)";
};
}
@Override
public String translateExtractField(TemporalUnit unit) {
return switch (unit) {
case WEEK, DAY_OF_YEAR, DAY_OF_WEEK -> throw new UnsupportedOperationException("field type not supported on Derby: " + unit);
case DAY_OF_MONTH -> "day";
default -> super.translateExtractField(unit);
};
}
/**
* Derby does have a real {@link Types#BOOLEAN}
* type, but it doesn't know how to cast to it. Worse,
* Derby makes us use the {@code double()} function to
* cast things to its floating point types.
*/
@Override
public String castPattern(CastType from, CastType to) {
switch ( to ) {
case FLOAT:
return "cast(double(?1) as real)";
case DOUBLE:
return "double(?1)";
case STRING:
// Derby madness http://db.apache.org/derby/docs/10.8/ref/rrefsqlj33562.html
// With a nice rant: https://blog.jooq.org/2011/10/29/derby-casting-madness-the-sequel/
// See https://issues.apache.org/jira/browse/DERBY-2072
// Since numerics can't be cast to varchar directly, use char(254) i.e. with the maximum char capacity
// as an intermediate type before converting to varchar
switch ( from ) {
case FLOAT:
case DOUBLE:
// Derby can't cast to char directly, but needs to be cast to decimal first...
return "cast(trim(cast(cast(?1 as decimal(" + getDefaultDecimalPrecision() + "," + BigDecimalJavaType.INSTANCE.getDefaultSqlScale( this, null ) + ")) as char(254))) as ?2)";
case INTEGER:
case LONG:
case FIXED:
return "cast(trim(cast(?1 as char(254))) as ?2)";
case DATE:
// The maximum length of a date
return "cast(?1 as varchar(10))";
case TIME:
// The maximum length of a time
return "cast(?1 as varchar(8))";
case TIMESTAMP:
// The maximum length of a timestamp
return "cast(?1 as varchar(30))";
}
break;
}
return super.castPattern( from, to );
}
@Override
public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType, IntervalType intervalType) {
switch (unit) {
case NANOSECOND:
case NATIVE:
return "{fn timestampadd(sql_tsi_frac_second,mod(bigint(?2),1000000000),{fn timestampadd(sql_tsi_second,bigint((?2)/1000000000),?3)})}";
default:
return "{fn timestampadd(sql_tsi_?1,bigint(?2),?3)}";
}
}
@Override
public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
return switch (unit) {
case NANOSECOND, NATIVE -> "{fn timestampdiff(sql_tsi_frac_second,?2,?3)}";
default -> "{fn timestampdiff(sql_tsi_?1,?2,?3)}";
};
}
@Override
public void appendBooleanValueString(SqlAppender appender, boolean bool) {
if ( getVersion().isBefore( 10, 7 ) ) {
appender.appendSql( bool ? '1' : '0' );
}
else {
appender.appendSql( bool );
}
}
@Override
public SequenceSupport getSequenceSupport() {
return getVersion().isBefore( 10, 6 )
? super.getSequenceSupport()
: DerbySequenceSupport.INSTANCE;
}
@Override
public String getQuerySequencesString() {
return getVersion().isBefore( 10, 6 )
? null
: "select sys.sysschemas.schemaname as sequence_schema,sys.syssequences.* from sys.syssequences left join sys.sysschemas on sys.syssequences.schemaid=sys.sysschemas.schemaid";
}
@Override
public SequenceInformationExtractor getSequenceInformationExtractor() {
return getVersion().isBefore( 10, 6 )
? SequenceInformationExtractorNoOpImpl.INSTANCE
: SequenceInformationExtractorDerbyDatabaseImpl.INSTANCE;
}
@Override
public String[] getDropSchemaCommand(String schemaName) {
return new String[] {"drop schema " + schemaName + " restrict"};
}
@Override
public String getSelectClauseNullString(int sqlType, TypeConfiguration typeConfiguration) {
return DB2Dialect.selectNullString( sqlType );
}
@Override
public boolean supportsCommentOn() {
//HHH-4531
return false;
}
@Override
protected LockingClauseStrategy buildLockingClauseStrategy(
PessimisticLockKind lockKind,
RowLockStrategy rowLockStrategy,
LockOptions lockOptions) {
return new DerbyLockingClauseStrategy( this, lockKind, rowLockStrategy, lockOptions );
}
@Override
public String getForUpdateString() {
return " for update with rs";
}
@Override
public String getWriteLockString(Timeout timeout) {
return " for update with rs";
}
@Override
public String getReadLockString(Timeout timeout) {
return " for read only with rs";
}
@Override
public String getWriteLockString(int timeout) {
return " for update with rs";
}
@Override
public String getReadLockString(int timeout) {
return " for read only with rs";
}
@Override
public LockingSupport getLockingSupport() {
return LockingSupportSimple.NO_OUTER_JOIN;
}
@Override
public boolean supportsExistsInSelect() {
//TODO: check this!
return false;
}
@Override
public boolean supportsCurrentTimestampSelection() {
return true;
}
@Override
public String getCurrentTimestampSelectString() {
return "values current timestamp";
}
@Override
public boolean isCurrentTimestampSelectStringCallable() {
return false;
}
@Override
public LimitHandler getLimitHandler() {
return limitHandler;
}
@Override
public IdentityColumnSupport getIdentityColumnSupport() {
return DB2IdentityColumnSupport.INSTANCE;
}
@Override
public boolean doesReadCommittedCauseWritersToBlockReaders() {
//TODO: check this
return true;
}
@Override
public boolean supportsResultSetPositionQueryMethodsOnForwardOnlyCursor() {
return false;
}
@Override
public boolean supportsTupleDistinctCounts() {
//checked on Derby 10.14
return false;
}
@Override
public boolean supportsOrderByInSubquery() {
// As of version 10.5 Derby supports OFFSET and FETCH as well as ORDER BY in subqueries
return getVersion().isSameOrAfter( 10, 5 );
}
@Override
public boolean requiresCastForConcatenatingNonStrings() {
return true;
}
	/**
	 * {@inheritDoc}
	 * <p>
	 * Registers Derby-specific JDBC type mappings on top of the defaults:
	 * <ul>
	 * <li>before 10.7, {@code BOOLEAN} is mapped to {@code SMALLINT}</li>
	 * <li>{@code TIMESTAMP_WITH_TIMEZONE} falls back to the plain timestamp descriptor</li>
	 * <li>untyped nulls are bound through a descriptor that resolves the type via the statement</li>
	 * </ul>
	 */
	@Override
	public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
		super.contributeTypes( typeContributions, serviceRegistry );
		final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
				.getJdbcTypeRegistry();
		if ( getVersion().isBefore( 10, 7 ) ) {
			// Derby gained a native BOOLEAN type in 10.7; emulate with SMALLINT before that
			jdbcTypeRegistry.addDescriptor( Types.BOOLEAN, SmallIntJdbcType.INSTANCE );
		}
		// no timezone-aware timestamp type; store as plain TIMESTAMP
		jdbcTypeRegistry.addDescriptor( Types.TIMESTAMP_WITH_TIMEZONE, TimestampJdbcType.INSTANCE );
		// Derby requires a custom binder for binding untyped nulls that resolves the type through the statement
		typeContributions.contributeJdbcType( ObjectNullResolvingJdbcType.INSTANCE );
		// Until we remove StandardBasicTypes, we have to keep this
		typeContributions.contributeType(
				new JavaObjectType(
						ObjectNullResolvingJdbcType.INSTANCE,
						typeContributions.getTypeConfiguration()
								.getJavaTypeRegistry()
								.getDescriptor( Object.class )
				)
		);
	}
	// Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	@Override
	public boolean supportsLobValueChangePropagation() {
		// in-place LOB mutations are not propagated back to the database
		return false;
	}
	@Override
	public boolean supportsUnboundedLobLocatorMaterialization() {
		return false;
	}
@Override
public ViolatedConstraintNameExtractor getViolatedConstraintNameExtractor() {
return new TemplatedViolatedConstraintNameExtractor( sqle -> {
final String sqlState = JdbcExceptionHelper.extractSqlState( sqle );
if ( sqlState != null ) {
switch ( sqlState ) {
case "23505":
return TemplatedViolatedConstraintNameExtractor.extractUsingTemplate(
"'", "'",
sqle.getMessage()
);
}
}
return null;
} );
}
	/**
	 * {@inheritDoc}
	 * <p>
	 * Maps Derby SQLStates to Hibernate exceptions:
	 * {@code 23505} (duplicate key) becomes a {@link ConstraintViolationException}
	 * carrying the extracted constraint name; {@code 40XL1}/{@code 40XL2}
	 * become {@link LockTimeoutException}. Anything else returns {@code null}
	 * so the default conversion applies.
	 */
	@Override
	public SQLExceptionConversionDelegate buildSQLExceptionConversionDelegate() {
		return (sqlException, message, sql) -> {
			final String sqlState = JdbcExceptionHelper.extractSqlState( sqlException );
//			final int errorCode = JdbcExceptionHelper.extractErrorCode( sqlException );
			final String constraintName;
			if ( sqlState != null ) {
				switch ( sqlState ) {
					case "23505":
						// Unique constraint violation
						constraintName = getViolatedConstraintNameExtractor().extractConstraintName(sqlException);
						return new ConstraintViolationException(
								message,
								sqlException,
								sql,
								ConstraintViolationException.ConstraintKind.UNIQUE,
								constraintName
						);
					case "40XL1", "40XL2":
						// lock could not be obtained within the deadlock/wait timeout
						return new LockTimeoutException( message, sqlException, sql );
				}
			}
			return null;
		};
	}
	/**
	 * {@inheritDoc}
	 * <p>
	 * Always throws: Derby has no pattern-based datetime formatting function
	 * to which HQL's {@code format()} could be translated.
	 */
	@Override
	public void appendDatetimeFormat(SqlAppender appender, String format) {
		throw new UnsupportedOperationException("format() function not supported on Derby");
	}
@Override
protected void registerDefaultKeywords() {
super.registerDefaultKeywords();
registerKeyword( "ADD" );
registerKeyword( "ALL" );
registerKeyword( "ALLOCATE" );
registerKeyword( "ALTER" );
registerKeyword( "AND" );
registerKeyword( "ANY" );
registerKeyword( "ARE" );
registerKeyword( "AS" );
registerKeyword( "ASC" );
registerKeyword( "ASSERTION" );
registerKeyword( "AT" );
registerKeyword( "AUTHORIZATION" );
registerKeyword( "AVG" );
registerKeyword( "BEGIN" );
registerKeyword( "BETWEEN" );
registerKeyword( "BIT" );
registerKeyword( "BOOLEAN" );
registerKeyword( "BOTH" );
registerKeyword( "BY" );
registerKeyword( "CALL" );
registerKeyword( "CASCADE" );
registerKeyword( "CASCADED" );
registerKeyword( "CASE" );
registerKeyword( "CAST" );
registerKeyword( "CHAR" );
registerKeyword( "CHARACTER" );
registerKeyword( "CHECK" );
registerKeyword( "CLOSE" );
registerKeyword( "COLLATE" );
registerKeyword( "COLLATION" );
registerKeyword( "COLUMN" );
registerKeyword( "COMMIT" );
registerKeyword( "CONNECT" );
registerKeyword( "CONNECTION" );
registerKeyword( "CONSTRAINT" );
registerKeyword( "CONSTRAINTS" );
registerKeyword( "CONTINUE" );
registerKeyword( "CONVERT" );
registerKeyword( "CORRESPONDING" );
registerKeyword( "COUNT" );
registerKeyword( "CREATE" );
registerKeyword( "CURRENT" );
registerKeyword( "CURRENT_DATE" );
registerKeyword( "CURRENT_TIME" );
registerKeyword( "CURRENT_TIMESTAMP" );
registerKeyword( "CURRENT_USER" );
registerKeyword( "CURSOR" );
registerKeyword( "DEALLOCATE" );
registerKeyword( "DEC" );
registerKeyword( "DECIMAL" );
registerKeyword( "DECLARE" );
registerKeyword( "DEFERRABLE" );
registerKeyword( "DEFERRED" );
registerKeyword( "DELETE" );
registerKeyword( "DESC" );
registerKeyword( "DESCRIBE" );
registerKeyword( "DIAGNOSTICS" );
registerKeyword( "DISCONNECT" );
registerKeyword( "DISTINCT" );
registerKeyword( "DOUBLE" );
registerKeyword( "DROP" );
registerKeyword( "ELSE" );
registerKeyword( "END" );
registerKeyword( "ENDEXEC" );
registerKeyword( "ESCAPE" );
registerKeyword( "EXCEPT" );
registerKeyword( "EXCEPTION" );
registerKeyword( "EXEC" );
registerKeyword( "EXECUTE" );
registerKeyword( "EXISTS" );
registerKeyword( "EXPLAIN" );
registerKeyword( "EXTERNAL" );
registerKeyword( "FALSE" );
registerKeyword( "FETCH" );
registerKeyword( "FIRST" );
registerKeyword( "FLOAT" );
registerKeyword( "FOR" );
registerKeyword( "FOREIGN" );
registerKeyword( "FOUND" );
registerKeyword( "FROM" );
registerKeyword( "FULL" );
registerKeyword( "FUNCTION" );
registerKeyword( "GET" );
registerKeyword( "GET_CURRENT_CONNECTION" );
registerKeyword( "GLOBAL" );
registerKeyword( "GO" );
registerKeyword( "GOTO" );
registerKeyword( "GRANT" );
registerKeyword( "GROUP" );
registerKeyword( "HAVING" );
registerKeyword( "HOUR" );
registerKeyword( "IDENTITY" );
registerKeyword( "IMMEDIATE" );
registerKeyword( "IN" );
registerKeyword( "INDICATOR" );
registerKeyword( "INITIALLY" );
registerKeyword( "INNER" );
registerKeyword( "INOUT" );
registerKeyword( "INPUT" );
registerKeyword( "INSENSITIVE" );
registerKeyword( "INSERT" );
registerKeyword( "INT" );
registerKeyword( "INTEGER" );
registerKeyword( "INTERSECT" );
registerKeyword( "INTO" );
registerKeyword( "IS" );
registerKeyword( "ISOLATION" );
registerKeyword( "JOIN" );
registerKeyword( "KEY" );
registerKeyword( "LAST" );
registerKeyword( "LEFT" );
registerKeyword( "LIKE" );
registerKeyword( "LONGINT" );
registerKeyword( "LOWER" );
registerKeyword( "LTRIM" );
registerKeyword( "MATCH" );
registerKeyword( "MAX" );
registerKeyword( "MIN" );
registerKeyword( "MINUTE" );
registerKeyword( "NATIONAL" );
registerKeyword( "NATURAL" );
registerKeyword( "NCHAR" );
registerKeyword( "NVARCHAR" );
registerKeyword( "NEXT" );
registerKeyword( "NO" );
registerKeyword( "NOT" );
registerKeyword( "NULL" );
registerKeyword( "NULLIF" );
registerKeyword( "NUMERIC" );
registerKeyword( "OF" );
registerKeyword( "ON" );
registerKeyword( "ONLY" );
registerKeyword( "OPEN" );
registerKeyword( "OPTION" );
registerKeyword( "OR" );
registerKeyword( "ORDER" );
registerKeyword( "OUT" );
registerKeyword( "OUTER" );
registerKeyword( "OUTPUT" );
registerKeyword( "OVERLAPS" );
registerKeyword( "PAD" );
registerKeyword( "PARTIAL" );
registerKeyword( "PREPARE" );
registerKeyword( "PRESERVE" );
registerKeyword( "PRIMARY" );
registerKeyword( "PRIOR" );
registerKeyword( "PRIVILEGES" );
registerKeyword( "PROCEDURE" );
registerKeyword( "PUBLIC" );
registerKeyword( "READ" );
registerKeyword( "REAL" );
registerKeyword( "REFERENCES" );
registerKeyword( "RELATIVE" );
registerKeyword( "RESTRICT" );
registerKeyword( "REVOKE" );
registerKeyword( "RIGHT" );
registerKeyword( "ROLLBACK" );
registerKeyword( "ROWS" );
registerKeyword( "RTRIM" );
registerKeyword( "SCHEMA" );
registerKeyword( "SCROLL" );
registerKeyword( "SECOND" );
registerKeyword( "SELECT" );
registerKeyword( "SESSION_USER" );
registerKeyword( "SET" );
registerKeyword( "SMALLINT" );
registerKeyword( "SOME" );
registerKeyword( "SPACE" );
registerKeyword( "SQL" );
registerKeyword( "SQLCODE" );
registerKeyword( "SQLERROR" );
registerKeyword( "SQLSTATE" );
registerKeyword( "SUBSTR" );
registerKeyword( "SUBSTRING" );
registerKeyword( "SUM" );
registerKeyword( "SYSTEM_USER" );
registerKeyword( "TABLE" );
registerKeyword( "TEMPORARY" );
registerKeyword( "TIMEZONE_HOUR" );
registerKeyword( "TIMEZONE_MINUTE" );
registerKeyword( "TO" );
registerKeyword( "TRAILING" );
registerKeyword( "TRANSACTION" );
registerKeyword( "TRANSLATE" );
registerKeyword( "TRANSLATION" );
registerKeyword( "TRUE" );
registerKeyword( "UNION" );
registerKeyword( "UNIQUE" );
registerKeyword( "UNKNOWN" );
registerKeyword( "UPDATE" );
registerKeyword( "UPPER" );
registerKeyword( "USER" );
registerKeyword( "USING" );
registerKeyword( "VALUES" );
registerKeyword( "VARCHAR" );
registerKeyword( "VARYING" );
registerKeyword( "VIEW" );
registerKeyword( "WHENEVER" );
registerKeyword( "WHERE" );
registerKeyword( "WITH" );
registerKeyword( "WORK" );
registerKeyword( "WRITE" );
registerKeyword( "XML" );
registerKeyword( "XMLEXISTS" );
registerKeyword( "XMLPARSE" );
registerKeyword( "XMLSERIALIZE" );
registerKeyword( "YEAR" );
}
	/**
	 * Multi-table UPDATE/DELETE statements are emulated via a local
	 * temporary table holding the matching ids.
	 */
	@Override
	public SqmMultiTableMutationStrategy getFallbackSqmMutationStrategy(
			EntityMappingType rootEntityDescriptor,
			RuntimeModelCreationContext runtimeModelCreationContext) {
		return new LocalTemporaryTableMutationStrategy( rootEntityDescriptor, runtimeModelCreationContext );
	}
	/**
	 * Multi-table INSERT statements use the same local temporary table
	 * approach as mutations.
	 */
	@Override
	public SqmMultiTableInsertStrategy getFallbackSqmInsertStrategy(
			EntityMappingType rootEntityDescriptor,
			RuntimeModelCreationContext runtimeModelCreationContext) {
		return new LocalTemporaryTableInsertStrategy( rootEntityDescriptor, runtimeModelCreationContext );
	}
	@Override
	public TemporaryTableKind getSupportedTemporaryTableKind() {
		// session-scoped (declared) temporary tables only
		return TemporaryTableKind.LOCAL;
	}
	// All temporary-table details delegate to the shared Derby strategy singleton
	// so that DDL, before-use behavior, and PK support stay in one place.
	@Override
	public TemporaryTableStrategy getLocalTemporaryTableStrategy() {
		return DerbyLocalTemporaryTableStrategy.INSTANCE;
	}
	@Override
	public String getTemporaryTableCreateOptions() {
		return DerbyLocalTemporaryTableStrategy.INSTANCE.getTemporaryTableCreateOptions();
	}
	@Override
	public String getTemporaryTableCreateCommand() {
		return DerbyLocalTemporaryTableStrategy.INSTANCE.getTemporaryTableCreateCommand();
	}
	@Override
	public BeforeUseAction getTemporaryTableBeforeUseAction() {
		return DerbyLocalTemporaryTableStrategy.INSTANCE.getTemporaryTableBeforeUseAction();
	}
	@Override
	public boolean supportsTemporaryTablePrimaryKey() {
		return DerbyLocalTemporaryTableStrategy.INSTANCE.supportsTemporaryTablePrimaryKey();
	}
	@Override
	public boolean supportsPartitionBy() {
		return false;
	}
	@Override
	public boolean supportsWindowFunctions() {
		// It seems at least the row_number function is supported as of 10.4
		return getVersion().isSameOrAfter( 10, 4 );
	}
	@Override
	public boolean supportsValuesList() {
		return true;
	}
	/**
	 * {@inheritDoc}
	 * <p>
	 * Identifiers starting with an underscore must always be quoted on Derby,
	 * hence the auto-quote flag before delegating to the superclass.
	 */
	@Override
	public IdentifierHelper buildIdentifierHelper(IdentifierHelperBuilder builder, DatabaseMetaData metadata)
			throws SQLException {
		builder.setAutoQuoteInitialUnderscore(true);
		return super.buildIdentifierHelper(builder, metadata );
	}
	@Override
	public DmlTargetColumnQualifierSupport getDmlTargetColumnQualifierSupport() {
		// columns in UPDATE/DELETE may be qualified by the table alias
		return DmlTargetColumnQualifierSupport.TABLE_ALIAS;
	}
	/**
	 * Derby has no DUAL table; a single-row {@code VALUES} clause serves as
	 * the dummy FROM source.
	 */
	@Override
	public String getDual() {
		return "(values 0)";
	}
	@Override
	public String getFromDualForSelectOnly() {
		return " from " + getDual() + " dual";
	}
	@Override
	public boolean supportsJoinInMutationStatementSubquery() {
		return false;
	}
	@Override
	public boolean supportsRowValueConstructorSyntax() {
		return false;
	}
	@Override
	public boolean supportsWithClause() {
		// no CTE (WITH ... AS) support
		return false;
	}
	@Override
	public boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
		return false;
	}
	@Override
	public boolean supportsRowValueConstructorSyntaxInInList() {
		return false;
	}
}
|
googleapis/google-cloud-java | 37,627 | java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/QueryEligibleBillingAccountsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/channel/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.channel.v1;
/**
*
*
* <pre>
* Response message for QueryEligibleBillingAccounts.
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.QueryEligibleBillingAccountsResponse}
*/
public final class QueryEligibleBillingAccountsResponse
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.channel.v1.QueryEligibleBillingAccountsResponse)
QueryEligibleBillingAccountsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use QueryEligibleBillingAccountsResponse.newBuilder() to construct.
private QueryEligibleBillingAccountsResponse(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private QueryEligibleBillingAccountsResponse() {
skuPurchaseGroups_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new QueryEligibleBillingAccountsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.channel.v1.ServiceProto
.internal_static_google_cloud_channel_v1_QueryEligibleBillingAccountsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.channel.v1.ServiceProto
.internal_static_google_cloud_channel_v1_QueryEligibleBillingAccountsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse.class,
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse.Builder.class);
}
public static final int SKU_PURCHASE_GROUPS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.channel.v1.SkuPurchaseGroup> skuPurchaseGroups_;
/**
*
*
* <pre>
* List of SKU purchase groups where each group represents a set of SKUs that
* must be purchased using the same billing account. Each SKU from
* [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
* group.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.channel.v1.SkuPurchaseGroup> getSkuPurchaseGroupsList() {
return skuPurchaseGroups_;
}
/**
*
*
* <pre>
* List of SKU purchase groups where each group represents a set of SKUs that
* must be purchased using the same billing account. Each SKU from
* [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
* group.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.channel.v1.SkuPurchaseGroupOrBuilder>
getSkuPurchaseGroupsOrBuilderList() {
return skuPurchaseGroups_;
}
/**
*
*
* <pre>
* List of SKU purchase groups where each group represents a set of SKUs that
* must be purchased using the same billing account. Each SKU from
* [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
* group.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
*/
@java.lang.Override
public int getSkuPurchaseGroupsCount() {
return skuPurchaseGroups_.size();
}
/**
*
*
* <pre>
* List of SKU purchase groups where each group represents a set of SKUs that
* must be purchased using the same billing account. Each SKU from
* [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
* group.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
*/
@java.lang.Override
public com.google.cloud.channel.v1.SkuPurchaseGroup getSkuPurchaseGroups(int index) {
return skuPurchaseGroups_.get(index);
}
/**
*
*
* <pre>
* List of SKU purchase groups where each group represents a set of SKUs that
* must be purchased using the same billing account. Each SKU from
* [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
* group.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
*/
@java.lang.Override
public com.google.cloud.channel.v1.SkuPurchaseGroupOrBuilder getSkuPurchaseGroupsOrBuilder(
int index) {
return skuPurchaseGroups_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < skuPurchaseGroups_.size(); i++) {
output.writeMessage(1, skuPurchaseGroups_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < skuPurchaseGroups_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(1, skuPurchaseGroups_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse)) {
return super.equals(obj);
}
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse other =
(com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse) obj;
if (!getSkuPurchaseGroupsList().equals(other.getSkuPurchaseGroupsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getSkuPurchaseGroupsCount() > 0) {
hash = (37 * hash) + SKU_PURCHASE_GROUPS_FIELD_NUMBER;
hash = (53 * hash) + getSkuPurchaseGroupsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for QueryEligibleBillingAccounts.
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.QueryEligibleBillingAccountsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.QueryEligibleBillingAccountsResponse)
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.channel.v1.ServiceProto
.internal_static_google_cloud_channel_v1_QueryEligibleBillingAccountsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.channel.v1.ServiceProto
.internal_static_google_cloud_channel_v1_QueryEligibleBillingAccountsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse.class,
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse.Builder.class);
}
// Construct using com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (skuPurchaseGroupsBuilder_ == null) {
skuPurchaseGroups_ = java.util.Collections.emptyList();
} else {
skuPurchaseGroups_ = null;
skuPurchaseGroupsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.channel.v1.ServiceProto
.internal_static_google_cloud_channel_v1_QueryEligibleBillingAccountsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse
getDefaultInstanceForType() {
return com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse build() {
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse buildPartial() {
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse result =
new com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse result) {
if (skuPurchaseGroupsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
skuPurchaseGroups_ = java.util.Collections.unmodifiableList(skuPurchaseGroups_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.skuPurchaseGroups_ = skuPurchaseGroups_;
} else {
result.skuPurchaseGroups_ = skuPurchaseGroupsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse) {
return mergeFrom((com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse other) {
if (other
== com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse.getDefaultInstance())
return this;
if (skuPurchaseGroupsBuilder_ == null) {
if (!other.skuPurchaseGroups_.isEmpty()) {
if (skuPurchaseGroups_.isEmpty()) {
skuPurchaseGroups_ = other.skuPurchaseGroups_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSkuPurchaseGroupsIsMutable();
skuPurchaseGroups_.addAll(other.skuPurchaseGroups_);
}
onChanged();
}
} else {
if (!other.skuPurchaseGroups_.isEmpty()) {
if (skuPurchaseGroupsBuilder_.isEmpty()) {
skuPurchaseGroupsBuilder_.dispose();
skuPurchaseGroupsBuilder_ = null;
skuPurchaseGroups_ = other.skuPurchaseGroups_;
bitField0_ = (bitField0_ & ~0x00000001);
skuPurchaseGroupsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getSkuPurchaseGroupsFieldBuilder()
: null;
} else {
skuPurchaseGroupsBuilder_.addAllMessages(other.skuPurchaseGroups_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.channel.v1.SkuPurchaseGroup m =
input.readMessage(
com.google.cloud.channel.v1.SkuPurchaseGroup.parser(), extensionRegistry);
if (skuPurchaseGroupsBuilder_ == null) {
ensureSkuPurchaseGroupsIsMutable();
skuPurchaseGroups_.add(m);
} else {
skuPurchaseGroupsBuilder_.addMessage(m);
}
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.channel.v1.SkuPurchaseGroup> skuPurchaseGroups_ =
java.util.Collections.emptyList();
private void ensureSkuPurchaseGroupsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
skuPurchaseGroups_ =
new java.util.ArrayList<com.google.cloud.channel.v1.SkuPurchaseGroup>(
skuPurchaseGroups_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.channel.v1.SkuPurchaseGroup,
com.google.cloud.channel.v1.SkuPurchaseGroup.Builder,
com.google.cloud.channel.v1.SkuPurchaseGroupOrBuilder>
skuPurchaseGroupsBuilder_;
/**
*
*
* <pre>
* List of SKU purchase groups where each group represents a set of SKUs that
* must be purchased using the same billing account. Each SKU from
* [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
* group.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
*/
public java.util.List<com.google.cloud.channel.v1.SkuPurchaseGroup> getSkuPurchaseGroupsList() {
if (skuPurchaseGroupsBuilder_ == null) {
return java.util.Collections.unmodifiableList(skuPurchaseGroups_);
} else {
return skuPurchaseGroupsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of SKU purchase groups where each group represents a set of SKUs that
* must be purchased using the same billing account. Each SKU from
* [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
* group.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
*/
    public int getSkuPurchaseGroupsCount() {
      if (skuPurchaseGroupsBuilder_ == null) {
        // Field builder not yet materialized: elements live in the backing list.
        return skuPurchaseGroups_.size();
      } else {
        // Once created, the RepeatedFieldBuilderV3 owns the elements.
        return skuPurchaseGroupsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public com.google.cloud.channel.v1.SkuPurchaseGroup getSkuPurchaseGroups(int index) {
      if (skuPurchaseGroupsBuilder_ == null) {
        return skuPurchaseGroups_.get(index);
      } else {
        return skuPurchaseGroupsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public Builder setSkuPurchaseGroups(
        int index, com.google.cloud.channel.v1.SkuPurchaseGroup value) {
      if (skuPurchaseGroupsBuilder_ == null) {
        if (value == null) {
          // Repeated message fields reject null elements.
          throw new NullPointerException();
        }
        // Copy-on-write: make sure we own a mutable list before modifying it.
        ensureSkuPurchaseGroupsIsMutable();
        skuPurchaseGroups_.set(index, value);
        onChanged();
      } else {
        skuPurchaseGroupsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public Builder setSkuPurchaseGroups(
        int index, com.google.cloud.channel.v1.SkuPurchaseGroup.Builder builderForValue) {
      if (skuPurchaseGroupsBuilder_ == null) {
        ensureSkuPurchaseGroupsIsMutable();
        // The sub-builder is snapshotted via build(); later mutations of it are not reflected.
        skuPurchaseGroups_.set(index, builderForValue.build());
        onChanged();
      } else {
        skuPurchaseGroupsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public Builder addSkuPurchaseGroups(com.google.cloud.channel.v1.SkuPurchaseGroup value) {
      if (skuPurchaseGroupsBuilder_ == null) {
        if (value == null) {
          // Repeated message fields reject null elements.
          throw new NullPointerException();
        }
        ensureSkuPurchaseGroupsIsMutable();
        skuPurchaseGroups_.add(value);
        onChanged();
      } else {
        skuPurchaseGroupsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public Builder addSkuPurchaseGroups(
        int index, com.google.cloud.channel.v1.SkuPurchaseGroup value) {
      if (skuPurchaseGroupsBuilder_ == null) {
        if (value == null) {
          // Repeated message fields reject null elements.
          throw new NullPointerException();
        }
        // Copy-on-write: make sure we own a mutable list before modifying it.
        ensureSkuPurchaseGroupsIsMutable();
        skuPurchaseGroups_.add(index, value);
        onChanged();
      } else {
        skuPurchaseGroupsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public Builder addSkuPurchaseGroups(
        com.google.cloud.channel.v1.SkuPurchaseGroup.Builder builderForValue) {
      if (skuPurchaseGroupsBuilder_ == null) {
        ensureSkuPurchaseGroupsIsMutable();
        // The sub-builder is snapshotted via build(); later mutations of it are not reflected.
        skuPurchaseGroups_.add(builderForValue.build());
        onChanged();
      } else {
        skuPurchaseGroupsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public Builder addSkuPurchaseGroups(
        int index, com.google.cloud.channel.v1.SkuPurchaseGroup.Builder builderForValue) {
      if (skuPurchaseGroupsBuilder_ == null) {
        ensureSkuPurchaseGroupsIsMutable();
        skuPurchaseGroups_.add(index, builderForValue.build());
        onChanged();
      } else {
        skuPurchaseGroupsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public Builder addAllSkuPurchaseGroups(
        java.lang.Iterable<? extends com.google.cloud.channel.v1.SkuPurchaseGroup> values) {
      if (skuPurchaseGroupsBuilder_ == null) {
        ensureSkuPurchaseGroupsIsMutable();
        // Bulk add with null-checking handled by the protobuf runtime helper.
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, skuPurchaseGroups_);
        onChanged();
      } else {
        skuPurchaseGroupsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public Builder clearSkuPurchaseGroups() {
      if (skuPurchaseGroupsBuilder_ == null) {
        skuPurchaseGroups_ = java.util.Collections.emptyList();
        // Clear the "field present" bit so build() knows the list is untouched/shared.
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        skuPurchaseGroupsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public Builder removeSkuPurchaseGroups(int index) {
      if (skuPurchaseGroupsBuilder_ == null) {
        ensureSkuPurchaseGroupsIsMutable();
        skuPurchaseGroups_.remove(index);
        onChanged();
      } else {
        skuPurchaseGroupsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public com.google.cloud.channel.v1.SkuPurchaseGroup.Builder getSkuPurchaseGroupsBuilder(
        int index) {
      // Forces creation of the field builder: from here on the list path is abandoned.
      return getSkuPurchaseGroupsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public com.google.cloud.channel.v1.SkuPurchaseGroupOrBuilder getSkuPurchaseGroupsOrBuilder(
        int index) {
      if (skuPurchaseGroupsBuilder_ == null) {
        // Read-only access does not force field-builder creation.
        return skuPurchaseGroups_.get(index);
      } else {
        return skuPurchaseGroupsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.channel.v1.SkuPurchaseGroupOrBuilder>
        getSkuPurchaseGroupsOrBuilderList() {
      if (skuPurchaseGroupsBuilder_ != null) {
        return skuPurchaseGroupsBuilder_.getMessageOrBuilderList();
      } else {
        // Wrap to keep callers from mutating the builder's internal list.
        return java.util.Collections.unmodifiableList(skuPurchaseGroups_);
      }
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public com.google.cloud.channel.v1.SkuPurchaseGroup.Builder addSkuPurchaseGroupsBuilder() {
      // Appends a default-valued element and returns its builder for in-place editing.
      return getSkuPurchaseGroupsFieldBuilder()
          .addBuilder(com.google.cloud.channel.v1.SkuPurchaseGroup.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public com.google.cloud.channel.v1.SkuPurchaseGroup.Builder addSkuPurchaseGroupsBuilder(
        int index) {
      return getSkuPurchaseGroupsFieldBuilder()
          .addBuilder(index, com.google.cloud.channel.v1.SkuPurchaseGroup.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of SKU purchase groups where each group represents a set of SKUs that
     * must be purchased using the same billing account. Each SKU from
     * [QueryEligibleBillingAccountsRequest.skus] will appear in exactly one SKU
     * group.
     * </pre>
     *
     * <code>repeated .google.cloud.channel.v1.SkuPurchaseGroup sku_purchase_groups = 1;</code>
     */
    public java.util.List<com.google.cloud.channel.v1.SkuPurchaseGroup.Builder>
        getSkuPurchaseGroupsBuilderList() {
      return getSkuPurchaseGroupsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 on first builder-view access and
    // hands it the current list; the list reference is then nulled so all later
    // accessors take the builder branch.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.channel.v1.SkuPurchaseGroup,
            com.google.cloud.channel.v1.SkuPurchaseGroup.Builder,
            com.google.cloud.channel.v1.SkuPurchaseGroupOrBuilder>
        getSkuPurchaseGroupsFieldBuilder() {
      if (skuPurchaseGroupsBuilder_ == null) {
        skuPurchaseGroupsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.channel.v1.SkuPurchaseGroup,
                com.google.cloud.channel.v1.SkuPurchaseGroup.Builder,
                com.google.cloud.channel.v1.SkuPurchaseGroupOrBuilder>(
                skuPurchaseGroups_,
                // The bit tells the builder whether it may take ownership of the list.
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        skuPurchaseGroups_ = null;
      }
      return skuPurchaseGroupsBuilder_;
    }
    // Unknown-field handling is delegated to the generated superclass so that
    // fields from newer schema versions survive a parse/serialize round trip.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.QueryEligibleBillingAccountsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.channel.v1.QueryEligibleBillingAccountsResponse)
  // Shared immutable default instance returned by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse();
  }
  public static com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that delegates to Builder.mergeFrom and normalizes every failure into an
  // InvalidProtocolBufferException carrying the partially-parsed message.
  private static final com.google.protobuf.Parser<QueryEligibleBillingAccountsResponse> PARSER =
      new com.google.protobuf.AbstractParser<QueryEligibleBillingAccountsResponse>() {
        @java.lang.Override
        public QueryEligibleBillingAccountsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far for callers that inspect partial data.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<QueryEligibleBillingAccountsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<QueryEligibleBillingAccountsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.channel.v1.QueryEligibleBillingAccountsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/qpid-jms | 37,508 | qpid-jms-client/src/test/java/org/apache/qpid/jms/message/JmsBytesMessageTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.jms.message;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import jakarta.jms.BytesMessage;
import jakarta.jms.JMSException;
import jakarta.jms.MessageEOFException;
import jakarta.jms.MessageFormatException;
import jakarta.jms.MessageNotReadableException;
import jakarta.jms.MessageNotWriteableException;
import org.apache.qpid.jms.message.facade.JmsBytesMessageFacade;
import org.apache.qpid.jms.message.facade.test.JmsTestBytesMessageFacade;
import org.apache.qpid.jms.message.facade.test.JmsTestMessageFactory;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
/**
* Test for JMS Spec compliance for the JmsBytesMessage class using the default message facade.
*/
public class JmsBytesMessageTest {
    // Sentinel value returned by stream-style read operations at end of stream.
    private static final int END_OF_STREAM = -1;
    // Test-only factory producing messages backed by simple in-memory facades.
    private final JmsMessageFactory factory = new JmsTestMessageFactory();
@Test
public void testToString() throws Exception {
JmsBytesMessage bytesMessage = factory.createBytesMessage();
assertTrue(bytesMessage.toString().startsWith("JmsBytesMessage"));
}
/**
* Test that calling {@link BytesMessage#getBodyLength()} on a new message which has been
* populated and {@link BytesMessage#reset()} causes the length to be reported correctly.
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testResetOnNewlyPopulatedBytesMessageUpdatesBodyLength() throws Exception {
byte[] bytes = "newResetTestBytes".getBytes();
JmsBytesMessage bytesMessage = factory.createBytesMessage();
bytesMessage.writeBytes(bytes);
bytesMessage.reset();
assertEquals(bytes.length, bytesMessage.getBodyLength(), "Message reports unexpected length");
}
/**
* Test that attempting to call {@link BytesMessage#getBodyLength()} on a new message causes
* a {@link MessageNotReadableException} to be thrown due to being write-only.
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testGetBodyLengthOnNewMessageThrowsMessageNotReadableException() throws Exception {
assertThrows(MessageNotReadableException.class, () -> {
JmsBytesMessage bytesMessage = factory.createBytesMessage();
bytesMessage.getBodyLength();
});
}
@Test
public void testReadBytesUsingReceivedMessageWithNoBodyReturnsEOS() throws Exception {
JmsBytesMessage bytesMessage = factory.createBytesMessage();
bytesMessage.onDispatch();
//verify attempting to read bytes returns -1, i.e EOS
assertEquals(END_OF_STREAM, bytesMessage.readBytes(new byte[1]), "Expected input stream to be at end but data was returned");
}
@Test
public void testReadBytesUsingReceivedMessageWithBodyReturnsBytes() throws Exception {
byte[] content = "myBytesData".getBytes();
JmsTestBytesMessageFacade facade = new JmsTestBytesMessageFacade(content);
JmsBytesMessage bytesMessage = new JmsBytesMessage(facade);
bytesMessage.onDispatch();
// retrieve the expected bytes, check they match
byte[] receivedBytes = new byte[content.length];
bytesMessage.readBytes(receivedBytes);
assertTrue(Arrays.equals(content, receivedBytes));
// verify no more bytes remain, i.e EOS
assertEquals(END_OF_STREAM, bytesMessage.readBytes(new byte[1]), "Expected input stream to be at end but data was returned");
assertEquals(content.length, bytesMessage.getBodyLength(), "Message reports unexpected length");
}
/**
* Test that attempting to write bytes to a received message (without calling {@link BytesMessage#clearBody()} first)
* causes a {@link MessageNotWriteableException} to be thrown due to being read-only.
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testReceivedBytesMessageThrowsMessageNotWriteableExceptionOnWriteBytes() throws Exception {
assertThrows(MessageNotWriteableException.class, () -> {
byte[] content = "myBytesData".getBytes();
JmsTestBytesMessageFacade facade = new JmsTestBytesMessageFacade(content);
JmsBytesMessage bytesMessage = new JmsBytesMessage(facade);
bytesMessage.onDispatch();
bytesMessage.writeBytes(content);
});
}
/**
* Test that attempting to read bytes from a new message (without calling {@link BytesMessage#reset()} first) causes a
* {@link MessageNotReadableException} to be thrown due to being write-only.
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testNewBytesMessageThrowsMessageNotReadableOnReadBytes() throws Exception {
assertThrows(MessageNotReadableException.class, () -> {
JmsBytesMessage bytesMessage = factory.createBytesMessage();
byte[] receivedBytes = new byte[1];
bytesMessage.readBytes(receivedBytes);
});
}
/**
* Test that calling {@link BytesMessage#clearBody()} causes a received
* message to become writable
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testClearBodyOnReceivedBytesMessageMakesMessageWritable() throws Exception {
byte[] content = "myBytesData".getBytes();
JmsTestBytesMessageFacade facade = new JmsTestBytesMessageFacade(content);
JmsBytesMessage bytesMessage = new JmsBytesMessage(facade);
bytesMessage.onDispatch();
assertTrue(bytesMessage.isReadOnlyBody(), "Message should not be writable");
bytesMessage.clearBody();
assertFalse(bytesMessage.isReadOnlyBody(), "Message should be writable");
}
/**
* Test that calling {@link BytesMessage#clearBody()} of a received message
* causes the facade input stream to be empty and body length to return 0.
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testClearBodyOnReceivedBytesMessageClearsFacadeInputStream() throws Exception {
byte[] content = "myBytesData".getBytes();
JmsTestBytesMessageFacade facade = new JmsTestBytesMessageFacade(content);
JmsBytesMessage bytesMessage = new JmsBytesMessage(facade);
bytesMessage.onDispatch();
assertTrue(facade.getBodyLength() > 0, "Expected message content but none was present");
assertEquals(1, facade.getInputStream().read(new byte[1]), "Expected data from facade");
bytesMessage.clearBody();
assertTrue(facade.getBodyLength() == 0, "Expected no message content from facade");
assertEquals(END_OF_STREAM, facade.getInputStream().read(new byte[1]), "Expected no data from facade, but got some");
}
/**
* Test that attempting to call {@link BytesMessage#getBodyLength()} on a received message after calling
* {@link BytesMessage#clearBody()} causes {@link MessageNotReadableException} to be thrown due to being write-only.
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testGetBodyLengthOnClearedReceivedMessageThrowsMessageNotReadableException() throws Exception {
byte[] content = "myBytesData".getBytes();
JmsTestBytesMessageFacade facade = new JmsTestBytesMessageFacade(content);
JmsBytesMessage bytesMessage = new JmsBytesMessage(facade);
bytesMessage.onDispatch();
assertEquals(content.length, bytesMessage.getBodyLength(), "Unexpected message length");
bytesMessage.clearBody();
try {
bytesMessage.getBodyLength();
fail("expected exception to be thrown");
} catch (MessageNotReadableException mnre) {
// expected
}
}
/**
* Test that calling {@link BytesMessage#reset()} causes a write-only
* message to become read-only
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testResetOnReceivedBytesMessageResetsMarker() throws Exception {
byte[] content = "myBytesData".getBytes();
JmsTestBytesMessageFacade facade = new JmsTestBytesMessageFacade(content);
JmsBytesMessage bytesMessage = new JmsBytesMessage(facade);
bytesMessage.onDispatch();
// retrieve a few bytes, check they match the first few expected bytes
byte[] partialBytes = new byte[3];
bytesMessage.readBytes(partialBytes);
byte[] partialOriginalBytes = Arrays.copyOf(content, 3);
assertTrue(Arrays.equals(partialOriginalBytes, partialBytes));
bytesMessage.reset();
// retrieve all the expected bytes, check they match
byte[] resetBytes = new byte[content.length];
bytesMessage.readBytes(resetBytes);
assertTrue(Arrays.equals(content, resetBytes));
}
/**
* Test that calling {@link BytesMessage#reset()} on a new message which has been populated
* causes the marker to be reset and makes the message read-only
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testResetOnNewlyPopulatedBytesMessageResetsMarkerAndMakesReadable() throws Exception {
byte[] content = "myBytesData".getBytes();
JmsTestBytesMessageFacade facade = new JmsTestBytesMessageFacade(content);
JmsBytesMessage bytesMessage = new JmsBytesMessage(facade);
assertFalse(bytesMessage.isReadOnlyBody(), "Message should be writable");
bytesMessage.writeBytes(content);
bytesMessage.reset();
assertTrue(bytesMessage.isReadOnlyBody(), "Message should not be writable");
// retrieve the bytes, check they match
byte[] resetBytes = new byte[content.length];
bytesMessage.readBytes(resetBytes);
assertTrue(Arrays.equals(content, resetBytes));
}
/**
* Verify that nothing is read when {@link BytesMessage#readBytes(byte[])} is
* called with a zero length destination array.
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testReadBytesWithZeroLengthDestination() throws Exception {
JmsBytesMessage bytesMessage = factory.createBytesMessage();
bytesMessage.reset();
assertEquals(0, bytesMessage.readBytes(new byte[0]), "Did not expect any bytes to be read");
}
/**
* Verify that when {@link BytesMessage#readBytes(byte[], int)} is called
* with a negative length that an {@link IndexOutOfBoundsException} is thrown.
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testReadBytesWithNegativeLengthThrowsIOOBE() throws Exception
{
assertThrows(IndexOutOfBoundsException.class, () -> {
JmsBytesMessage bytesMessage = factory.createBytesMessage();
bytesMessage.reset();
bytesMessage.readBytes(new byte[0], -1);
});
}
/**
* Verify that when {@link BytesMessage#readBytes(byte[], int)} is called
* with a length that is greater than the size of the provided array,
* an {@link IndexOutOfBoundsException} is thrown.
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testReadBytesWithLengthGreatThanArraySizeThrowsIOOBE() throws Exception {
assertThrows(IndexOutOfBoundsException.class, () -> {
JmsBytesMessage bytesMessage = factory.createBytesMessage();
bytesMessage.reset();
bytesMessage.readBytes(new byte[1], 2);
});
}
/**
* Test that writing a null using {@link BytesMessage#writeObject(Object)}
* results in a NPE being thrown.
*
* @throws Exception if an error occurs during the test.
*/
@Test
public void testWriteObjectWithNullThrowsNPE() throws Exception {
assertThrows(NullPointerException.class, () -> {
JmsBytesMessage bytesMessage = factory.createBytesMessage();
bytesMessage.writeObject(null);
});
}
    /**
     * Test that writing an object of an unsupported type using
     * {@link BytesMessage#writeObject(Object)} results in a
     * {@link MessageFormatException} being thrown.
     *
     * (Previous wording said "writing a null" — a copy-paste error from the
     * preceding test; the null case is covered by testWriteObjectWithNullThrowsNPE.)
     *
     * @throws Exception if an error occurs during the test.
     */
    @Test
    public void testWriteObjectWithIllegalTypeThrowsMFE() throws Exception {
        assertThrows(MessageFormatException.class, () -> {
            JmsBytesMessage bytesMessage = factory.createBytesMessage();
            bytesMessage.writeObject(new Object());
        });
    }
@Test
public void testGetBodyLength() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
int len = 10;
for (int i = 0; i < len; i++) {
msg.writeLong(5L);
}
msg.reset();
assertTrue(msg.getBodyLength() == (len * 8));
}
@Test
public void testReadBoolean() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
msg.writeBoolean(true);
msg.reset();
assertTrue(msg.readBoolean());
}
@Test
public void testReadByte() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
msg.writeByte((byte) 2);
msg.reset();
assertTrue(msg.readByte() == 2);
}
@Test
public void testReadUnsignedByte() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
msg.writeByte((byte) 2);
msg.reset();
assertTrue(msg.readUnsignedByte() == 2);
}
@Test
public void testReadShort() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
msg.writeShort((short) 3000);
msg.reset();
assertTrue(msg.readShort() == 3000);
}
@Test
public void testReadUnsignedShort() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
msg.writeShort((short) 3000);
msg.reset();
assertTrue(msg.readUnsignedShort() == 3000);
}
@Test
public void testReadChar() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
msg.writeChar('a');
msg.reset();
assertTrue(msg.readChar() == 'a');
}
@Test
public void testReadInt() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
msg.writeInt(3000);
msg.reset();
assertTrue(msg.readInt() == 3000);
}
@Test
public void testReadLong() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
msg.writeLong(3000);
msg.reset();
assertTrue(msg.readLong() == 3000);
}
@Test
public void testReadFloat() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
msg.writeFloat(3.3f);
msg.reset();
assertTrue(msg.readFloat() == 3.3f);
}
@Test
public void testReadDouble() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
msg.writeDouble(3.3d);
msg.reset();
assertTrue(msg.readDouble() == 3.3d);
}
@Test
public void testReadUTF() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
String str = "this is a test";
msg.writeUTF(str);
msg.reset();
assertTrue(msg.readUTF().equals(str));
}
@Test
public void testReadBytesbyteArray() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
byte[] data = new byte[50];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) i;
}
msg.writeBytes(data);
msg.reset();
byte[] test = new byte[data.length];
msg.readBytes(test);
for (int i = 0; i < test.length; i++) {
assertTrue(test[i] == i);
}
}
@Test
public void testWriteObject() throws JMSException {
JmsBytesMessage msg = factory.createBytesMessage();
try {
msg.writeObject("fred");
msg.writeObject(Boolean.TRUE);
msg.writeObject(Character.valueOf('q'));
msg.writeObject(Byte.valueOf((byte) 1));
msg.writeObject(Short.valueOf((short) 3));
msg.writeObject(Integer.valueOf(3));
msg.writeObject(Long.valueOf(300L));
msg.writeObject(Float.valueOf(3.3f));
msg.writeObject(Double.valueOf(3.3));
msg.writeObject(new byte[3]);
} catch (MessageFormatException mfe) {
fail("objectified primitives should be allowed");
}
try {
msg.writeObject(new Object());
fail("only objectified primitives are allowed");
} catch (MessageFormatException mfe) {
}
}
@Test
public void testClearBodyOnNewMessage() throws JMSException {
JmsBytesMessage bytesMessage = factory.createBytesMessage();
bytesMessage.writeInt(1);
bytesMessage.clearBody();
assertFalse(bytesMessage.isReadOnlyBody());
bytesMessage.reset();
assertEquals(0, bytesMessage.getBodyLength());
}
@Test
public void testReset() throws JMSException {
JmsBytesMessage message = factory.createBytesMessage();
try {
message.writeDouble(24.5);
message.writeLong(311);
} catch (MessageNotWriteableException mnwe) {
fail("should be writeable");
}
message.reset();
try {
assertTrue(message.isReadOnlyBody());
assertEquals(message.readDouble(), 24.5, 0);
assertEquals(message.readLong(), 311);
} catch (MessageNotReadableException mnre) {
fail("should be readable");
}
try {
message.writeInt(33);
fail("should throw exception");
} catch (MessageNotWriteableException mnwe) {
}
}
@Test
public void testReadOnlyBody() throws JMSException {
JmsBytesMessage message = factory.createBytesMessage();
try {
message.writeBoolean(true);
message.writeByte((byte) 1);
message.writeByte((byte) 1);
message.writeBytes(new byte[1]);
message.writeBytes(new byte[3], 0, 2);
message.writeChar('a');
message.writeDouble(1.5);
message.writeFloat((float) 1.5);
message.writeInt(1);
message.writeLong(1);
message.writeObject("stringobj");
message.writeShort((short) 1);
message.writeShort((short) 1);
message.writeUTF("utfstring");
} catch (MessageNotWriteableException mnwe) {
fail("Should be writeable");
}
message.reset();
try {
message.readBoolean();
message.readByte();
message.readUnsignedByte();
message.readBytes(new byte[1]);
message.readBytes(new byte[2], 2);
message.readChar();
message.readDouble();
message.readFloat();
message.readInt();
message.readLong();
message.readUTF();
message.readShort();
message.readUnsignedShort();
message.readUTF();
} catch (MessageNotReadableException mnwe) {
fail("Should be readable");
}
try {
message.writeBoolean(true);
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
try {
message.writeByte((byte) 1);
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
try {
message.writeBytes(new byte[1]);
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
try {
message.writeBytes(new byte[3], 0, 2);
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
try {
message.writeChar('a');
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
try {
message.writeDouble(1.5);
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
try {
message.writeFloat((float) 1.5);
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
try {
message.writeInt(1);
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
try {
message.writeLong(1);
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
try {
message.writeObject("stringobj");
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
try {
message.writeShort((short) 1);
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
try {
message.writeUTF("utfstring");
fail("Should have thrown exception");
} catch (MessageNotWriteableException mnwe) {
}
}
@Test
public void testWriteOnlyBody() throws JMSException {
JmsBytesMessage message = factory.createBytesMessage();
message.clearBody();
try {
message.writeBoolean(true);
message.writeByte((byte) 1);
message.writeByte((byte) 1);
message.writeBytes(new byte[1]);
message.writeBytes(new byte[3], 0, 2);
message.writeChar('a');
message.writeDouble(1.5);
message.writeFloat((float) 1.5);
message.writeInt(1);
message.writeLong(1);
message.writeObject("stringobj");
message.writeShort((short) 1);
message.writeShort((short) 1);
message.writeUTF("utfstring");
} catch (MessageNotWriteableException mnwe) {
fail("Should be writeable");
}
try {
message.readBoolean();
fail("Should have thrown exception");
} catch (MessageNotReadableException mnwe) {
}
try {
message.readByte();
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readUnsignedByte();
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readBytes(new byte[1]);
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readBytes(new byte[2], 2);
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readChar();
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readDouble();
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readFloat();
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readInt();
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readLong();
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readUTF();
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readShort();
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readUnsignedShort();
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
try {
message.readUTF();
fail("Should have thrown exception");
} catch (MessageNotReadableException e) {
}
}
//---------- Test that errors are trapped correctly ----------------------//
// Verifies that every read method converts an EOFException from the underlying
// input stream into a JMS MessageEOFException carrying the EOFException as cause.
// Fix: each try block now calls fail() after the read — previously the test
// silently passed if the expected exception was never thrown at all.
@Test
public void testReadMethodsCaptureEOFExceptionThrowsMessageEOFEx() throws Exception {
    JmsBytesMessageFacade facade = Mockito.mock(JmsBytesMessageFacade.class);
    InputStream bytesIn = Mockito.mock(InputStream.class);
    Mockito.when(facade.getInputStream()).thenReturn(bytesIn);
    Mockito.when(bytesIn.read()).thenThrow(new EOFException());
    Mockito.when(bytesIn.read(Mockito.any(byte[].class))).thenThrow(new EOFException());
    Mockito.when(bytesIn.read(Mockito.any(byte[].class), Mockito.anyInt(), Mockito.anyInt())).thenThrow(new EOFException());
    JmsBytesMessage message = new JmsBytesMessage(facade);
    message.reset();
    try {
        message.readBoolean();
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readByte();
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readBytes(new byte[10]);
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readBytes(new byte[10], 10);
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readChar();
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readDouble();
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readFloat();
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readInt();
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readLong();
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readShort();
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readUnsignedByte();
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readUnsignedShort();
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
    try {
        message.readUTF();
        fail("Should have thrown MessageEOFException");
    } catch (MessageEOFException ex) {
        assertTrue(ex.getCause() instanceof EOFException);
    }
}
// Verifies that every read method wraps a generic IOException from the underlying
// input stream in a JMSException carrying the IOException as cause.
// Fix: each try block now calls fail() after the read — previously the test
// silently passed if the expected exception was never thrown at all.
@Test
public void testReadMethodsCaptureIOExceptionThrowsJMSEx() throws Exception {
    JmsBytesMessageFacade facade = Mockito.mock(JmsBytesMessageFacade.class);
    InputStream bytesIn = Mockito.mock(InputStream.class);
    Mockito.when(facade.getInputStream()).thenReturn(bytesIn);
    Mockito.when(bytesIn.read()).thenThrow(new IOException());
    Mockito.when(bytesIn.read(Mockito.any(byte[].class))).thenThrow(new IOException());
    Mockito.when(bytesIn.read(Mockito.any(byte[].class), Mockito.anyInt(), Mockito.anyInt())).thenThrow(new IOException());
    JmsBytesMessage message = new JmsBytesMessage(facade);
    message.reset();
    try {
        message.readBoolean();
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readByte();
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readBytes(new byte[10]);
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readBytes(new byte[10], 10);
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readChar();
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readDouble();
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readFloat();
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readInt();
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readLong();
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readShort();
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readUnsignedByte();
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readUnsignedShort();
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.readUTF();
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
}
// Verifies that every write method wraps an IOException from the underlying
// output stream in a JMSException carrying the IOException as cause.
// Fixes: (1) OutputStream.write(int) takes an int, so the stub must use
// Mockito.anyInt() — anyByte() is type-checked under Mockito 2+ and never
// matched, meaning the stub never threw; (2) each try block now calls fail(),
// which previously let the test pass silently when no exception was thrown
// (and masked defect 1).
@Test
public void testWriteMethodsCaptureIOExceptionThrowsJMSEx() throws Exception {
    JmsBytesMessageFacade facade = Mockito.mock(JmsBytesMessageFacade.class);
    OutputStream bytesOut = Mockito.mock(OutputStream.class);
    Mockito.when(facade.getOutputStream()).thenReturn(bytesOut);
    Mockito.doThrow(new IOException()).when(bytesOut).write(Mockito.anyInt());
    Mockito.doThrow(new IOException()).when(bytesOut).write(Mockito.any(byte[].class));
    Mockito.doThrow(new IOException()).when(bytesOut).write(Mockito.any(byte[].class), Mockito.anyInt(), Mockito.anyInt());
    JmsBytesMessage message = new JmsBytesMessage(facade);
    try {
        message.writeBoolean(false);
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.writeByte((byte) 128);
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.writeBytes(new byte[10]);
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.writeBytes(new byte[10], 0, 10);
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.writeChar('a');
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.writeDouble(100.0);
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.writeFloat(10.2f);
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.writeInt(125);
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.writeLong(65536L);
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.writeObject("");
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.writeShort((short) 32768);
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
    try {
        message.writeUTF("");
        fail("Should have thrown JMSException");
    } catch (JMSException ex) {
        assertTrue(ex.getCause() instanceof IOException);
    }
}
// getBody must reject unsupported target types with MessageFormatException,
// while byte[] and Object targets still yield the message body.
@Test
public void testGetBodyThrowsMessageFormatException() throws JMSException {
    JmsBytesMessage bytesMessage = factory.createBytesMessage();
    bytesMessage.setStringProperty("property", "value");
    bytesMessage.writeByte((byte) 1);
    bytesMessage.writeInt(22);
    // Each of these target types is incompatible with a bytes message body.
    Class<?>[] invalidTargets = {
        StringBuffer.class, String.class, Map.class, List.class, Array.class
    };
    for (Class<?> target : invalidTargets) {
        try {
            bytesMessage.getBody(target);
            fail("should have thrown MessageFormatException");
        } catch (MessageFormatException expected) {
            // expected outcome for an incompatible body type
        } catch (Exception e) {
            fail("should have thrown MessageFormatException");
        }
    }
    // Compatible targets return the written bytes.
    byte[] asByteArray = bytesMessage.getBody(byte[].class);
    assertNotNull(asByteArray);
    byte[] asObject = (byte[]) bytesMessage.getBody(Object.class);
    assertNotNull(asObject);
}
//---------- Test for misc message methods -------------------------------//
// The message's hash code must be derived from its JMS message ID.
@Test
public void testHashCode() throws Exception {
    final String id = "ID:SOME-ID:0:1:1";
    JmsBytesMessage msg = factory.createBytesMessage();
    msg.setJMSMessageID(id);
    assertEquals(id.hashCode(), msg.getJMSMessageID().hashCode());
    assertEquals(id.hashCode(), msg.hashCode());
}
// equals() is driven by the JMS message ID: equal IDs compare equal (both
// directions), differing or missing IDs do not, and null / foreign types
// are never equal.
@Test
public void testEqualsObject() throws Exception {
    final String id = "ID:SOME-ID:0:1:1";
    JmsBytesMessage first = factory.createBytesMessage();
    JmsBytesMessage second = factory.createBytesMessage();
    first.setJMSMessageID(id);
    assertFalse(first.equals(second));
    assertFalse(second.equals(first));
    second.setJMSMessageID(id);
    assertTrue(first.equals(second));
    assertTrue(second.equals(first));
    second.setJMSMessageID(id + "More");
    assertFalse(first.equals(second));
    assertFalse(second.equals(first));
    // Reflexivity, null handling, and a non-message type.
    assertTrue(first.equals(first));
    assertFalse(first.equals(null));
    assertFalse(first.equals(""));
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* Represents the change that you want to make to the instance properties.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.InstancePropertiesPatch}
*/
public final class InstancePropertiesPatch extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.InstancePropertiesPatch)
InstancePropertiesPatchOrBuilder {
private static final long serialVersionUID = 0L;
// Use InstancePropertiesPatch.newBuilder() to construct.
// Builder-based constructor; the only supported construction path besides parsing.
private InstancePropertiesPatch(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used by newInstance()/the parser; map fields start empty.
private InstancePropertiesPatch() {}
// Protobuf-runtime hook for creating a fresh, empty message instance.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new InstancePropertiesPatch();
}
// Message descriptor, shared via the generated Compute outer class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancePropertiesPatch_descriptor;
}
// Routes protobuf reflection to the backing MapField for each map field number:
// 500195327 = labels, 86866735 = metadata (see the *_FIELD_NUMBER constants).
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 500195327:
return internalGetLabels();
case 86866735:
return internalGetMetadata();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
// Wires generated field accessors to this message and its Builder class.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancePropertiesPatch_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.InstancePropertiesPatch.class,
com.google.cloud.compute.v1.InstancePropertiesPatch.Builder.class);
}
public static final int LABELS_FIELD_NUMBER = 500195327;
// Lazy-init holder for the labels map-entry prototype (string -> string,
// both defaulting to "").
private static final class LabelsDefaultEntryHolder {
static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancePropertiesPatch_LabelsEntry_descriptor,
com.google.protobuf.WireFormat.FieldType.STRING,
"",
com.google.protobuf.WireFormat.FieldType.STRING,
"");
}
@SuppressWarnings("serial")
private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;
// Returns the backing MapField for labels; an empty MapField when unset.
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
if (labels_ == null) {
return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
}
return labels_;
}
/** Returns the number of entries currently in the labels map. */
public int getLabelsCount() {
  final java.util.Map<java.lang.String, java.lang.String> labels = internalGetLabels().getMap();
  return labels.size();
}
/**
 * Returns whether {@code key} is present in the labels map
 * (the label key-value pairs that you want to patch onto the instance).
 *
 * <code>map&lt;string, string&gt; labels = 500195327;</code>
 */
@java.lang.Override
public boolean containsLabels(java.lang.String key) {
  // Same NullPointerException (message "map key") as a manual null check.
  java.util.Objects.requireNonNull(key, "map key");
  return internalGetLabels().getMap().containsKey(key);
}
/** Use {@link #getLabelsMap()} instead. */
// Deprecated legacy accessor kept for generated-API compatibility.
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getLabels() {
return getLabelsMap();
}
/**
 *
 *
 * <pre>
 * The label key-value pairs that you want to patch onto the instance.
 * </pre>
 *
 * <code>map&lt;string, string&gt; labels = 500195327;</code>
 */
// Returns an unmodifiable view of the labels map.
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
return internalGetLabels().getMap();
}
/**
 * Returns the label value mapped to {@code key}, or {@code defaultValue}
 * when the key is absent from the labels map.
 *
 * <code>map&lt;string, string&gt; labels = 500195327;</code>
 */
@java.lang.Override
public /* nullable */ java.lang.String getLabelsOrDefault(
    java.lang.String key,
    /* nullable */
    java.lang.String defaultValue) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
  if (map.containsKey(key)) {
    return map.get(key);
  }
  return defaultValue;
}
/**
 * Returns the label value mapped to {@code key}.
 *
 * <code>map&lt;string, string&gt; labels = 500195327;</code>
 *
 * @throws NullPointerException if {@code key} is null
 * @throws IllegalArgumentException if {@code key} is not present
 */
@java.lang.Override
public java.lang.String getLabelsOrThrow(java.lang.String key) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
  if (map.containsKey(key)) {
    return map.get(key);
  }
  throw new java.lang.IllegalArgumentException();
}
public static final int METADATA_FIELD_NUMBER = 86866735;
// Lazy-init holder for the metadata map-entry prototype (string -> string,
// both defaulting to "").
private static final class MetadataDefaultEntryHolder {
static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancePropertiesPatch_MetadataEntry_descriptor,
com.google.protobuf.WireFormat.FieldType.STRING,
"",
com.google.protobuf.WireFormat.FieldType.STRING,
"");
}
@SuppressWarnings("serial")
private com.google.protobuf.MapField<java.lang.String, java.lang.String> metadata_;
// Returns the backing MapField for metadata; an empty MapField when unset.
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetMetadata() {
if (metadata_ == null) {
return com.google.protobuf.MapField.emptyMapField(MetadataDefaultEntryHolder.defaultEntry);
}
return metadata_;
}
// Number of entries currently in the metadata map.
public int getMetadataCount() {
return internalGetMetadata().getMap().size();
}
/**
 *
 *
 * <pre>
 * The metadata key-value pairs that you want to patch onto the instance. For more information, see Project and instance metadata.
 * </pre>
 *
 * <code>map&lt;string, string&gt; metadata = 86866735;</code>
 */
// Rejects null keys (maps are null-hostile in generated protobuf code).
@java.lang.Override
public boolean containsMetadata(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetMetadata().getMap().containsKey(key);
}
/** Use {@link #getMetadataMap()} instead. */
// Deprecated legacy accessor kept for generated-API compatibility.
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getMetadata() {
return getMetadataMap();
}
/**
 *
 *
 * <pre>
 * The metadata key-value pairs that you want to patch onto the instance. For more information, see Project and instance metadata.
 * </pre>
 *
 * <code>map&lt;string, string&gt; metadata = 86866735;</code>
 */
// Returns an unmodifiable view of the metadata map.
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getMetadataMap() {
return internalGetMetadata().getMap();
}
/**
 *
 *
 * <pre>
 * The metadata key-value pairs that you want to patch onto the instance. For more information, see Project and instance metadata.
 * </pre>
 *
 * <code>map&lt;string, string&gt; metadata = 86866735;</code>
 */
// Returns the mapped value, or defaultValue when key is absent; null key rejected.
@java.lang.Override
public /* nullable */ java.lang.String getMetadataOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetMetadata().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
 *
 *
 * <pre>
 * The metadata key-value pairs that you want to patch onto the instance. For more information, see Project and instance metadata.
 * </pre>
 *
 * <code>map&lt;string, string&gt; metadata = 86866735;</code>
 */
// Returns the mapped value; throws IllegalArgumentException when key is absent.
@java.lang.Override
public java.lang.String getMetadataOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetMetadata().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
private byte memoizedIsInitialized = -1;
// This message has no required fields, so it is always initialized;
// the result is memoized (-1 = unknown, 0 = false, 1 = true).
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes both string maps to the wire; metadata (86866735) is emitted
// before labels (500195327). Statement order here defines the wire output.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
output, internalGetMetadata(), MetadataDefaultEntryHolder.defaultEntry, 86866735);
com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 500195327);
getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size by sizing
// each map entry as a nested MapEntry message, plus any unknown fields.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
internalGetMetadata().getMap().entrySet()) {
com.google.protobuf.MapEntry<java.lang.String, java.lang.String> metadata__ =
MetadataDefaultEntryHolder.defaultEntry
.newBuilderForType()
.setKey(entry.getKey())
.setValue(entry.getValue())
.build();
size += com.google.protobuf.CodedOutputStream.computeMessageSize(86866735, metadata__);
}
for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
internalGetLabels().getMap().entrySet()) {
com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
LabelsDefaultEntryHolder.defaultEntry
.newBuilderForType()
.setKey(entry.getKey())
.setValue(entry.getValue())
.build();
size += com.google.protobuf.CodedOutputStream.computeMessageSize(500195327, labels__);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: same type, equal labels map, equal metadata map,
// and equal unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.InstancePropertiesPatch)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.InstancePropertiesPatch other =
(com.google.cloud.compute.v1.InstancePropertiesPatch) obj;
if (!internalGetLabels().equals(other.internalGetLabels())) return false;
if (!internalGetMetadata().equals(other.internalGetMetadata())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash over descriptor, non-empty map fields (tagged by field number),
// and unknown fields; consistent with equals() above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (!internalGetLabels().getMap().isEmpty()) {
hash = (37 * hash) + LABELS_FIELD_NUMBER;
hash = (53 * hash) + internalGetLabels().hashCode();
}
if (!internalGetMetadata().getMap().isEmpty()) {
hash = (37 * hash) + METADATA_FIELD_NUMBER;
hash = (53 * hash) + internalGetMetadata().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom overloads: ByteBuffer, ByteString, byte[],
// and InputStream sources, each with and without an extension registry.
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants: read a length-prefixed message from the stream.
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
// CodedInputStream variants, with and without an extension registry.
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.InstancePropertiesPatch parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delegates to the static newBuilder().
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Fresh builder seeded from the default (empty) instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with the fields of the given prototype message.
public static Builder newBuilder(com.google.cloud.compute.v1.InstancePropertiesPatch prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Empty builder for the default instance; otherwise a builder carrying this message's fields.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
// Builder attached to a parent for change-notification propagation.
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Represents the change that you want to make to the instance properties.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.InstancePropertiesPatch}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.InstancePropertiesPatch)
com.google.cloud.compute.v1.InstancePropertiesPatchOrBuilder {
// Same descriptor as the enclosing message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancePropertiesPatch_descriptor;
}
// Read-only reflection access: 500195327 = labels, 86866735 = metadata.
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 500195327:
return internalGetLabels();
case 86866735:
return internalGetMetadata();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
// Mutable reflection access used by the builder: 500195327 = labels, 86866735 = metadata.
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
int number) {
switch (number) {
case 500195327:
return internalGetMutableLabels();
case 86866735:
return internalGetMutableMetadata();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
// Wires generated field accessors to the message/builder pair.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancePropertiesPatch_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.InstancePropertiesPatch.class,
com.google.cloud.compute.v1.InstancePropertiesPatch.Builder.class);
}
// Construct using com.google.cloud.compute.v1.InstancePropertiesPatch.newBuilder()
// Standalone builder with no parent listener.
private Builder() {}
// Builder attached to a parent that is notified of changes.
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets both map fields and the has-bits to their defaults.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
internalGetMutableLabels().clear();
internalGetMutableMetadata().clear();
return this;
}
// Descriptor of the message this builder produces.
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancePropertiesPatch_descriptor;
}
// Shared immutable default (empty) instance.
@java.lang.Override
public com.google.cloud.compute.v1.InstancePropertiesPatch getDefaultInstanceForType() {
return com.google.cloud.compute.v1.InstancePropertiesPatch.getDefaultInstance();
}
// Builds and verifies initialization (always true for this type — no required fields).
@java.lang.Override
public com.google.cloud.compute.v1.InstancePropertiesPatch build() {
com.google.cloud.compute.v1.InstancePropertiesPatch result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the initialization check; copies set fields via buildPartial0.
@java.lang.Override
public com.google.cloud.compute.v1.InstancePropertiesPatch buildPartial() {
com.google.cloud.compute.v1.InstancePropertiesPatch result =
new com.google.cloud.compute.v1.InstancePropertiesPatch(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers map fields flagged in bitField0_ (bit 0 = labels, bit 1 = metadata)
// into the result and freezes them against further mutation.
private void buildPartial0(com.google.cloud.compute.v1.InstancePropertiesPatch result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.labels_ = internalGetLabels();
result.labels_.makeImmutable();
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.metadata_ = internalGetMetadata();
result.metadata_.makeImmutable();
}
}
// Plain delegation to the generated superclass implementation.
@java.lang.Override
public Builder clone() {
return super.clone();
}
// Plain delegation to the generated superclass implementation.
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
// Plain delegation to the generated superclass implementation.
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
// Plain delegation to the generated superclass implementation.
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
// Plain delegation to the generated superclass implementation.
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
// Plain delegation to the generated superclass implementation.
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Type-dispatches: uses the typed merge when possible, reflection otherwise.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.InstancePropertiesPatch) {
return mergeFrom((com.google.cloud.compute.v1.InstancePropertiesPatch) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges both maps from another message (no-op for the default instance)
// and marks them set via the has-bits.
public Builder mergeFrom(com.google.cloud.compute.v1.InstancePropertiesPatch other) {
if (other == com.google.cloud.compute.v1.InstancePropertiesPatch.getDefaultInstance())
return this;
internalGetMutableLabels().mergeFrom(other.internalGetLabels());
bitField0_ |= 0x00000001;
internalGetMutableMetadata().mergeFrom(other.internalGetMetadata());
bitField0_ |= 0x00000002;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// Always true: the message declares no required fields.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format parser. Tag values are (field_number << 3) | wire_type(2):
// 694933882 = metadata (86866735), -293404678 = labels (500195327, the shifted
// value overflows into a negative int). Unknown fields fall through to the
// superclass; onChanged() always fires so listeners see partial progress.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 694933882:
{
com.google.protobuf.MapEntry<java.lang.String, java.lang.String> metadata__ =
input.readMessage(
MetadataDefaultEntryHolder.defaultEntry.getParserForType(),
extensionRegistry);
internalGetMutableMetadata()
.getMutableMap()
.put(metadata__.getKey(), metadata__.getValue());
bitField0_ |= 0x00000002;
break;
} // case 694933882
case -293404678:
{
com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
input.readMessage(
LabelsDefaultEntryHolder.defaultEntry.getParserForType(),
extensionRegistry);
internalGetMutableLabels()
.getMutableMap()
.put(labels__.getKey(), labels__.getValue());
bitField0_ |= 0x00000001;
break;
} // case -293404678
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;
// Read-only view of the builder's labels MapField; empty when unset.
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
if (labels_ == null) {
return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
}
return labels_;
}
// Mutable labels MapField: lazily created, copy-on-write if currently
// immutable, marks the field set and notifies listeners.
private com.google.protobuf.MapField<java.lang.String, java.lang.String>
internalGetMutableLabels() {
if (labels_ == null) {
labels_ = com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry);
}
if (!labels_.isMutable()) {
labels_ = labels_.copy();
}
bitField0_ |= 0x00000001;
onChanged();
return labels_;
}
// Number of entries currently in the builder's labels map.
public int getLabelsCount() {
return internalGetLabels().getMap().size();
}
/**
 *
 *
 * <pre>
 * The label key-value pairs that you want to patch onto the instance.
 * </pre>
 *
 * <code>map&lt;string, string&gt; labels = 500195327;</code>
 */
// Rejects null keys (maps are null-hostile in generated protobuf code).
@java.lang.Override
public boolean containsLabels(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetLabels().getMap().containsKey(key);
}
/** Use {@link #getLabelsMap()} instead. */
// Deprecated legacy accessor kept for generated-API compatibility.
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getLabels() {
return getLabelsMap();
}
/**
 *
 *
 * <pre>
 * The label key-value pairs that you want to patch onto the instance.
 * </pre>
 *
 * <code>map&lt;string, string&gt; labels = 500195327;</code>
 */
// Returns a read-only view of the builder's current labels.
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
return internalGetLabels().getMap();
}
/**
 *
 *
 * <pre>
 * The label key-value pairs that you want to patch onto the instance.
 * </pre>
 *
 * <code>map&lt;string, string&gt; labels = 500195327;</code>
 */
// Returns the mapped value, or defaultValue when key is absent; null key rejected.
@java.lang.Override
public /* nullable */ java.lang.String getLabelsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* The label key-value pairs that you want to patch onto the instance.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
@java.lang.Override
public java.lang.String getLabelsOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
public Builder clearLabels() {
bitField0_ = (bitField0_ & ~0x00000001);
internalGetMutableLabels().getMutableMap().clear();
return this;
}
/**
*
*
* <pre>
* The label key-value pairs that you want to patch onto the instance.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
public Builder removeLabels(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
internalGetMutableLabels().getMutableMap().remove(key);
return this;
}
/** Use alternate mutation accessors instead. */
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getMutableLabels() {
bitField0_ |= 0x00000001;
return internalGetMutableLabels().getMutableMap();
}
/**
*
*
* <pre>
* The label key-value pairs that you want to patch onto the instance.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
public Builder putLabels(java.lang.String key, java.lang.String value) {
if (key == null) {
throw new NullPointerException("map key");
}
if (value == null) {
throw new NullPointerException("map value");
}
internalGetMutableLabels().getMutableMap().put(key, value);
bitField0_ |= 0x00000001;
return this;
}
/**
*
*
* <pre>
* The label key-value pairs that you want to patch onto the instance.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
public Builder putAllLabels(java.util.Map<java.lang.String, java.lang.String> values) {
internalGetMutableLabels().getMutableMap().putAll(values);
bitField0_ |= 0x00000001;
return this;
}
private com.google.protobuf.MapField<java.lang.String, java.lang.String> metadata_;

// Read-only view of the metadata map; shared empty MapField when unset
// (same pattern as internalGetLabels above).
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetMetadata() {
  if (metadata_ == null) {
    return com.google.protobuf.MapField.emptyMapField(MetadataDefaultEntryHolder.defaultEntry);
  }
  return metadata_;
}

// Mutable access with lazy allocation and copy-on-write; sets presence bit
// 0x00000002 and notifies parent builders.
private com.google.protobuf.MapField<java.lang.String, java.lang.String>
    internalGetMutableMetadata() {
  if (metadata_ == null) {
    metadata_ =
        com.google.protobuf.MapField.newMapField(MetadataDefaultEntryHolder.defaultEntry);
  }
  if (!metadata_.isMutable()) {
    metadata_ = metadata_.copy();
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return metadata_;
}

/** Returns the number of entries in the {@code metadata} map. */
public int getMetadataCount() {
  return internalGetMetadata().getMap().size();
}

/**
 *
 *
 * <pre>
 * The metadata key-value pairs that you want to patch onto the instance. For more information, see Project and instance metadata.
 * </pre>
 *
 * <code>map<string, string> metadata = 86866735;</code>
 */
@java.lang.Override
public boolean containsMetadata(java.lang.String key) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  return internalGetMetadata().getMap().containsKey(key);
}

/** Use {@link #getMetadataMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getMetadata() {
  return getMetadataMap();
}

/**
 *
 *
 * <pre>
 * The metadata key-value pairs that you want to patch onto the instance. For more information, see Project and instance metadata.
 * </pre>
 *
 * <code>map<string, string> metadata = 86866735;</code>
 */
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getMetadataMap() {
  return internalGetMetadata().getMap();
}

/**
 *
 *
 * <pre>
 * The metadata key-value pairs that you want to patch onto the instance. For more information, see Project and instance metadata.
 * </pre>
 *
 * <code>map<string, string> metadata = 86866735;</code>
 */
@java.lang.Override
public /* nullable */ java.lang.String getMetadataOrDefault(
    java.lang.String key,
    /* nullable */
    java.lang.String defaultValue) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetMetadata().getMap();
  return map.containsKey(key) ? map.get(key) : defaultValue;
}

/**
 *
 *
 * <pre>
 * The metadata key-value pairs that you want to patch onto the instance. For more information, see Project and instance metadata.
 * </pre>
 *
 * <code>map<string, string> metadata = 86866735;</code>
 */
@java.lang.Override
public java.lang.String getMetadataOrThrow(java.lang.String key) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetMetadata().getMap();
  if (!map.containsKey(key)) {
    // Missing key is a caller error for the *OrThrow accessor.
    throw new java.lang.IllegalArgumentException();
  }
  return map.get(key);
}

/** Removes every entry from the {@code metadata} map and clears its presence bit. */
public Builder clearMetadata() {
  bitField0_ = (bitField0_ & ~0x00000002);
  internalGetMutableMetadata().getMutableMap().clear();
  return this;
}

/**
 *
 *
 * <pre>
 * The metadata key-value pairs that you want to patch onto the instance. For more information, see Project and instance metadata.
 * </pre>
 *
 * <code>map<string, string> metadata = 86866735;</code>
 */
public Builder removeMetadata(java.lang.String key) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  internalGetMutableMetadata().getMutableMap().remove(key);
  return this;
}

/** Use alternate mutation accessors instead. */
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getMutableMetadata() {
  bitField0_ |= 0x00000002;
  return internalGetMutableMetadata().getMutableMap();
}

/**
 *
 *
 * <pre>
 * The metadata key-value pairs that you want to patch onto the instance. For more information, see Project and instance metadata.
 * </pre>
 *
 * <code>map<string, string> metadata = 86866735;</code>
 */
public Builder putMetadata(java.lang.String key, java.lang.String value) {
  // Proto3 map fields are null-hostile for both keys and values.
  if (key == null) {
    throw new NullPointerException("map key");
  }
  if (value == null) {
    throw new NullPointerException("map value");
  }
  internalGetMutableMetadata().getMutableMap().put(key, value);
  bitField0_ |= 0x00000002;
  return this;
}

/**
 *
 *
 * <pre>
 * The metadata key-value pairs that you want to patch onto the instance. For more information, see Project and instance metadata.
 * </pre>
 *
 * <code>map<string, string> metadata = 86866735;</code>
 */
public Builder putAllMetadata(java.util.Map<java.lang.String, java.lang.String> values) {
  internalGetMutableMetadata().getMutableMap().putAll(values);
  bitField0_ |= 0x00000002;
  return this;
}
// Unknown-field handling is delegated unchanged to the base Builder; these
// overrides exist only to narrow the return type to this Builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.InstancePropertiesPatch)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.InstancePropertiesPatch)
// Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.compute.v1.InstancePropertiesPatch DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.compute.v1.InstancePropertiesPatch();
}

public static com.google.cloud.compute.v1.InstancePropertiesPatch getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Wire-format parser. On any parse failure the partially built message is
// attached to the thrown InvalidProtocolBufferException so callers can inspect
// what was read before the error.
private static final com.google.protobuf.Parser<InstancePropertiesPatch> PARSER =
    new com.google.protobuf.AbstractParser<InstancePropertiesPatch>() {
      @java.lang.Override
      public InstancePropertiesPatch parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so the parser API surface stays protobuf-typed.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<InstancePropertiesPatch> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<InstancePropertiesPatch> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.compute.v1.InstancePropertiesPatch getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ==== End of generated file: com.google.cloud.compute.v1.InstancePropertiesPatch ====
// ==== Next generated file (googleapis/google-cloud-java):
//      java-publicca/proto-google-cloud-publicca-v1beta1/src/main/java/com/google/cloud/security/publicca/v1beta1/CreateExternalAccountKeyRequest.java ====
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/security/publicca/v1beta1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.security.publicca.v1beta1;
/**
*
*
* <pre>
* Creates a new
* [ExternalAccountKey][google.cloud.security.publicca.v1beta1.ExternalAccountKey]
* in a given project.
* </pre>
*
* Protobuf type {@code google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest}
*/
public final class CreateExternalAccountKeyRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest)
CreateExternalAccountKeyRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use CreateExternalAccountKeyRequest.newBuilder() to construct.
private CreateExternalAccountKeyRequest(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default-instance constructor: proto3 string fields start as "".
private CreateExternalAccountKeyRequest() {
  parent_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new CreateExternalAccountKeyRequest();
}

/** Returns the proto descriptor for this message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.security.publicca.v1beta1.ServiceProto
      .internal_static_google_cloud_security_publicca_v1beta1_CreateExternalAccountKeyRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.security.publicca.v1beta1.ServiceProto
      .internal_static_google_cloud_security_publicca_v1beta1_CreateExternalAccountKeyRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest.class,
          com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest.Builder
              .class);
}
// Presence bits for the message: 0x00000001 = external_account_key is set.
private int bitField0_;

public static final int PARENT_FIELD_NUMBER = 1;

// Holds either a String or a ByteString; decoded/encoded form is cached in
// place on first access (standard generated-code lazy string pattern).
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 *
 *
 * <pre>
 * Required. The parent resource where this external_account_key will be
 * created. Format: projects/[project_id]/locations/[location]. At present
 * only the "global" location is supported.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode UTF-8 bytes and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The parent resource where this external_account_key will be
 * created. Format: projects/[project_id]/locations/[location]. At present
 * only the "global" location is supported.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int EXTERNAL_ACCOUNT_KEY_FIELD_NUMBER = 2;

// null until set; accessors substitute the type's default instance.
private com.google.cloud.security.publicca.v1beta1.ExternalAccountKey externalAccountKey_;

/**
 *
 *
 * <pre>
 * Required. The external account key to create. This field only exists to
 * future-proof the API. At present, all fields in ExternalAccountKey are
 * output only and all values are ignored. For the purpose of the
 * CreateExternalAccountKeyRequest, set it to a default/empty value.
 * </pre>
 *
 * <code>
 * .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the externalAccountKey field is set.
 */
@java.lang.Override
public boolean hasExternalAccountKey() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 *
 *
 * <pre>
 * Required. The external account key to create. This field only exists to
 * future-proof the API. At present, all fields in ExternalAccountKey are
 * output only and all values are ignored. For the purpose of the
 * CreateExternalAccountKeyRequest, set it to a default/empty value.
 * </pre>
 *
 * <code>
 * .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The externalAccountKey.
 */
@java.lang.Override
public com.google.cloud.security.publicca.v1beta1.ExternalAccountKey getExternalAccountKey() {
  return externalAccountKey_ == null
      ? com.google.cloud.security.publicca.v1beta1.ExternalAccountKey.getDefaultInstance()
      : externalAccountKey_;
}

/**
 *
 *
 * <pre>
 * Required. The external account key to create. This field only exists to
 * future-proof the API. At present, all fields in ExternalAccountKey are
 * output only and all values are ignored. For the purpose of the
 * CreateExternalAccountKeyRequest, set it to a default/empty value.
 * </pre>
 *
 * <code>
 * .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.security.publicca.v1beta1.ExternalAccountKeyOrBuilder
    getExternalAccountKeyOrBuilder() {
  return externalAccountKey_ == null
      ? com.google.cloud.security.publicca.v1beta1.ExternalAccountKey.getDefaultInstance()
      : externalAccountKey_;
}

// -1 = not computed yet, 0 = not initialized, 1 = initialized (memoized).
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required proto2 fields here, so the message is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields in field-number order: parent (1) only when non-empty
// (proto3 default elision), external_account_key (2) only when present.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(2, getExternalAccountKey());
  }
  getUnknownFields().writeTo(output);
}

// Computes and memoizes the serialized byte size; mirrors writeTo exactly.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getExternalAccountKey());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

// Value equality over parent, external_account_key (presence + value), and
// unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj
      instanceof com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest other =
      (com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest) obj;
  if (!getParent().equals(other.getParent())) return false;
  if (hasExternalAccountKey() != other.hasExternalAccountKey()) return false;
  if (hasExternalAccountKey()) {
    if (!getExternalAccountKey().equals(other.getExternalAccountKey())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Memoized hash consistent with equals; folds in field numbers so different
// fields with equal values hash differently.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  if (hasExternalAccountKey()) {
    hash = (37 * hash) + EXTERNAL_ACCOUNT_KEY_FIELD_NUMBER;
    hash = (53 * hash) + getExternalAccountKey().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---- Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite. All delegate to PARSER / the
// GeneratedMessageV3 I/O helpers. ----
public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseFrom(java.nio.ByteBuffer data)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseFrom(
        java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseFrom(com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix first (for streamed messages).
public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// ---- Builder factories ----
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh empty Builder; anything else is merged in.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Creates a new
* [ExternalAccountKey][google.cloud.security.publicca.v1beta1.ExternalAccountKey]
* in a given project.
* </pre>
*
* Protobuf type {@code google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest)
com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequestOrBuilder {
/** Returns the proto descriptor for this message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.security.publicca.v1beta1.ServiceProto
      .internal_static_google_cloud_security_publicca_v1beta1_CreateExternalAccountKeyRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.security.publicca.v1beta1.ServiceProto
      .internal_static_google_cloud_security_publicca_v1beta1_CreateExternalAccountKeyRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest.class,
          com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest.Builder
              .class);
}

// Construct using
// com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}

// Eagerly creates nested-message field builders when the runtime requires it
// (alwaysUseFieldBuilders); otherwise they are created lazily.
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getExternalAccountKeyFieldBuilder();
  }
}

// Resets every field (and its presence bit) to the proto3 default.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  parent_ = "";
  externalAccountKey_ = null;
  if (externalAccountKeyBuilder_ != null) {
    externalAccountKeyBuilder_.dispose();
    externalAccountKeyBuilder_ = null;
  }
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.security.publicca.v1beta1.ServiceProto
      .internal_static_google_cloud_security_publicca_v1beta1_CreateExternalAccountKeyRequest_descriptor;
}

@java.lang.Override
public com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    getDefaultInstanceForType() {
  return com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
      .getDefaultInstance();
}

// build() enforces initialization; buildPartial() does not.
@java.lang.Override
public com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest build() {
  com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest result =
      buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
    buildPartial() {
  com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest result =
      new com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Copies set fields into the message, remapping builder presence bits
// (parent=0x1, external_account_key=0x2) onto the message's bit layout,
// where external_account_key occupies 0x1.
private void buildPartial0(
    com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.parent_ = parent_;
  }
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.externalAccountKey_ =
        externalAccountKeyBuilder_ == null
            ? externalAccountKey_
            : externalAccountKeyBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  result.bitField0_ |= to_bitField0_;
}
// ---- Reflection-based field mutators: straight delegation to the base
// Builder, overridden only to narrow the return type. ----
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

// Dispatches to the typed mergeFrom when possible, otherwise falls back to
// the generic descriptor-driven merge.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other
      instanceof com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest) {
    return mergeFrom(
        (com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Field-by-field merge: non-default scalar values and set sub-messages from
// 'other' overwrite/merge into this builder; defaults are skipped.
public Builder mergeFrom(
    com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest other) {
  if (other
      == com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
          .getDefaultInstance()) return this;
  if (!other.getParent().isEmpty()) {
    parent_ = other.parent_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (other.hasExternalAccountKey()) {
    mergeExternalAccountKey(other.getExternalAccountKey());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  // No required proto2 fields, so the builder is always initialized.
  return true;
}

// Wire-format merge: reads tag/value pairs until EOF (tag 0) or an end-group
// tag; unrecognized fields are preserved via parseUnknownField.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            // Field 1 (parent), wire type 2: length-delimited UTF-8 string.
            parent_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            // Field 2 (external_account_key), wire type 2: embedded message.
            input.readMessage(
                getExternalAccountKeyFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// Builder presence bits: 0x00000001 = parent set, 0x00000002 = external_account_key set.
private int bitField0_;

// Holds either a String or a ByteString; converted form is cached in place.
private java.lang.Object parent_ = "";

/**
 *
 *
 * <pre>
 * Required. The parent resource where this external_account_key will be
 * created. Format: projects/[project_id]/locations/[location]. At present
 * only the "global" location is supported.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (!(ref instanceof java.lang.String)) {
    // Decode UTF-8 bytes once and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 *
 *
 * <pre>
 * Required. The parent resource where this external_account_key will be
 * created. Format: projects/[project_id]/locations/[location]. At present
 * only the "global" location is supported.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 *
 *
 * <pre>
 * Required. The parent resource where this external_account_key will be
 * created. Format: projects/[project_id]/locations/[location]. At present
 * only the "global" location is supported.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The parent to set.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Required. The parent resource where this external_account_key will be
 * created. Format: projects/[project_id]/locations/[location]. At present
 * only the "global" location is supported.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
  parent_ = getDefaultInstance().getParent();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Required. The parent resource where this external_account_key will be
 * created. Format: projects/[project_id]/locations/[location]. At present
 * only the "global" location is supported.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for parent to set.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject bytes that are not valid UTF-8 before accepting them.
  checkByteStringIsUtf8(value);
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
private com.google.cloud.security.publicca.v1beta1.ExternalAccountKey externalAccountKey_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.security.publicca.v1beta1.ExternalAccountKey,
com.google.cloud.security.publicca.v1beta1.ExternalAccountKey.Builder,
com.google.cloud.security.publicca.v1beta1.ExternalAccountKeyOrBuilder>
externalAccountKeyBuilder_;
/**
*
*
* <pre>
* Required. The external account key to create. This field only exists to
* future-proof the API. At present, all fields in ExternalAccountKey are
* output only and all values are ignored. For the purpose of the
* CreateExternalAccountKeyRequest, set it to a default/empty value.
* </pre>
*
* <code>
* .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the externalAccountKey field is set.
*/
    public boolean hasExternalAccountKey() {
      // Bit 1 of bitField0_ tracks presence of external_account_key.
      return ((bitField0_ & 0x00000002) != 0);
    }
/**
*
*
* <pre>
* Required. The external account key to create. This field only exists to
* future-proof the API. At present, all fields in ExternalAccountKey are
* output only and all values are ignored. For the purpose of the
* CreateExternalAccountKeyRequest, set it to a default/empty value.
* </pre>
*
* <code>
* .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The externalAccountKey.
*/
    public com.google.cloud.security.publicca.v1beta1.ExternalAccountKey getExternalAccountKey() {
      if (externalAccountKeyBuilder_ == null) {
        // No builder yet: read the raw field, substituting the default instance for null.
        return externalAccountKey_ == null
            ? com.google.cloud.security.publicca.v1beta1.ExternalAccountKey.getDefaultInstance()
            : externalAccountKey_;
      } else {
        return externalAccountKeyBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. The external account key to create. This field only exists to
* future-proof the API. At present, all fields in ExternalAccountKey are
* output only and all values are ignored. For the purpose of the
* CreateExternalAccountKeyRequest, set it to a default/empty value.
* </pre>
*
* <code>
* .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setExternalAccountKey(
        com.google.cloud.security.publicca.v1beta1.ExternalAccountKey value) {
      if (externalAccountKeyBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        externalAccountKey_ = value;
      } else {
        externalAccountKeyBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002; // mark external_account_key as set
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The external account key to create. This field only exists to
* future-proof the API. At present, all fields in ExternalAccountKey are
* output only and all values are ignored. For the purpose of the
* CreateExternalAccountKeyRequest, set it to a default/empty value.
* </pre>
*
* <code>
* .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setExternalAccountKey(
        com.google.cloud.security.publicca.v1beta1.ExternalAccountKey.Builder builderForValue) {
      // Overload taking a sub-builder; the builder is snapshotted via build() here.
      if (externalAccountKeyBuilder_ == null) {
        externalAccountKey_ = builderForValue.build();
      } else {
        externalAccountKeyBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The external account key to create. This field only exists to
* future-proof the API. At present, all fields in ExternalAccountKey are
* output only and all values are ignored. For the purpose of the
* CreateExternalAccountKeyRequest, set it to a default/empty value.
* </pre>
*
* <code>
* .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeExternalAccountKey(
        com.google.cloud.security.publicca.v1beta1.ExternalAccountKey value) {
      if (externalAccountKeyBuilder_ == null) {
        // Field already set to a non-default message: proto-merge the new value into it.
        // Otherwise simply replace.
        if (((bitField0_ & 0x00000002) != 0)
            && externalAccountKey_ != null
            && externalAccountKey_
                != com.google.cloud.security.publicca.v1beta1.ExternalAccountKey
                    .getDefaultInstance()) {
          getExternalAccountKeyBuilder().mergeFrom(value);
        } else {
          externalAccountKey_ = value;
        }
      } else {
        externalAccountKeyBuilder_.mergeFrom(value);
      }
      // Only flag the field as set (and notify) when a value is actually present.
      if (externalAccountKey_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The external account key to create. This field only exists to
* future-proof the API. At present, all fields in ExternalAccountKey are
* output only and all values are ignored. For the purpose of the
* CreateExternalAccountKeyRequest, set it to a default/empty value.
* </pre>
*
* <code>
* .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearExternalAccountKey() {
      bitField0_ = (bitField0_ & ~0x00000002);
      externalAccountKey_ = null;
      // Drop any live field builder so future reads fall back to the raw field.
      if (externalAccountKeyBuilder_ != null) {
        externalAccountKeyBuilder_.dispose();
        externalAccountKeyBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The external account key to create. This field only exists to
* future-proof the API. At present, all fields in ExternalAccountKey are
* output only and all values are ignored. For the purpose of the
* CreateExternalAccountKeyRequest, set it to a default/empty value.
* </pre>
*
* <code>
* .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.security.publicca.v1beta1.ExternalAccountKey.Builder
        getExternalAccountKeyBuilder() {
      // Marks the field as set: handing out a mutable sub-builder implies modification.
      bitField0_ |= 0x00000002;
      onChanged();
      return getExternalAccountKeyFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The external account key to create. This field only exists to
* future-proof the API. At present, all fields in ExternalAccountKey are
* output only and all values are ignored. For the purpose of the
* CreateExternalAccountKeyRequest, set it to a default/empty value.
* </pre>
*
* <code>
* .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.security.publicca.v1beta1.ExternalAccountKeyOrBuilder
        getExternalAccountKeyOrBuilder() {
      // Read-only view: prefers the live builder, else the raw field (never null).
      if (externalAccountKeyBuilder_ != null) {
        return externalAccountKeyBuilder_.getMessageOrBuilder();
      } else {
        return externalAccountKey_ == null
            ? com.google.cloud.security.publicca.v1beta1.ExternalAccountKey.getDefaultInstance()
            : externalAccountKey_;
      }
    }
/**
*
*
* <pre>
* Required. The external account key to create. This field only exists to
* future-proof the API. At present, all fields in ExternalAccountKey are
* output only and all values are ignored. For the purpose of the
* CreateExternalAccountKeyRequest, set it to a default/empty value.
* </pre>
*
* <code>
* .google.cloud.security.publicca.v1beta1.ExternalAccountKey external_account_key = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the SingleFieldBuilderV3 for external_account_key, seeding it
    // with the current field value and then nulling the raw field so the builder
    // becomes the single source of truth.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.security.publicca.v1beta1.ExternalAccountKey,
            com.google.cloud.security.publicca.v1beta1.ExternalAccountKey.Builder,
            com.google.cloud.security.publicca.v1beta1.ExternalAccountKeyOrBuilder>
        getExternalAccountKeyFieldBuilder() {
      if (externalAccountKeyBuilder_ == null) {
        externalAccountKeyBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.security.publicca.v1beta1.ExternalAccountKey,
                com.google.cloud.security.publicca.v1beta1.ExternalAccountKey.Builder,
                com.google.cloud.security.publicca.v1beta1.ExternalAccountKeyOrBuilder>(
                getExternalAccountKey(), getParentForChildren(), isClean());
        externalAccountKey_ = null;
      }
      return externalAccountKeyBuilder_;
    }
    // Unknown-field handling is delegated unchanged to the GeneratedMessageV3 base.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest)
  // Singleton default instance, created eagerly at class-load time.
  private static final com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest();
  }

  public static com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that drives Builder.mergeFrom and normalizes failures to
  // InvalidProtocolBufferException, attaching the partially parsed message
  // so callers can recover best-effort data.
  private static final com.google.protobuf.Parser<CreateExternalAccountKeyRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateExternalAccountKeyRequest>() {
        @java.lang.Override
        public CreateExternalAccountKeyRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateExternalAccountKeyRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateExternalAccountKeyRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.security.publicca.v1beta1.CreateExternalAccountKeyRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,456 | java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/ListConversationsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/conversation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* The request message for
* [Conversations.ListConversations][google.cloud.dialogflow.v2.Conversations.ListConversations].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.ListConversationsRequest}
*/
public final class ListConversationsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.ListConversationsRequest)
ListConversationsRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListConversationsRequest.newBuilder() to construct.
  private ListConversationsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // String fields are initialized to "" so getters never return null.
  private ListConversationsRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListConversationsRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.v2.ConversationProto
        .internal_static_google_cloud_dialogflow_v2_ListConversationsRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.v2.ConversationProto
        .internal_static_google_cloud_dialogflow_v2_ListConversationsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.v2.ListConversationsRequest.class,
            com.google.cloud.dialogflow.v2.ListConversationsRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; decoded lazily and cached (see getParent).
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The project from which to list all conversation.
* Format: `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First read after wire parsing: decode the ByteString once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The project from which to list all conversation.
* Format: `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString for subsequent serialization.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;

  private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. The maximum number of items to return in a single page. By
* default 100 and at most 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

  // Lazily decoded String/ByteString pair, same scheme as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the wire-format ByteString once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FILTER_FIELD_NUMBER = 4;

  // Lazily decoded String/ByteString pair, same scheme as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. A filter expression that filters conversations listed in the
* response. Only `lifecycle_state` can be filtered on in this way. For
* example, the following expression only returns `COMPLETED` conversations:
*
* `lifecycle_state = "COMPLETED"`
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the wire-format ByteString once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Optional. A filter expression that filters conversations listed in the
* response. Only `lifecycle_state` can be filtered on in this way. For
* example, the following expression only returns `COMPLETED` conversations:
*
* `lifecycle_state = "COMPLETED"`
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized initialization check: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serialization skips fields at their proto3 default value ("" / 0).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }

  // Size is computed once and memoized in memoizedSize (-1 = not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-by-field equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2.ListConversationsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2.ListConversationsRequest other =
        (com.google.cloud.dialogflow.v2.ListConversationsRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash is memoized; 0 means "not yet computed".
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER or the
  // GeneratedMessageV3 I/O helpers.
  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length prefix before the message payload.
  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.ListConversationsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.dialogflow.v2.ListConversationsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh Builder; others seed it with their state.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The request message for
* [Conversations.ListConversations][google.cloud.dialogflow.v2.Conversations.ListConversations].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.ListConversationsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.ListConversationsRequest)
com.google.cloud.dialogflow.v2.ListConversationsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2.ConversationProto
          .internal_static_google_cloud_dialogflow_v2_ListConversationsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2.ConversationProto
          .internal_static_google_cloud_dialogflow_v2_ListConversationsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2.ListConversationsRequest.class,
              com.google.cloud.dialogflow.v2.ListConversationsRequest.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.v2.ListConversationsRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field and all has-bits to their defaults.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.v2.ConversationProto
          .internal_static_google_cloud_dialogflow_v2_ListConversationsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.ListConversationsRequest getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.v2.ListConversationsRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.ListConversationsRequest build() {
      com.google.cloud.dialogflow.v2.ListConversationsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.ListConversationsRequest buildPartial() {
      com.google.cloud.dialogflow.v2.ListConversationsRequest result =
          new com.google.cloud.dialogflow.v2.ListConversationsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose has-bits are set into the result message.
    private void buildPartial0(com.google.cloud.dialogflow.v2.ListConversationsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }

    // Reflection-style mutators delegate unchanged to the GeneratedMessageV3 base.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.v2.ListConversationsRequest) {
        return mergeFrom((com.google.cloud.dialogflow.v2.ListConversationsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges non-default fields from another message of the same type.
    public Builder mergeFrom(com.google.cloud.dialogflow.v2.ListConversationsRequest other) {
      if (other == com.google.cloud.dialogflow.v2.ListConversationsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: each case label is the field's tag
    // (field number << 3 | wire type), e.g. 10 = field 1, length-delimited.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    // Builder-side storage for parent; String or ByteString, decoded lazily.
    private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The project from which to list all conversation.
* Format: `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and keep the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Required. The project from which to list all conversation.
* Format: `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        // Encode once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Required. The project from which to list all conversation.
* Format: `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001; // set the has-bit for parent
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The project from which to list all conversation.
* Format: `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001); // clear the has-bit for parent
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The project from which to list all conversation.
* Format: `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; rejects malformed bytes up front.
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private int pageSize_;
/**
*
*
* <pre>
* Optional. The maximum number of items to return in a single page. By
* default 100 and at most 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
  // Plain accessor for field 2 (page_size).
  return pageSize_;
}
/**
*
*
* <pre>
* Optional. The maximum number of items to return in a single page. By
* default 100 and at most 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
  pageSize_ = value;
  bitField0_ |= 0x00000002;  // mark field 2 (page_size) as explicitly set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. The maximum number of items to return in a single page. By
* default 100 and at most 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
  // Reset field 2 (page_size) to its proto3 default of 0.
  bitField0_ = (bitField0_ & ~0x00000002);
  pageSize_ = 0;
  onChanged();
  return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later calls avoid re-decoding.
    pageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the UTF-8 encoded form so repeated calls skip re-encoding.
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  pageToken_ = value;
  bitField0_ |= 0x00000004;  // mark field 3 (page_token) as explicitly set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
  // Restore the default value and drop the has-bit for field 3 (page_token).
  pageToken_ = getDefaultInstance().getPageToken();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 strings must be valid UTF-8; reject invalid bytes up front.
  checkByteStringIsUtf8(value);
  pageToken_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. A filter expression that filters conversations listed in the
* response. Only `lifecycle_state` can be filtered on in this way. For
* example, the following expression only returns `COMPLETED` conversations:
*
* `lifecycle_state = "COMPLETED"`
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
  java.lang.Object ref = filter_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later calls avoid re-decoding.
    filter_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* Optional. A filter expression that filters conversations listed in the
* response. Only `lifecycle_state` can be filtered on in this way. For
* example, the following expression only returns `COMPLETED` conversations:
*
* `lifecycle_state = "COMPLETED"`
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
  java.lang.Object ref = filter_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the UTF-8 encoded form so repeated calls skip re-encoding.
    filter_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* Optional. A filter expression that filters conversations listed in the
* response. Only `lifecycle_state` can be filtered on in this way. For
* example, the following expression only returns `COMPLETED` conversations:
*
* `lifecycle_state = "COMPLETED"`
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  filter_ = value;
  bitField0_ |= 0x00000008;  // mark field 4 (filter) as explicitly set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. A filter expression that filters conversations listed in the
* response. Only `lifecycle_state` can be filtered on in this way. For
* example, the following expression only returns `COMPLETED` conversations:
*
* `lifecycle_state = "COMPLETED"`
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
  // Restore the default value and drop the has-bit for field 4 (filter).
  filter_ = getDefaultInstance().getFilter();
  bitField0_ = (bitField0_ & ~0x00000008);
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. A filter expression that filters conversations listed in the
* response. Only `lifecycle_state` can be filtered on in this way. For
* example, the following expression only returns `COMPLETED` conversations:
*
* `lifecycle_state = "COMPLETED"`
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 strings must be valid UTF-8; reject invalid bytes up front.
  checkByteStringIsUtf8(value);
  filter_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
// Final passthroughs: unknown-field handling is delegated entirely to the
// GeneratedMessageV3.Builder superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.ListConversationsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.ListConversationsRequest)
// Singleton default instance: all fields at their proto3 default values.
private static final com.google.cloud.dialogflow.v2.ListConversationsRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.ListConversationsRequest();
}

public static com.google.cloud.dialogflow.v2.ListConversationsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. Every failure mode is surfaced as an
// InvalidProtocolBufferException carrying the partially parsed message.
private static final com.google.protobuf.Parser<ListConversationsRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListConversationsRequest>() {
      @java.lang.Override
      public ListConversationsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListConversationsRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListConversationsRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dialogflow.v2.ListConversationsRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/artifactregistry/v1/attachment.proto
// Protobuf Java Version: 3.25.8
package com.google.devtools.artifactregistry.v1;
/**
*
*
* <pre>
* The response from listing attachments.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.ListAttachmentsResponse}
*/
public final class ListAttachmentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1.ListAttachmentsResponse)
ListAttachmentsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListAttachmentsResponse.newBuilder() to construct.
private ListAttachmentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used for the default instance; sets proto3 defaults.
private ListAttachmentsResponse() {
  attachments_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListAttachmentsResponse();
}
// Reflection support: descriptor and field-accessor table generated from
// google/devtools/artifactregistry/v1/attachment.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.devtools.artifactregistry.v1.AttachmentProto
      .internal_static_google_devtools_artifactregistry_v1_ListAttachmentsResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.devtools.artifactregistry.v1.AttachmentProto
      .internal_static_google_devtools_artifactregistry_v1_ListAttachmentsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.devtools.artifactregistry.v1.ListAttachmentsResponse.class,
          com.google.devtools.artifactregistry.v1.ListAttachmentsResponse.Builder.class);
}
public static final int ATTACHMENTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.devtools.artifactregistry.v1.Attachment> attachments_;
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.devtools.artifactregistry.v1.Attachment> getAttachmentsList() {
  // Returns the internal list directly; on a built message it is unmodifiable.
  return attachments_;
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.devtools.artifactregistry.v1.AttachmentOrBuilder>
    getAttachmentsOrBuilderList() {
  // Same backing list, viewed through the read-only OrBuilder interface.
  return attachments_;
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
@java.lang.Override
public int getAttachmentsCount() {
  return attachments_.size();
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
@java.lang.Override
public com.google.devtools.artifactregistry.v1.Attachment getAttachments(int index) {
  return attachments_.get(index);
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
@java.lang.Override
public com.google.devtools.artifactregistry.v1.AttachmentOrBuilder getAttachmentsOrBuilder(
    int index) {
  return attachments_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The token to retrieve the next page of attachments, or empty if there are
* no more attachments to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later calls avoid re-decoding.
    nextPageToken_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* The token to retrieve the next page of attachments, or empty if there are
* no more attachments to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the UTF-8 encoded form so repeated calls skip re-encoding.
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
private byte memoizedIsInitialized = -1;  // -1 = not computed, 0 = false, 1 = true

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message has no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1: repeated Attachment messages.
  for (int i = 0; i < attachments_.size(); i++) {
    output.writeMessage(1, attachments_.get(i));
  }
  // Field 2: next_page_token, omitted when empty (proto3 default).
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Memoized: -1 means "not yet computed"; messages are immutable so the
  // size never changes once calculated.
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < attachments_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, attachments_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.devtools.artifactregistry.v1.ListAttachmentsResponse)) {
    return super.equals(obj);
  }
  com.google.devtools.artifactregistry.v1.ListAttachmentsResponse other =
      (com.google.devtools.artifactregistry.v1.ListAttachmentsResponse) obj;

  // Field-by-field comparison, including unknown fields from the wire.
  if (!getAttachmentsList().equals(other.getAttachmentsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized; 0 doubles as the "not yet computed" sentinel.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Repeated field only contributes when non-empty, matching equals().
  if (getAttachmentsCount() > 0) {
    hash = (37 * hash) + ATTACHMENTS_FIELD_NUMBER;
    hash = (53 * hash) + getAttachmentsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse entry points: one overload per input source (ByteBuffer,
// ByteString, byte[], InputStream, CodedInputStream), each with and without
// an extension registry. All delegate to PARSER / GeneratedMessageV3 helpers.
public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message bytes.
public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Returns a builder pre-populated with the prototype's field values.
public static Builder newBuilder(
    com.google.devtools.artifactregistry.v1.ListAttachmentsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields an empty builder; anything else is merged in.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* The response from listing attachments.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.ListAttachmentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1.ListAttachmentsResponse)
com.google.devtools.artifactregistry.v1.ListAttachmentsResponseOrBuilder {
// Reflection support for the builder: same descriptor/accessor table as the
// enclosing message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.devtools.artifactregistry.v1.AttachmentProto
      .internal_static_google_devtools_artifactregistry_v1_ListAttachmentsResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.devtools.artifactregistry.v1.AttachmentProto
      .internal_static_google_devtools_artifactregistry_v1_ListAttachmentsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.devtools.artifactregistry.v1.ListAttachmentsResponse.class,
          com.google.devtools.artifactregistry.v1.ListAttachmentsResponse.Builder.class);
}

// Construct using com.google.devtools.artifactregistry.v1.ListAttachmentsResponse.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  // Reset the repeated field either directly or via its nested builder.
  if (attachmentsBuilder_ == null) {
    attachments_ = java.util.Collections.emptyList();
  } else {
    attachments_ = null;
    attachmentsBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  nextPageToken_ = "";
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.devtools.artifactregistry.v1.AttachmentProto
      .internal_static_google_devtools_artifactregistry_v1_ListAttachmentsResponse_descriptor;
}

@java.lang.Override
public com.google.devtools.artifactregistry.v1.ListAttachmentsResponse
    getDefaultInstanceForType() {
  return com.google.devtools.artifactregistry.v1.ListAttachmentsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1.ListAttachmentsResponse build() {
  com.google.devtools.artifactregistry.v1.ListAttachmentsResponse result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.devtools.artifactregistry.v1.ListAttachmentsResponse buildPartial() {
  com.google.devtools.artifactregistry.v1.ListAttachmentsResponse result =
      new com.google.devtools.artifactregistry.v1.ListAttachmentsResponse(this);
  // Repeated fields are finalized separately from scalar (bit-guarded) fields.
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
private void buildPartialRepeatedFields(
    com.google.devtools.artifactregistry.v1.ListAttachmentsResponse result) {
  if (attachmentsBuilder_ == null) {
    // Freeze the list in place so the built message shares it immutably.
    if (((bitField0_ & 0x00000001) != 0)) {
      attachments_ = java.util.Collections.unmodifiableList(attachments_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.attachments_ = attachments_;
  } else {
    result.attachments_ = attachmentsBuilder_.build();
  }
}

// Copies only the scalar fields whose has-bits are set.
private void buildPartial0(
    com.google.devtools.artifactregistry.v1.ListAttachmentsResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
// Reflection-based mutators: straight passthroughs to the superclass,
// overridden only to narrow the return type to this Builder.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Fast path for the concrete type; otherwise fall back to reflection.
  if (other instanceof com.google.devtools.artifactregistry.v1.ListAttachmentsResponse) {
    return mergeFrom((com.google.devtools.artifactregistry.v1.ListAttachmentsResponse) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

public Builder mergeFrom(
    com.google.devtools.artifactregistry.v1.ListAttachmentsResponse other) {
  if (other
      == com.google.devtools.artifactregistry.v1.ListAttachmentsResponse.getDefaultInstance())
    return this;
  if (attachmentsBuilder_ == null) {
    if (!other.attachments_.isEmpty()) {
      if (attachments_.isEmpty()) {
        // Share the other message's (immutable) list instead of copying.
        attachments_ = other.attachments_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureAttachmentsIsMutable();
        attachments_.addAll(other.attachments_);
      }
      onChanged();
    }
  } else {
    if (!other.attachments_.isEmpty()) {
      if (attachmentsBuilder_.isEmpty()) {
        // Adopt the other list directly, then re-create the field builder if
        // the runtime always uses field builders.
        attachmentsBuilder_.dispose();
        attachmentsBuilder_ = null;
        attachments_ = other.attachments_;
        bitField0_ = (bitField0_ & ~0x00000001);
        attachmentsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getAttachmentsFieldBuilder()
                : null;
      } else {
        attachmentsBuilder_.addAllMessages(other.attachments_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
  // No required fields, so any builder state is buildable.
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:  // field 1, wire type 2: repeated Attachment
          {
            com.google.devtools.artifactregistry.v1.Attachment m =
                input.readMessage(
                    com.google.devtools.artifactregistry.v1.Attachment.parser(),
                    extensionRegistry);
            if (attachmentsBuilder_ == null) {
              ensureAttachmentsIsMutable();
              attachments_.add(m);
            } else {
              attachmentsBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:  // field 2, wire type 2: next_page_token
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
private int bitField0_;

private java.util.List<com.google.devtools.artifactregistry.v1.Attachment> attachments_ =
    java.util.Collections.emptyList();

// Switches attachments_ to a private mutable copy before the first in-place
// edit; bit 0x00000001 records that the copy has been made.
private void ensureAttachmentsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    attachments_ =
        new java.util.ArrayList<com.google.devtools.artifactregistry.v1.Attachment>(
            attachments_);
    bitField0_ |= 0x00000001;
  }
}

private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.devtools.artifactregistry.v1.Attachment,
        com.google.devtools.artifactregistry.v1.Attachment.Builder,
        com.google.devtools.artifactregistry.v1.AttachmentOrBuilder>
    attachmentsBuilder_;
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
public java.util.List<com.google.devtools.artifactregistry.v1.Attachment> getAttachmentsList() {
  // Read through the field builder when one exists; otherwise expose a
  // read-only view of the local list.
  if (attachmentsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(attachments_);
  } else {
    return attachmentsBuilder_.getMessageList();
  }
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
public int getAttachmentsCount() {
  if (attachmentsBuilder_ == null) {
    return attachments_.size();
  } else {
    return attachmentsBuilder_.getCount();
  }
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
public com.google.devtools.artifactregistry.v1.Attachment getAttachments(int index) {
  if (attachmentsBuilder_ == null) {
    return attachments_.get(index);
  } else {
    return attachmentsBuilder_.getMessage(index);
  }
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
public Builder setAttachments(
    int index, com.google.devtools.artifactregistry.v1.Attachment value) {
  if (attachmentsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureAttachmentsIsMutable();
    attachments_.set(index, value);
    onChanged();
  } else {
    attachmentsBuilder_.setMessage(index, value);
  }
  return this;
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
public Builder setAttachments(
    int index, com.google.devtools.artifactregistry.v1.Attachment.Builder builderForValue) {
  if (attachmentsBuilder_ == null) {
    ensureAttachmentsIsMutable();
    attachments_.set(index, builderForValue.build());
    onChanged();
  } else {
    attachmentsBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
public Builder addAttachments(com.google.devtools.artifactregistry.v1.Attachment value) {
  if (attachmentsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureAttachmentsIsMutable();
    attachments_.add(value);
    onChanged();
  } else {
    attachmentsBuilder_.addMessage(value);
  }
  return this;
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
public Builder addAttachments(
    int index, com.google.devtools.artifactregistry.v1.Attachment value) {
  if (attachmentsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureAttachmentsIsMutable();
    attachments_.add(index, value);
    onChanged();
  } else {
    attachmentsBuilder_.addMessage(index, value);
  }
  return this;
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
public Builder addAttachments(
    com.google.devtools.artifactregistry.v1.Attachment.Builder builderForValue) {
  if (attachmentsBuilder_ == null) {
    ensureAttachmentsIsMutable();
    attachments_.add(builderForValue.build());
    onChanged();
  } else {
    attachmentsBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
public Builder addAttachments(
    int index, com.google.devtools.artifactregistry.v1.Attachment.Builder builderForValue) {
  if (attachmentsBuilder_ == null) {
    ensureAttachmentsIsMutable();
    attachments_.add(index, builderForValue.build());
    onChanged();
  } else {
    attachmentsBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
*
*
* <pre>
* The attachments returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
*/
public Builder addAllAttachments(
java.lang.Iterable<? extends com.google.devtools.artifactregistry.v1.Attachment> values) {
if (attachmentsBuilder_ == null) {
ensureAttachmentsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, attachments_);
onChanged();
} else {
attachmentsBuilder_.addAllMessages(values);
}
return this;
}
    /**
     *
     *
     * <pre>
     * The attachments returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
     */
    // NOTE(review): protoc-generated — regenerate from the .proto rather than editing.
    public Builder clearAttachments() {
      if (attachmentsBuilder_ == null) {
        attachments_ = java.util.Collections.emptyList();
        // Clear the has-been-mutated bit for the attachments field (bit 0 of bitField0_).
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        attachmentsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The attachments returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
     */
    public Builder removeAttachments(int index) {
      if (attachmentsBuilder_ == null) {
        ensureAttachmentsIsMutable();
        attachments_.remove(index);
        onChanged();
      } else {
        attachmentsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The attachments returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
     */
    // NOTE(review): protoc-generated accessor section — do not hand-edit.
    public com.google.devtools.artifactregistry.v1.Attachment.Builder getAttachmentsBuilder(
        int index) {
      return getAttachmentsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The attachments returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.AttachmentOrBuilder getAttachmentsOrBuilder(
        int index) {
      if (attachmentsBuilder_ == null) {
        return attachments_.get(index);
      } else {
        return attachmentsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The attachments returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
     */
    public java.util.List<? extends com.google.devtools.artifactregistry.v1.AttachmentOrBuilder>
        getAttachmentsOrBuilderList() {
      if (attachmentsBuilder_ != null) {
        return attachmentsBuilder_.getMessageOrBuilderList();
      } else {
        // Read-only view over the inline list while no field builder exists yet.
        return java.util.Collections.unmodifiableList(attachments_);
      }
    }
    /**
     *
     *
     * <pre>
     * The attachments returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.Attachment.Builder addAttachmentsBuilder() {
      return getAttachmentsFieldBuilder()
          .addBuilder(com.google.devtools.artifactregistry.v1.Attachment.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The attachments returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.Attachment.Builder addAttachmentsBuilder(
        int index) {
      return getAttachmentsFieldBuilder()
          .addBuilder(
              index, com.google.devtools.artifactregistry.v1.Attachment.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The attachments returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Attachment attachments = 1;</code>
     */
    public java.util.List<com.google.devtools.artifactregistry.v1.Attachment.Builder>
        getAttachmentsBuilderList() {
      return getAttachmentsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 for the attachments field. After this call the
    // inline attachments_ list is nulled out and all access goes through attachmentsBuilder_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.devtools.artifactregistry.v1.Attachment,
            com.google.devtools.artifactregistry.v1.Attachment.Builder,
            com.google.devtools.artifactregistry.v1.AttachmentOrBuilder>
        getAttachmentsFieldBuilder() {
      if (attachmentsBuilder_ == null) {
        attachmentsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.devtools.artifactregistry.v1.Attachment,
                com.google.devtools.artifactregistry.v1.Attachment.Builder,
                com.google.devtools.artifactregistry.v1.AttachmentOrBuilder>(
                attachments_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        attachments_ = null;
      }
      return attachmentsBuilder_;
    }
    // NOTE(review): protoc-generated string-field plumbing. The field is stored as either a
    // java.lang.String or a ByteString; it is decoded lazily and the decoded form is cached
    // back into the field on first access in the other representation.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of attachments, or empty if there are
     * no more attachments to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent reads skip the UTF-8 decode.
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of attachments, or empty if there are
     * no more attachments to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString form symmetrically to getNextPageToken().
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of attachments, or empty if there are
     * no more attachments to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Mark the next_page_token field (bit 1 of bitField0_) as set.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of attachments, or empty if there are
     * no more attachments to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of attachments, or empty if there are
     * no more attachments to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject invalid byte sequences eagerly.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Generated pass-throughs preserving unknown fields for forward compatibility.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1.ListAttachmentsResponse)
}
// @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1.ListAttachmentsResponse)
  // NOTE(review): protoc-generated singleton default instance and parser — do not hand-edit.
  private static final com.google.devtools.artifactregistry.v1.ListAttachmentsResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1.ListAttachmentsResponse();
  }
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<ListAttachmentsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListAttachmentsResponse>() {
        @java.lang.Override
        public ListAttachmentsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect the partial message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListAttachmentsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListAttachmentsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.ListAttachmentsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/fineract | 38,032 | fineract-investor/src/main/java/org/apache/fineract/investor/service/ExternalAssetOwnersWriteServiceImpl.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.investor.service;
import static org.apache.fineract.investor.data.ExternalTransferStatus.ACTIVE_INTERMEDIATE;
import static org.apache.fineract.investor.data.ExternalTransferStatus.PENDING;
import static org.apache.fineract.investor.data.ExternalTransferStatus.PENDING_INTERMEDIATE;
import com.google.gson.JsonElement;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.apache.fineract.cob.data.LoanDataForExternalTransfer;
import org.apache.fineract.infrastructure.configuration.domain.ConfigurationDomainService;
import org.apache.fineract.infrastructure.core.api.JsonCommand;
import org.apache.fineract.infrastructure.core.data.ApiParameterError;
import org.apache.fineract.infrastructure.core.data.CommandProcessingResult;
import org.apache.fineract.infrastructure.core.data.CommandProcessingResultBuilder;
import org.apache.fineract.infrastructure.core.data.DataValidatorBuilder;
import org.apache.fineract.infrastructure.core.domain.ExternalId;
import org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException;
import org.apache.fineract.infrastructure.core.serialization.FromJsonHelper;
import org.apache.fineract.infrastructure.core.serialization.JsonParserHelper;
import org.apache.fineract.infrastructure.core.service.DateUtils;
import org.apache.fineract.infrastructure.core.service.ExternalIdFactory;
import org.apache.fineract.infrastructure.core.service.ThreadLocalContextUtil;
import org.apache.fineract.investor.data.ExternalTransferData;
import org.apache.fineract.investor.data.ExternalTransferRequestParameters;
import org.apache.fineract.investor.data.ExternalTransferStatus;
import org.apache.fineract.investor.data.ExternalTransferSubStatus;
import org.apache.fineract.investor.domain.ExternalAssetOwner;
import org.apache.fineract.investor.domain.ExternalAssetOwnerRepository;
import org.apache.fineract.investor.domain.ExternalAssetOwnerTransfer;
import org.apache.fineract.investor.domain.ExternalAssetOwnerTransferRepository;
import org.apache.fineract.investor.exception.ExternalAssetOwnerInitiateTransferException;
import org.apache.fineract.portfolio.loanaccount.domain.LoanRepository;
import org.apache.fineract.portfolio.loanaccount.domain.LoanStatus;
import org.apache.fineract.portfolio.loanaccount.exception.LoanNotFoundException;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@RequiredArgsConstructor
public class ExternalAssetOwnersWriteServiceImpl implements ExternalAssetOwnersWriteService {
    // Sentinel "open-ended" effective-to date for transfers that are currently in force.
    private static final LocalDate FUTURE_DATE_9999_12_31 = LocalDate.of(9999, 12, 31);
    // Transfer statuses from which a buyback may be initiated in the standard flow.
    private static final List<ExternalTransferStatus> BUYBACK_READY_STATUSES = List.of(ExternalTransferStatus.PENDING,
            ExternalTransferStatus.ACTIVE);
    // Transfer statuses from which a buyback may be initiated when delayed settlement is enabled.
    private static final List<ExternalTransferStatus> BUYBACK_READY_STATUSES_FOR_DELAY_SETTLEMENT = List
            .of(ExternalTransferStatus.ACTIVE_INTERMEDIATE, ExternalTransferStatus.ACTIVE);
    // Collaborators injected through the Lombok-generated constructor (@RequiredArgsConstructor).
    private final ExternalAssetOwnerTransferRepository externalAssetOwnerTransferRepository;
    private final ExternalAssetOwnerRepository externalAssetOwnerRepository;
    private final FromJsonHelper fromApiJsonHelper;
    private final LoanRepository loanRepository;
    private final DelayedSettlementAttributeService delayedSettlementAttributeService;
    private final ConfigurationDomainService configurationDomainService;
    private final ExternalAssetOwnersReadService externalAssetOwnersReadService;
    // Initiates an intermediary sale (PENDING_INTERMEDIATE transfer) for a loan. Only valid for
    // loan products with delayed settlement enabled. Validation order matters: request body,
    // product configuration, transfer external id uniqueness, loan status, then effective-transfer
    // state — each failure raises ExternalAssetOwnerInitiateTransferException.
    @Override
    @Transactional
    public CommandProcessingResult intermediarySaleLoanByLoanId(JsonCommand command) {
        final JsonElement json = fromApiJsonHelper.parse(command.json());
        validateIntermediarySaleRequestBody(command.json());
        Long loanId = command.getLoanId();
        LoanDataForExternalTransfer loanDataForExternalTransfer = fetchAndValidateLoanDataForExternalTransfer(loanId);
        // Intermediary sales are only meaningful under the delayed-settlement product configuration.
        if (!delayedSettlementAttributeService.isEnabled(loanDataForExternalTransfer.getLoanProductId())) {
            throw new ExternalAssetOwnerInitiateTransferException(
                    String.format("Delayed Settlement Configuration is not enabled for the loan product: %s",
                            loanDataForExternalTransfer.getLoanProductShortName()));
        }
        ExternalId externalId = getTransferExternalIdFromJson(json);
        validateExternalId(externalId);
        validateLoanStatusIntermediarySale(loanDataForExternalTransfer);
        ExternalAssetOwnerTransfer intermediarySaleTransfer = createIntermediarySaleTransfer(loanId, json,
                loanDataForExternalTransfer.getExternalId());
        validateIntermediarySale(intermediarySaleTransfer);
        externalAssetOwnerTransferRepository.saveAndFlush(intermediarySaleTransfer);
        return buildResponseData(intermediarySaleTransfer);
    }
    // Initiates a sale (PENDING transfer) of a loan to an external asset owner. The product's
    // delayed-settlement flag selects which loan-status whitelist and effective-transfer rules
    // apply; the transfer is persisted only after all validations pass.
    @Override
    @Transactional
    public CommandProcessingResult saleLoanByLoanId(JsonCommand command) {
        final JsonElement json = fromApiJsonHelper.parse(command.json());
        final LoanDataForExternalTransfer loanDataForExternalTransfer = fetchAndValidateLoanDataForExternalTransfer(command.getLoanId());
        final boolean isDelayedSettlementEnabled = delayedSettlementAttributeService
                .isEnabled(loanDataForExternalTransfer.getLoanProductId());
        validateSaleRequestBody(command.json());
        ExternalId externalId = getTransferExternalIdFromJson(json);
        validateExternalId(externalId);
        Long loanId = command.getLoanId();
        validateLoanStatus(loanDataForExternalTransfer, isDelayedSettlementEnabled);
        ExternalAssetOwnerTransfer externalAssetOwnerTransfer = createSaleTransfer(loanId, json,
                loanDataForExternalTransfer.getExternalId());
        validateSale(externalAssetOwnerTransfer, isDelayedSettlementEnabled);
        externalAssetOwnerTransferRepository.saveAndFlush(externalAssetOwnerTransfer);
        return buildResponseData(externalAssetOwnerTransfer);
    }
    // Initiates a buyback of a previously sold loan: validates the request, resolves the single
    // effective transfer being reversed (flow depends on the product's delayed-settlement flag),
    // and persists a BUYBACK/BUYBACK_INTERMEDIATE counterpart transfer.
    @Override
    @Transactional
    public CommandProcessingResult buybackLoanByLoanId(JsonCommand command) {
        final JsonElement json = fromApiJsonHelper.parse(command.json());
        validateBuybackRequestBody(command.json());
        LoanDataForExternalTransfer loanDataForExternalTransfer = fetchAndValidateLoanDataForExternalTransfer(command.getLoanId());
        LocalDate settlementDate = getSettlementDateFromJson(json);
        ExternalId externalId = getTransferExternalIdFromJson(json);
        validateSettlementDate(settlementDate);
        validateExternalId(externalId);
        ExternalAssetOwnerTransfer effectiveTransfer = fetchAndValidateEffectiveTransferForBuyback(loanDataForExternalTransfer,
                settlementDate);
        ExternalAssetOwnerTransfer externalAssetOwnerTransfer = createBuybackTransfer(effectiveTransfer, settlementDate, externalId);
        externalAssetOwnerTransferRepository.saveAndFlush(externalAssetOwnerTransfer);
        return buildResponseData(externalAssetOwnerTransfer);
    }
private void validateExternalId(ExternalId externalId) {
boolean alreadyExists = externalAssetOwnerTransferRepository
.exists((root, query, criteriaBuilder) -> criteriaBuilder.equal(root.get("externalId"), externalId));
if (alreadyExists) {
throw new ExternalAssetOwnerInitiateTransferException(
String.format("Already existing an asset transfer with the provided transfer external id: %s", externalId.getValue()));
}
}
private LoanDataForExternalTransfer fetchAndValidateLoanDataForExternalTransfer(Long loanId) {
return loanRepository.findLoanDataForExternalTransferByLoanId(loanId).orElseThrow(() -> new LoanNotFoundException(loanId));
}
    // Cancels a PENDING or BUYBACK transfer: the original record's effectivity is closed on the
    // current business date and a CANCELLED mirror record is persisted alongside it. Both saves
    // happen in this order so the mirror exists before the original is updated in the session.
    @Override
    public CommandProcessingResult cancelTransactionById(JsonCommand command) {
        ExternalAssetOwnerTransfer externalAssetOwnerTransfer = fetchAndValidateEffectiveTransferForCancel(command.entityId());
        externalAssetOwnerTransfer.setEffectiveDateTo(DateUtils.getBusinessLocalDate());
        ExternalAssetOwnerTransfer cancelTransfer = createCancelTransfer(externalAssetOwnerTransfer);
        externalAssetOwnerTransferRepository.save(cancelTransfer);
        externalAssetOwnerTransferRepository.save(externalAssetOwnerTransfer);
        return buildResponseData(cancelTransfer);
    }
private void validateEffectiveTransferForSale(final List<ExternalAssetOwnerTransfer> effectiveTransfers) {
if (effectiveTransfers.size() == 2) {
throw new ExternalAssetOwnerInitiateTransferException("This loan cannot be sold, there is already an in progress transfer");
} else if (effectiveTransfers.size() == 1) {
if (PENDING.equals(effectiveTransfers.get(0).getStatus())) {
throw new ExternalAssetOwnerInitiateTransferException(
"External asset owner transfer is already in PENDING state for this loan");
} else if (ExternalTransferStatus.ACTIVE.equals(effectiveTransfers.get(0).getStatus())) {
throw new ExternalAssetOwnerInitiateTransferException(
"This loan cannot be sold, because it is owned by an external asset owner");
} else {
throw new ExternalAssetOwnerInitiateTransferException(String.format(
"This loan cannot be sold, because it is incorrect state! (transferId = %s)", effectiveTransfers.get(0).getId()));
}
}
}
private void validateEffectiveTransferForDelayedSettlementSale(final List<ExternalAssetOwnerTransfer> effectiveTransfers) {
if (effectiveTransfers.size() > 1) {
throw new ExternalAssetOwnerInitiateTransferException("This loan cannot be sold, there is already an in progress transfer");
} else if (effectiveTransfers.size() == 1) {
if (!ACTIVE_INTERMEDIATE.equals(effectiveTransfers.get(0).getStatus())) {
throw new ExternalAssetOwnerInitiateTransferException(
"This loan cannot be sold, because it is not in ACTIVE-INTERMEDIATE state.");
}
} else {
throw new ExternalAssetOwnerInitiateTransferException("This loan cannot be sold, no effective transfer found.");
}
}
private void validateEffectiveTransferForIntermediarySale(final ExternalAssetOwnerTransfer externalAssetOwnerTransfer) {
List<ExternalAssetOwnerTransfer> effectiveTransfers = externalAssetOwnerTransferRepository
.findEffectiveTransfersOrderByIdDesc(externalAssetOwnerTransfer.getLoanId(), DateUtils.getBusinessLocalDate());
if (effectiveTransfers.size() > 1) {
throw new ExternalAssetOwnerInitiateTransferException("This loan cannot be sold, there is already an in progress transfer");
} else if (effectiveTransfers.size() == 1) {
if (PENDING_INTERMEDIATE.equals(effectiveTransfers.get(0).getStatus())) {
throw new ExternalAssetOwnerInitiateTransferException(
"External asset owner transfer is already in PENDING_INTERMEDIATE state for this loan");
} else if (ExternalTransferStatus.ACTIVE.equals(effectiveTransfers.get(0).getStatus())) {
throw new ExternalAssetOwnerInitiateTransferException(
"This loan cannot be sold, because it is owned by an external asset owner");
} else {
throw new ExternalAssetOwnerInitiateTransferException(String.format(
"This loan cannot be sold, because it is incorrect state! (transferId = %s)", effectiveTransfers.get(0).getId()));
}
}
}
private ExternalAssetOwnerTransfer fetchAndValidateEffectiveTransferForBuyback(
final LoanDataForExternalTransfer loanDataForExternalTransfer, final LocalDate settlementDate) {
if (delayedSettlementAttributeService.isEnabled(loanDataForExternalTransfer.getLoanProductId())) {
return fetchAndValidateEffectiveTransferForBuybackWithDelayedSettlement(loanDataForExternalTransfer, settlementDate);
}
List<ExternalAssetOwnerTransfer> effectiveTransfers = externalAssetOwnerTransferRepository
.findEffectiveTransfersOrderByIdDesc(loanDataForExternalTransfer.getId(), DateUtils.getBusinessLocalDate());
if (effectiveTransfers.size() == 0) {
throw new ExternalAssetOwnerInitiateTransferException(
"This loan cannot be bought back, it is not owned by an external asset owner");
} else if (effectiveTransfers.size() == 2) {
throw new ExternalAssetOwnerInitiateTransferException(
"This loan cannot be bought back, external asset owner buyback transfer is already in progress");
} else if (!BUYBACK_READY_STATUSES.contains(effectiveTransfers.get(0).getStatus())) {
throw new ExternalAssetOwnerInitiateTransferException(
String.format("This loan cannot be bought back, effective transfer is not in right state: %s",
effectiveTransfers.get(0).getStatus()));
} else if (DateUtils.isBefore(settlementDate, effectiveTransfers.get(0).getSettlementDate())) {
throw new ExternalAssetOwnerInitiateTransferException(
String.format("This loan cannot be bought back, settlement date is earlier than effective transfer settlement date: %s",
effectiveTransfers.get(0).getSettlementDate()));
}
return effectiveTransfers.get(0);
}
    // Delayed-settlement variant of the buyback validation. The set of statuses across ALL
    // currently effective transfers identifies the in-flight combination: ACTIVE_INTERMEDIATE +
    // PENDING means a final sale is pending; ACTIVE_INTERMEDIATE + BUYBACK_INTERMEDIATE or
    // ACTIVE + BUYBACK means a buyback is already in progress. Otherwise the newest transfer
    // (index 0, ordered by id descending) must itself be buyback-ready and its settlement date
    // must not be after the requested one.
    private ExternalAssetOwnerTransfer fetchAndValidateEffectiveTransferForBuybackWithDelayedSettlement(
            final LoanDataForExternalTransfer loanDataForExternalTransfer, final LocalDate settlementDate) {
        List<ExternalAssetOwnerTransfer> effectiveTransfers = externalAssetOwnerTransferRepository
                .findEffectiveTransfersOrderByIdDesc(loanDataForExternalTransfer.getId(), DateUtils.getBusinessLocalDate());
        if (effectiveTransfers.isEmpty()) {
            throw new ExternalAssetOwnerInitiateTransferException(
                    "This loan cannot be bought back, it is not owned by an external asset owner");
        }
        Set<ExternalTransferStatus> effectiveTransferStatuses = effectiveTransfers.stream().map(ExternalAssetOwnerTransfer::getStatus)
                .collect(Collectors.toSet());
        if (Set.of(ExternalTransferStatus.ACTIVE_INTERMEDIATE, ExternalTransferStatus.PENDING).equals(effectiveTransferStatuses)) {
            throw new ExternalAssetOwnerInitiateTransferException("This loan cannot be bought back, external asset owner sale is pending");
        } else if (Set.of(ExternalTransferStatus.ACTIVE_INTERMEDIATE, ExternalTransferStatus.BUYBACK_INTERMEDIATE)
                .equals(effectiveTransferStatuses)
                || Set.of(ExternalTransferStatus.ACTIVE, ExternalTransferStatus.BUYBACK).equals(effectiveTransferStatuses)) {
            throw new ExternalAssetOwnerInitiateTransferException(
                    "This loan cannot be bought back, external asset owner buyback transfer is already in progress");
        } else if (!BUYBACK_READY_STATUSES_FOR_DELAY_SETTLEMENT.contains(effectiveTransfers.get(0).getStatus())) {
            throw new ExternalAssetOwnerInitiateTransferException(
                    String.format("This loan cannot be bought back, effective transfer is not in right state: %s",
                            effectiveTransfers.get(0).getStatus()));
        } else if (DateUtils.isBefore(settlementDate, effectiveTransfers.get(0).getSettlementDate())) {
            throw new ExternalAssetOwnerInitiateTransferException(
                    String.format("This loan cannot be bought back, settlement date is earlier than effective transfer settlement date: %s",
                            effectiveTransfers.get(0).getSettlementDate()));
        }
        return effectiveTransfers.get(0);
    }
private ExternalAssetOwnerTransfer fetchAndValidateEffectiveTransferForCancel(final Long transferId) {
ExternalAssetOwnerTransfer selectedTransfer = externalAssetOwnerTransferRepository.findById(transferId)
.orElseThrow(() -> new ExternalAssetOwnerInitiateTransferException(
String.format("This loan cannot be cancelled, transfer with id %s does not exist", transferId)));
List<ExternalAssetOwnerTransfer> effective = externalAssetOwnerTransferRepository
.findEffectiveTransfersOrderByIdDesc(selectedTransfer.getLoanId(), DateUtils.getBusinessLocalDate());
if (effective.isEmpty()) {
throw new ExternalAssetOwnerInitiateTransferException(
String.format("This loan cannot be cancelled, there is no effective transfer for this loan"));
} else if (!Objects.equals(effective.get(0).getId(), selectedTransfer.getId())) {
throw new ExternalAssetOwnerInitiateTransferException(
String.format("This loan cannot be cancelled, selected transfer is not the latest"));
} else if (selectedTransfer.getStatus() != PENDING && selectedTransfer.getStatus() != ExternalTransferStatus.BUYBACK) {
throw new ExternalAssetOwnerInitiateTransferException(
"This loan cannot be cancelled, the selected transfer status is not pending or buyback");
}
return selectedTransfer;
}
private ExternalAssetOwnerTransfer createBuybackTransfer(ExternalAssetOwnerTransfer effectiveTransfer, LocalDate settlementDate,
ExternalId externalId) {
LocalDate effectiveDateFrom = DateUtils.getBusinessLocalDate();
ExternalAssetOwnerTransfer externalAssetOwnerTransfer = new ExternalAssetOwnerTransfer();
externalAssetOwnerTransfer.setExternalId(externalId);
externalAssetOwnerTransfer.setOwner(effectiveTransfer.getOwner());
externalAssetOwnerTransfer.setStatus(determineStatusAfterBuyback(effectiveTransfer));
externalAssetOwnerTransfer.setLoanId(effectiveTransfer.getLoanId());
externalAssetOwnerTransfer.setExternalLoanId(effectiveTransfer.getExternalLoanId());
externalAssetOwnerTransfer.setSettlementDate(settlementDate);
externalAssetOwnerTransfer.setEffectiveDateFrom(effectiveDateFrom);
externalAssetOwnerTransfer.setEffectiveDateTo(FUTURE_DATE_9999_12_31);
externalAssetOwnerTransfer.setPurchasePriceRatio(effectiveTransfer.getPurchasePriceRatio());
externalAssetOwnerTransfer.setPreviousOwner(effectiveTransfer.getOwner());
return externalAssetOwnerTransfer;
}
private ExternalTransferStatus determineStatusAfterBuyback(ExternalAssetOwnerTransfer effectiveTransfer) {
return switch (effectiveTransfer.getStatus()) {
case PENDING -> ExternalTransferStatus.BUYBACK;
case ACTIVE -> ExternalTransferStatus.BUYBACK;
case ACTIVE_INTERMEDIATE -> ExternalTransferStatus.BUYBACK_INTERMEDIATE;
default -> throw new ExternalAssetOwnerInitiateTransferException(String.format(
"This loan cannot be bought back, effective transfer is not in right state: %s", effectiveTransfer.getStatus()));
};
}
private ExternalAssetOwnerTransfer createCancelTransfer(ExternalAssetOwnerTransfer effectiveTransfer) {
ExternalAssetOwnerTransfer externalAssetOwnerTransfer = new ExternalAssetOwnerTransfer();
externalAssetOwnerTransfer.setExternalId(effectiveTransfer.getExternalId());
externalAssetOwnerTransfer.setStatus(ExternalTransferStatus.CANCELLED);
externalAssetOwnerTransfer.setSubStatus(ExternalTransferSubStatus.USER_REQUESTED);
externalAssetOwnerTransfer.setLoanId(effectiveTransfer.getLoanId());
externalAssetOwnerTransfer.setExternalLoanId(effectiveTransfer.getExternalLoanId());
externalAssetOwnerTransfer.setExternalGroupId(effectiveTransfer.getExternalGroupId());
externalAssetOwnerTransfer.setOwner(effectiveTransfer.getOwner());
externalAssetOwnerTransfer.setSettlementDate(effectiveTransfer.getSettlementDate());
externalAssetOwnerTransfer.setEffectiveDateFrom(effectiveTransfer.getEffectiveDateFrom());
externalAssetOwnerTransfer.setEffectiveDateTo(effectiveTransfer.getEffectiveDateTo());
externalAssetOwnerTransfer.setPurchasePriceRatio(effectiveTransfer.getPurchasePriceRatio());
return externalAssetOwnerTransfer;
}
private CommandProcessingResult buildResponseData(ExternalAssetOwnerTransfer savedExternalAssetOwnerTransfer) {
return new CommandProcessingResultBuilder().withEntityId(savedExternalAssetOwnerTransfer.getId())
.withEntityExternalId(savedExternalAssetOwnerTransfer.getExternalId())
.withSubEntityId(savedExternalAssetOwnerTransfer.getLoanId())
.withSubEntityExternalId(Objects.isNull(savedExternalAssetOwnerTransfer.getExternalLoanId()) ? null
: savedExternalAssetOwnerTransfer.getExternalLoanId())
.build();
}
private void validateSale(ExternalAssetOwnerTransfer externalAssetOwnerTransfer, boolean isDelayedSettlementEnabled) {
validateSettlementDate(externalAssetOwnerTransfer);
final List<ExternalAssetOwnerTransfer> effectiveTransfers = externalAssetOwnerTransferRepository
.findEffectiveTransfersOrderByIdDesc(externalAssetOwnerTransfer.getLoanId(), DateUtils.getBusinessLocalDate());
if (isDelayedSettlementEnabled) {
validateEffectiveTransferForDelayedSettlementSale(effectiveTransfers);
} else {
validateEffectiveTransferForSale(effectiveTransfers);
}
}
    // Validates an intermediary sale: settlement date must not be in the past and no effective
    // transfer may already exist for the loan.
    private void validateIntermediarySale(ExternalAssetOwnerTransfer externalAssetOwnerTransfer) {
        validateSettlementDate(externalAssetOwnerTransfer);
        validateEffectiveTransferForIntermediarySale(externalAssetOwnerTransfer);
    }
    // Convenience overload delegating to the LocalDate variant.
    private void validateSettlementDate(ExternalAssetOwnerTransfer externalAssetOwnerTransfer) {
        validateSettlementDate(externalAssetOwnerTransfer.getSettlementDate());
    }
    // Transfers may only settle on or after the current business date.
    private void validateSettlementDate(LocalDate settlementDate) {
        if (DateUtils.isBeforeBusinessDate(settlementDate)) {
            throw new ExternalAssetOwnerInitiateTransferException("Settlement date cannot be in the past");
        }
    }
    // Rejects a sale when the loan's status is not on the whitelist for the product's settlement mode.
    private void validateLoanStatus(LoanDataForExternalTransfer loanDataForExternalTransfer, boolean isDelayedSettlementEnabled) {
        LoanStatus loanStatus = loanDataForExternalTransfer.getLoanStatus();
        if (!getValidLoanStatusList(isDelayedSettlementEnabled).contains(loanStatus)) {
            throw new ExternalAssetOwnerInitiateTransferException(String.format("Loan status %s is not valid for transfer.", loanStatus));
        }
    }
    // Intermediary sales always use the standard allowed-status list.
    private void validateLoanStatusIntermediarySale(LoanDataForExternalTransfer loanDataForExternalTransfer) {
        LoanStatus loanStatus = loanDataForExternalTransfer.getLoanStatus();
        if (!getAllowedLoanStatuses().contains(loanStatus)) {
            throw new ExternalAssetOwnerInitiateTransferException(String.format("Loan status %s is not valid for transfer.", loanStatus));
        }
    }
private List<LoanStatus> getValidLoanStatusList(boolean isDelayedSettlementEnabled) {
if (isDelayedSettlementEnabled) {
return getAllowedLoanStatusesForDelayedSettlement();
} else {
return getAllowedLoanStatuses();
}
}
private ExternalAssetOwnerTransfer createSaleTransfer(Long loanId, JsonElement json, ExternalId externalLoanId) {
ExternalAssetOwnerTransfer externalAssetOwnerTransfer = new ExternalAssetOwnerTransfer();
LocalDate effectiveFrom = ThreadLocalContextUtil.getBusinessDate();
ExternalAssetOwner owner = getOwner(json);
externalAssetOwnerTransfer.setOwner(owner);
externalAssetOwnerTransfer.setExternalId(getTransferExternalIdFromJson(json));
externalAssetOwnerTransfer.setStatus(PENDING);
externalAssetOwnerTransfer.setPurchasePriceRatio(getPurchasePriceRatioFromJson(json));
externalAssetOwnerTransfer.setSettlementDate(getSettlementDateFromJson(json));
externalAssetOwnerTransfer.setEffectiveDateFrom(effectiveFrom);
externalAssetOwnerTransfer.setEffectiveDateTo(FUTURE_DATE_9999_12_31);
externalAssetOwnerTransfer.setLoanId(loanId);
externalAssetOwnerTransfer.setExternalLoanId(externalLoanId);
externalAssetOwnerTransfer.setExternalGroupId(getTransferExternalGroupIdFromJson(json));
findPreviousAssetOwner(loanId).ifPresent(externalAssetOwnerTransfer::setPreviousOwner);
return externalAssetOwnerTransfer;
}
private ExternalAssetOwnerTransfer createIntermediarySaleTransfer(Long loanId, JsonElement json, ExternalId externalLoanId) {
ExternalAssetOwnerTransfer externalAssetOwnerTransfer = new ExternalAssetOwnerTransfer();
LocalDate effectiveFrom = ThreadLocalContextUtil.getBusinessDate();
ExternalAssetOwner owner = getOwner(json);
externalAssetOwnerTransfer.setOwner(owner);
externalAssetOwnerTransfer.setExternalId(getTransferExternalIdFromJson(json));
externalAssetOwnerTransfer.setStatus(PENDING_INTERMEDIATE);
externalAssetOwnerTransfer.setPurchasePriceRatio(getPurchasePriceRatioFromJson(json));
externalAssetOwnerTransfer.setSettlementDate(getSettlementDateFromJson(json));
externalAssetOwnerTransfer.setEffectiveDateFrom(effectiveFrom);
externalAssetOwnerTransfer.setEffectiveDateTo(FUTURE_DATE_9999_12_31);
externalAssetOwnerTransfer.setLoanId(loanId);
externalAssetOwnerTransfer.setExternalLoanId(externalLoanId);
externalAssetOwnerTransfer.setExternalGroupId(getTransferExternalGroupIdFromJson(json));
findPreviousAssetOwner(loanId).ifPresent(externalAssetOwnerTransfer::setPreviousOwner);
return externalAssetOwnerTransfer;
}
private Optional<ExternalAssetOwner> findPreviousAssetOwner(final Long loanId) {
final ExternalTransferData activeTransfer = externalAssetOwnersReadService.retrieveActiveTransferData(loanId, null, null);
if (activeTransfer != null && activeTransfer.getOwner() != null) {
final String activeOwnerExternalId = activeTransfer.getOwner().getExternalId();
return externalAssetOwnerRepository.findByExternalId(ExternalIdFactory.produce(activeOwnerExternalId));
}
return Optional.empty();
}
    /**
     * Validates the JSON body of a sale request: rejects unsupported parameters and enforces
     * required/optional and length constraints on each field.
     *
     * @param apiRequestBodyAsJson raw JSON request body
     * @throws PlatformApiDataValidationException if any constraint is violated
     */
    private void validateSaleRequestBody(String apiRequestBodyAsJson) {
        // Only these parameters are accepted; anything else fails the unsupported-parameter check below.
        final Set<String> requestParameters = new HashSet<>(Arrays.asList(ExternalTransferRequestParameters.SETTLEMENT_DATE,
                ExternalTransferRequestParameters.OWNER_EXTERNAL_ID, ExternalTransferRequestParameters.TRANSFER_EXTERNAL_ID,
                ExternalTransferRequestParameters.TRANSFER_EXTERNAL_GROUP_ID, ExternalTransferRequestParameters.PURCHASE_PRICE_RATIO,
                ExternalTransferRequestParameters.DATEFORMAT, ExternalTransferRequestParameters.LOCALE));
        final Type typeOfMap = new TypeToken<Map<String, Object>>() {
        }.getType();
        fromApiJsonHelper.checkForUnsupportedParameters(typeOfMap, apiRequestBodyAsJson, requestParameters);
        final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
        final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("loantransfer");
        final JsonElement json = fromApiJsonHelper.parse(apiRequestBodyAsJson);
        // ownerExternalId: required, at most 100 characters.
        String ownerExternalId = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.OWNER_EXTERNAL_ID, json);
        baseDataValidator.reset().parameter(ExternalTransferRequestParameters.OWNER_EXTERNAL_ID).value(ownerExternalId).notBlank()
                .notExceedingLengthOf(100);
        // transferExternalId: optional, at most 100 characters.
        String transferExternalId = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_ID, json);
        baseDataValidator.reset().parameter(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_ID).value(transferExternalId).ignoreIfNull()
                .notExceedingLengthOf(100);
        // purchasePriceRatio: required, at most 50 characters.
        String purchasePriceRatio = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.PURCHASE_PRICE_RATIO, json);
        baseDataValidator.reset().parameter(ExternalTransferRequestParameters.PURCHASE_PRICE_RATIO).value(purchasePriceRatio).notBlank()
                .notExceedingLengthOf(50);
        // settlementDate: required.
        LocalDate settlementDate = fromApiJsonHelper.extractLocalDateNamed(ExternalTransferRequestParameters.SETTLEMENT_DATE, json);
        baseDataValidator.reset().parameter(ExternalTransferRequestParameters.SETTLEMENT_DATE).value(settlementDate).notNull();
        // transferExternalGroupId: optional, at most 100 characters.
        final String transferExternalGroupId = fromApiJsonHelper
                .extractStringNamed(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_GROUP_ID, json);
        baseDataValidator.reset().parameter(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_GROUP_ID).value(transferExternalGroupId)
                .ignoreIfNull().notExceedingLengthOf(100);
        // Raise all accumulated validation errors at once.
        if (!dataValidationErrors.isEmpty()) {
            throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
                    dataValidationErrors);
        }
    }
private void validateIntermediarySaleRequestBody(String apiRequestBodyAsJson) {
final Set<String> requestParameters = new HashSet<>(Arrays.asList(ExternalTransferRequestParameters.SETTLEMENT_DATE,
ExternalTransferRequestParameters.OWNER_EXTERNAL_ID, ExternalTransferRequestParameters.TRANSFER_EXTERNAL_ID,
ExternalTransferRequestParameters.TRANSFER_EXTERNAL_GROUP_ID, ExternalTransferRequestParameters.PURCHASE_PRICE_RATIO,
ExternalTransferRequestParameters.DATEFORMAT, ExternalTransferRequestParameters.LOCALE));
final Type typeOfMap = new TypeToken<Map<String, Object>>() {
}.getType();
fromApiJsonHelper.checkForUnsupportedParameters(typeOfMap, apiRequestBodyAsJson, requestParameters);
final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("loantransfer");
final JsonElement json = fromApiJsonHelper.parse(apiRequestBodyAsJson);
String ownerExternalId = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.OWNER_EXTERNAL_ID, json);
baseDataValidator.reset().parameter(ExternalTransferRequestParameters.OWNER_EXTERNAL_ID).value(ownerExternalId).notBlank()
.notExceedingLengthOf(100);
String transferExternalId = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_ID, json);
baseDataValidator.reset().parameter(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_ID).value(transferExternalId).ignoreIfNull()
.notExceedingLengthOf(100);
String purchasePriceRatio = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.PURCHASE_PRICE_RATIO, json);
baseDataValidator.reset().parameter(ExternalTransferRequestParameters.PURCHASE_PRICE_RATIO).value(purchasePriceRatio).notBlank()
.notExceedingLengthOf(50);
LocalDate settlementDate = fromApiJsonHelper.extractLocalDateNamed(ExternalTransferRequestParameters.SETTLEMENT_DATE, json);
baseDataValidator.reset().parameter(ExternalTransferRequestParameters.SETTLEMENT_DATE).value(settlementDate).notNull();
String transferExternalGroupId = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_GROUP_ID,
json);
baseDataValidator.reset().parameter(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_GROUP_ID).value(transferExternalGroupId)
.ignoreIfNull().notExceedingLengthOf(100);
if (!dataValidationErrors.isEmpty()) {
throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
dataValidationErrors);
}
}
    /**
     * Validates the JSON body of a buyback request: only settlementDate (required), transferExternalId
     * (optional, max 100 chars), dateFormat and locale are accepted.
     *
     * @param apiRequestBodyAsJson raw JSON request body
     * @throws PlatformApiDataValidationException if any constraint is violated
     */
    private void validateBuybackRequestBody(String apiRequestBodyAsJson) {
        final Set<String> requestParameters = new HashSet<>(
                Arrays.asList(ExternalTransferRequestParameters.SETTLEMENT_DATE, ExternalTransferRequestParameters.TRANSFER_EXTERNAL_ID,
                        ExternalTransferRequestParameters.DATEFORMAT, ExternalTransferRequestParameters.LOCALE));
        final Type typeOfMap = new TypeToken<Map<String, Object>>() {
        }.getType();
        fromApiJsonHelper.checkForUnsupportedParameters(typeOfMap, apiRequestBodyAsJson, requestParameters);
        final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
        final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("loantransfer");
        final JsonElement json = fromApiJsonHelper.parse(apiRequestBodyAsJson);
        String transferExternalId = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_ID, json);
        baseDataValidator.reset().parameter(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_ID).value(transferExternalId).ignoreIfNull()
                .notExceedingLengthOf(100);
        LocalDate settlementDate = fromApiJsonHelper.extractLocalDateNamed(ExternalTransferRequestParameters.SETTLEMENT_DATE, json);
        baseDataValidator.reset().parameter(ExternalTransferRequestParameters.SETTLEMENT_DATE).value(settlementDate).notNull();
        // Raise all accumulated validation errors at once.
        if (!dataValidationErrors.isEmpty()) {
            throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
                    dataValidationErrors);
        }
    }
private LocalDate getSettlementDateFromJson(JsonElement json) {
String dateFormat = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.DATEFORMAT, json);
String locale = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.LOCALE, json);
return fromApiJsonHelper.extractLocalDateNamed(ExternalTransferRequestParameters.SETTLEMENT_DATE, json, dateFormat,
JsonParserHelper.localeFromString(locale));
}
private ExternalId getTransferExternalIdFromJson(JsonElement json) {
String transferExternalId = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_ID, json);
return StringUtils.isEmpty(transferExternalId) ? ExternalId.generate() : ExternalIdFactory.produce(transferExternalId);
}
private ExternalId getTransferExternalGroupIdFromJson(JsonElement json) {
String transferExternalGroupId = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.TRANSFER_EXTERNAL_GROUP_ID,
json);
return StringUtils.isEmpty(transferExternalGroupId) ? null : ExternalIdFactory.produce(transferExternalGroupId);
}
    // Reads the purchase price ratio as the raw string supplied in the request body (may be null when absent).
    private String getPurchasePriceRatioFromJson(JsonElement json) {
        return fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.PURCHASE_PRICE_RATIO, json);
    }
private ExternalAssetOwner getOwner(JsonElement json) {
String ownerExternalId = fromApiJsonHelper.extractStringNamed(ExternalTransferRequestParameters.OWNER_EXTERNAL_ID, json);
Optional<ExternalAssetOwner> byExternalId = externalAssetOwnerRepository
.findByExternalId(ExternalIdFactory.produce(ownerExternalId));
return byExternalId.orElseGet(() -> createAndGetAssetOwner(ownerExternalId));
}
private ExternalAssetOwner createAndGetAssetOwner(String externalId) {
ExternalAssetOwner externalAssetOwner = new ExternalAssetOwner();
externalAssetOwner.setExternalId(ExternalIdFactory.produce(externalId));
return externalAssetOwnerRepository.saveAndFlush(externalAssetOwner);
}
private List<LoanStatus> getAllowedLoanStatuses() {
return configurationDomainService.getAllowedLoanStatusesForExternalAssetTransfer().stream().map(LoanStatus::valueOf)
.collect(Collectors.toList());
}
private List<LoanStatus> getAllowedLoanStatusesForDelayedSettlement() {
return configurationDomainService.getAllowedLoanStatusesOfDelayedSettlementForExternalAssetTransfer().stream()
.map(LoanStatus::valueOf).collect(Collectors.toList());
}
}
|
apache/ozone | 37,870 | hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/checksum/TestContainerMerkleTreeWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.container.checksum;
import static org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeTestUtils.assertTreesSortedAndMatch;
import static org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeTestUtils.buildChunk;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.hadoop.hdds.conf.ConfigurationSource;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.conf.StorageUnit;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.ozone.common.ChecksumByteBuffer;
import org.apache.hadoop.ozone.container.common.helpers.BlockData;
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
class TestContainerMerkleTreeWriter {
private ConfigurationSource config;
private long chunkSize;
  @BeforeEach
  public void init() {
    // Fresh default configuration per test; chunkSize mirrors the configured SCM chunk size in bytes.
    config = new OzoneConfiguration();
    chunkSize = (long) config.getStorageSize(
        ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_KEY, ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_DEFAULT, StorageUnit.BYTES);
  }
@Test
public void testBuildEmptyTree() {
ContainerMerkleTreeWriter tree = new ContainerMerkleTreeWriter();
ContainerProtos.ContainerMerkleTree treeProto = tree.toProto();
assertEquals(0, treeProto.getDataChecksum());
assertEquals(0, treeProto.getBlockMerkleTreeCount());
}
  /**
   * A single block with a single chunk should match the independently built expected tree, have nonzero
   * checksums at container, block and chunk level, and report the chunk's offset and configured length.
   */
  @Test
  public void testBuildOneChunkTree() {
    // Seed the expected and actual trees with the same chunk.
    final long blockID = 1;
    ContainerProtos.ChunkInfo chunk = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    // Build the expected tree proto using the test code.
    ContainerProtos.ChunkMerkleTree chunkTree = buildExpectedChunkTree(chunk);
    ContainerProtos.BlockMerkleTree blockTree = buildExpectedBlockTree(blockID, chunkTree);
    ContainerProtos.ContainerMerkleTree expectedTree = buildExpectedContainerTree(blockTree);
    // Use the ContainerMerkleTreeWriter to build the same tree.
    ContainerMerkleTreeWriter actualTree = new ContainerMerkleTreeWriter();
    actualTree.addChunks(blockID, true, chunk);
    // Ensure the trees match.
    ContainerProtos.ContainerMerkleTree actualTreeProto = actualTree.toProto();
    assertTreesSortedAndMatch(expectedTree, actualTreeProto);
    // Do some manual verification of the generated tree as well.
    assertNotEquals(0, actualTreeProto.getDataChecksum());
    assertEquals(1, actualTreeProto.getBlockMerkleTreeCount());
    ContainerProtos.BlockMerkleTree actualBlockTree = actualTreeProto.getBlockMerkleTree(0);
    assertEquals(1, actualBlockTree.getBlockID());
    assertEquals(1, actualBlockTree.getChunkMerkleTreeCount());
    assertNotEquals(0, actualBlockTree.getDataChecksum());
    ContainerProtos.ChunkMerkleTree actualChunkTree = actualBlockTree.getChunkMerkleTree(0);
    assertEquals(0, actualChunkTree.getOffset());
    assertEquals(chunkSize, actualChunkTree.getLength());
    assertNotEquals(0, actualChunkTree.getDataChecksum());
  }
@Test
public void testBuildTreeWithMissingChunks() {
// These chunks will be used to seed both the expected and actual trees.
final long blockID = 1;
ContainerProtos.ChunkInfo chunk1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
// Chunk 2 is missing.
ContainerProtos.ChunkInfo chunk3 = buildChunk(config, 2, ByteBuffer.wrap(new byte[]{4, 5, 6}));
// Build the expected tree proto using the test code.
ContainerProtos.BlockMerkleTree blockTree = buildExpectedBlockTree(blockID,
buildExpectedChunkTree(chunk1), buildExpectedChunkTree(chunk3));
ContainerProtos.ContainerMerkleTree expectedTree = buildExpectedContainerTree(blockTree);
// Use the ContainerMerkleTree to build the same tree.
ContainerMerkleTreeWriter actualTree = new ContainerMerkleTreeWriter();
actualTree.addChunks(blockID, true, chunk1, chunk3);
// Ensure the trees match.
ContainerProtos.ContainerMerkleTree actualTreeProto = actualTree.toProto();
assertTreesSortedAndMatch(expectedTree, actualTreeProto);
}
@Test
public void testBlockIdIncludedInChecksum() {
// Create a set of chunks to be used in different blocks with identical content.
ContainerProtos.ChunkInfo chunk1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
ContainerProtos.ChunkInfo chunk2 = buildChunk(config, 1, ByteBuffer.wrap(new byte[]{4, 5, 6}));
// Create two blocks with different IDs but identical chunk data
final long blockID1 = 1;
final long blockID2 = 2;
ContainerMerkleTreeWriter tree1 = new ContainerMerkleTreeWriter();
tree1.addChunks(blockID1, true, chunk1, chunk2);
ContainerMerkleTreeWriter tree2 = new ContainerMerkleTreeWriter();
tree2.addChunks(blockID2, true, chunk1, chunk2);
ContainerProtos.ContainerMerkleTree tree1Proto = tree1.toProto();
ContainerProtos.ContainerMerkleTree tree2Proto = tree2.toProto();
// Even though the chunks are identical, the block checksums should be different
// because the block IDs are different
ContainerProtos.BlockMerkleTree block1 = tree1Proto.getBlockMerkleTree(0);
ContainerProtos.BlockMerkleTree block2 = tree2Proto.getBlockMerkleTree(0);
assertEquals(blockID1, block1.getBlockID());
assertEquals(blockID2, block2.getBlockID());
assertNotEquals(block1.getDataChecksum(), block2.getDataChecksum(),
"Blocks with identical chunks but different IDs should have different checksums");
// Consequently, the container checksums should also be different
assertNotEquals(tree1Proto.getDataChecksum(), tree2Proto.getDataChecksum(),
"Containers with blocks having identical chunks but different IDs should have different checksums");
}
@Test
public void testIdenticalBlocksHaveSameChecksum() {
// Create a set of chunks to be used in different blocks with identical content.
ContainerProtos.ChunkInfo chunk1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
ContainerProtos.ChunkInfo chunk2 = buildChunk(config, 1, ByteBuffer.wrap(new byte[]{4, 5, 6}));
// Create two blocks with the same ID and identical chunk data
final long blockID = 1;
ContainerMerkleTreeWriter tree1 = new ContainerMerkleTreeWriter();
tree1.addChunks(blockID, true, chunk1, chunk2);
ContainerMerkleTreeWriter tree2 = new ContainerMerkleTreeWriter();
tree2.addChunks(blockID, true, chunk1, chunk2);
ContainerProtos.ContainerMerkleTree tree1Proto = tree1.toProto();
ContainerProtos.ContainerMerkleTree tree2Proto = tree2.toProto();
// Blocks with same ID and identical chunks should have same checksums
ContainerProtos.BlockMerkleTree block1 = tree1Proto.getBlockMerkleTree(0);
ContainerProtos.BlockMerkleTree block2 = tree2Proto.getBlockMerkleTree(0);
assertEquals(blockID, block1.getBlockID());
assertEquals(blockID, block2.getBlockID());
assertEquals(block1.getDataChecksum(), block2.getDataChecksum(),
"Blocks with same ID and identical chunks should have same checksums");
// Container checksums should also be the same
assertEquals(tree1Proto.getDataChecksum(), tree2Proto.getDataChecksum(),
"Containers with identical blocks should have same checksums");
}
@Test
public void testContainerReplicasWithDifferentMissingBlocksHaveDifferentChecksums() {
// Create identical chunk data that will be used across all blocks
ContainerProtos.ChunkInfo chunk1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
ContainerProtos.ChunkInfo chunk2 = buildChunk(config, 1, ByteBuffer.wrap(new byte[]{4, 5, 6}));
// Scenario: Container has 5 identical blocks, but different replicas are missing different blocks
// Replica 1 is missing block 1 (has blocks 2,3,4,5)
ContainerMerkleTreeWriter replica1 = new ContainerMerkleTreeWriter();
replica1.addChunks(2, true, chunk1, chunk2);
replica1.addChunks(3, true, chunk1, chunk2);
replica1.addChunks(4, true, chunk1, chunk2);
replica1.addChunks(5, true, chunk1, chunk2);
// Replica 2 is missing block 5 (has blocks 1,2,3,4)
ContainerMerkleTreeWriter replica2 = new ContainerMerkleTreeWriter();
replica2.addChunks(1, true, chunk1, chunk2);
replica2.addChunks(2, true, chunk1, chunk2);
replica2.addChunks(3, true, chunk1, chunk2);
replica2.addChunks(4, true, chunk1, chunk2);
ContainerProtos.ContainerMerkleTree replica1Proto = replica1.toProto();
ContainerProtos.ContainerMerkleTree replica2Proto = replica2.toProto();
assertNotEquals(replica1Proto.getDataChecksum(), replica2Proto.getDataChecksum(),
"Container replicas with identical blocks but different missing blocks should have different checksums");
// Verify both replicas have the same number of blocks
assertEquals(4, replica1Proto.getBlockMerkleTreeCount());
assertEquals(4, replica2Proto.getBlockMerkleTreeCount());
}
@Test
public void testBuildTreeWithEmptyBlock() {
final long blockID = 1;
ContainerProtos.BlockMerkleTree blockTree = buildExpectedBlockTree(blockID);
ContainerProtos.ContainerMerkleTree expectedTree = buildExpectedContainerTree(blockTree);
// Use the ContainerMerkleTree to build the same tree.
ContainerMerkleTreeWriter actualTree = new ContainerMerkleTreeWriter();
actualTree.addBlock(blockID);
// Ensure the trees match.
ContainerProtos.ContainerMerkleTree actualTreeProto = actualTree.toProto();
assertTreesSortedAndMatch(expectedTree, actualTreeProto);
}
@Test
public void testAddBlockIdempotent() {
final long blockID = 1;
// Build the expected proto.
ContainerProtos.ChunkInfo chunk1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
ContainerProtos.BlockMerkleTree blockTree = buildExpectedBlockTree(blockID,
buildExpectedChunkTree(chunk1));
ContainerProtos.ContainerMerkleTree expectedTree = buildExpectedContainerTree(blockTree);
// Use the ContainerMerkleTree to build the same tree, calling addBlock in between adding chunks.
ContainerMerkleTreeWriter actualTree = new ContainerMerkleTreeWriter();
actualTree.addBlock(blockID);
actualTree.addChunks(blockID, true, chunk1);
// This should not overwrite the chunk already added to the block.
actualTree.addBlock(blockID);
// Ensure the trees match.
ContainerProtos.ContainerMerkleTree actualTreeProto = actualTree.toProto();
assertTreesSortedAndMatch(expectedTree, actualTreeProto);
}
  /**
   * A container is a set of blocks. Make sure the tree implementation is not dependent on continuity of block IDs.
   */
  @Test
  public void testBuildTreeWithNonContiguousBlockIDs() {
    // Seed the expected and actual trees with the same chunks.
    // Block 2 is intentionally absent to create the ID gap.
    final long blockID1 = 1;
    final long blockID3 = 3;
    ContainerProtos.ChunkInfo b1c1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ContainerProtos.ChunkInfo b1c2 = buildChunk(config, 1, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ContainerProtos.ChunkInfo b3c1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ContainerProtos.ChunkInfo b3c2 = buildChunk(config, 1, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    // Build the expected tree proto using the test code.
    ContainerProtos.BlockMerkleTree blockTree1 = buildExpectedBlockTree(blockID1,
        buildExpectedChunkTree(b1c1), buildExpectedChunkTree(b1c2));
    ContainerProtos.BlockMerkleTree blockTree3 = buildExpectedBlockTree(blockID3,
        buildExpectedChunkTree(b3c1), buildExpectedChunkTree(b3c2));
    ContainerProtos.ContainerMerkleTree expectedTree = buildExpectedContainerTree(blockTree1, blockTree3);
    // Use the ContainerMerkleTree to build the same tree.
    // Add blocks and chunks out of order to test sorting.
    ContainerMerkleTreeWriter actualTree = new ContainerMerkleTreeWriter();
    actualTree.addChunks(blockID3, true, b3c2, b3c1);
    actualTree.addChunks(blockID1, true, b1c1, b1c2);
    // Ensure the trees match.
    ContainerProtos.ContainerMerkleTree actualTreeProto = actualTree.toProto();
    assertTreesSortedAndMatch(expectedTree, actualTreeProto);
  }
  /**
   * Blocks can be built incrementally: chunks may be appended across multiple addChunks calls, in any
   * order, and re-adding a chunk overwrites the existing entry.
   */
  @Test
  public void testAppendToBlocksWhileBuilding() throws Exception {
    // Seed the expected and actual trees with the same chunks.
    final long blockID1 = 1;
    final long blockID2 = 2;
    final long blockID3 = 3;
    ContainerProtos.ChunkInfo b1c1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ContainerProtos.ChunkInfo b1c2 = buildChunk(config, 1, ByteBuffer.wrap(new byte[]{1, 2}));
    ContainerProtos.ChunkInfo b1c3 = buildChunk(config, 2, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ContainerProtos.ChunkInfo b2c1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ContainerProtos.ChunkInfo b2c2 = buildChunk(config, 1, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ContainerProtos.ChunkInfo b3c1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1}));
    ContainerProtos.ChunkInfo b3c2 = buildChunk(config, 1, ByteBuffer.wrap(new byte[]{2, 3, 4}));
    // Build the expected tree proto using the test code.
    ContainerProtos.BlockMerkleTree blockTree1 = buildExpectedBlockTree(blockID1,
        buildExpectedChunkTree(b1c1), buildExpectedChunkTree(b1c2), buildExpectedChunkTree(b1c3));
    ContainerProtos.BlockMerkleTree blockTree2 = buildExpectedBlockTree(blockID2,
        buildExpectedChunkTree(b2c1), buildExpectedChunkTree(b2c2));
    ContainerProtos.BlockMerkleTree blockTree3 = buildExpectedBlockTree(blockID3,
        buildExpectedChunkTree(b3c1), buildExpectedChunkTree(b3c2));
    ContainerProtos.ContainerMerkleTree expectedTree = buildExpectedContainerTree(blockTree1, blockTree2, blockTree3);
    // Use the ContainerMerkleTree to build the same tree.
    // Test building by adding chunks to the blocks individually and out of order.
    ContainerMerkleTreeWriter actualTree = new ContainerMerkleTreeWriter();
    // Add all of block 2 first.
    actualTree.addChunks(blockID2, true, b2c1, b2c2);
    // Then add block 1 in multiple steps wth chunks out of order.
    actualTree.addChunks(blockID1, true, b1c2);
    actualTree.addChunks(blockID1, true, b1c3, b1c1);
    // Add a duplicate chunk to block 3. It should overwrite the existing one.
    actualTree.addChunks(blockID3, true, b3c1, b3c2);
    actualTree.addChunks(blockID3, true, b3c2);
    // Ensure the trees match.
    ContainerProtos.ContainerMerkleTree actualTreeProto = actualTree.toProto();
    assertTreesSortedAndMatch(expectedTree, actualTreeProto);
  }
  /**
   * Test that the setDeletedBlock method correctly marks blocks as deleted: the deleted entry carries the
   * supplied checksum and no chunk trees, while a previously added live block is left untouched.
   */
  @Test
  public void testSetDeletedBlock() {
    final long blockID1 = 1;
    final long blockID2 = 2;
    final long deletedChecksum = 123456789L;
    ContainerMerkleTreeWriter treeWriter = new ContainerMerkleTreeWriter();
    // Add a regular block with chunks first
    ContainerProtos.ChunkInfo chunk = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    treeWriter.addChunks(blockID1, true, chunk);
    // Add a deleted block using setDeletedBlock
    treeWriter.setDeletedBlock(blockID2, deletedChecksum);
    ContainerProtos.ContainerMerkleTree actualTree = treeWriter.toProto();
    // Verify we have 2 blocks
    assertEquals(2, actualTree.getBlockMerkleTreeCount());
    // Find and verify the regular block
    ContainerProtos.BlockMerkleTree regularBlock = actualTree.getBlockMerkleTreeList().stream()
        .filter(b -> b.getBlockID() == blockID1)
        .findFirst()
        .orElseThrow(() -> new AssertionError("Regular block not found"));
    assertEquals(blockID1, regularBlock.getBlockID());
    assertFalse(regularBlock.getDeleted());
    assertEquals(1, regularBlock.getChunkMerkleTreeCount());
    assertNotEquals(0, regularBlock.getDataChecksum());
    // Find and verify the deleted block
    ContainerProtos.BlockMerkleTree deletedBlock = actualTree.getBlockMerkleTreeList().stream()
        .filter(b -> b.getBlockID() == blockID2)
        .findFirst()
        .orElseThrow(() -> new AssertionError("Deleted block not found"));
    assertEquals(blockID2, deletedBlock.getBlockID());
    assertTrue(deletedBlock.getDeleted());
    assertEquals(deletedChecksum, deletedBlock.getDataChecksum());
    assertTrue(deletedBlock.getChunkMerkleTreeList().isEmpty(), "Deleted blocks should not have chunk merkle trees");
  }
/**
* setDeletedBlock should overwrite any existing block with the checksum provided.
*/
@Test
public void testSetDeletedBlockOverwrite() {
final long blockID = 1;
final long deletedChecksum = 123456789L;
ContainerMerkleTreeWriter treeWriter = new ContainerMerkleTreeWriter();
// Add a regular block with chunks first
ContainerProtos.ChunkInfo chunk = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
treeWriter.addChunks(blockID, true, chunk);
// Overwrite the block with a deleted entry that has a different checksum.
treeWriter.setDeletedBlock(blockID, deletedChecksum);
ContainerProtos.ContainerMerkleTree actualTree = treeWriter.toProto();
assertEquals(1, actualTree.getBlockMerkleTreeCount());
// Find and verify the overwritten deleted block
ContainerProtos.BlockMerkleTree deletedBlock = actualTree.getBlockMerkleTreeList().stream()
.filter(b -> b.getBlockID() == blockID)
.findFirst()
.orElseThrow(() -> new AssertionError("block not found"));
assertEquals(blockID, deletedBlock.getBlockID());
assertTrue(deletedBlock.getDeleted());
assertTrue(deletedBlock.getChunkMerkleTreeList().isEmpty());
assertEquals(deletedChecksum, deletedBlock.getDataChecksum());
}
  /**
   * Test that a {@link ContainerMerkleTreeWriter} built from a {@link ContainerProtos.ContainerMerkleTree} will
   * produce an identical proto as the input when it is written again. This test covers both regular blocks with
   * chunks, empty blocks, and deleted blocks to ensure all block types are properly preserved during conversion.
   */
  @Test
  public void testProtoToWriterConversion() {
    final long blockID1 = 1;
    final long blockID2 = 2;
    final long blockID3 = 3;
    final long blockID4 = 4;
    final long blockID5 = 5;
    final long deletedBlockChecksum = 123456L;
    // Chunks for the two regular blocks.
    ContainerProtos.ChunkInfo b1c1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ContainerProtos.ChunkInfo b1c2 = buildChunk(config, 1, ByteBuffer.wrap(new byte[]{1, 2}));
    ContainerProtos.ChunkInfo b1c3 = buildChunk(config, 2, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ContainerProtos.ChunkInfo b2c1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ContainerProtos.ChunkInfo b2c2 = buildChunk(config, 1, ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ContainerProtos.BlockMerkleTree blockTree1 = buildExpectedBlockTree(blockID1,
        buildExpectedChunkTree(b1c1), buildExpectedChunkTree(b1c2), buildExpectedChunkTree(b1c3));
    ContainerProtos.BlockMerkleTree blockTree2 = buildExpectedBlockTree(blockID2,
        buildExpectedChunkTree(b2c1), buildExpectedChunkTree(b2c2));
    // Test that an empty block is preserved during tree conversion.
    ContainerProtos.BlockMerkleTree blockTree3 = buildExpectedBlockTree(blockID3);
    // Test that a deleted block is preserved during tree conversion.
    ContainerProtos.BlockMerkleTree blockTree4 = buildExpectedDeletedBlockTree(blockID4, deletedBlockChecksum);
    ContainerProtos.ContainerMerkleTree expectedTree = buildExpectedContainerTree(blockTree1,
        blockTree2, blockTree3, blockTree4);
    // Round-trip: proto -> writer -> proto must be lossless.
    ContainerMerkleTreeWriter treeWriter = new ContainerMerkleTreeWriter(expectedTree);
    ContainerProtos.ContainerMerkleTree actualTree = treeWriter.toProto();
    assertTreesSortedAndMatch(expectedTree, actualTree);
    // Modifying the tree writer created from the proto should also succeed.
    ContainerProtos.ChunkInfo b3c1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1}));
    treeWriter.addChunks(blockID3, false, b3c1);
    treeWriter.addBlock(blockID5);
    blockTree3 = buildExpectedBlockTree(blockID3, buildExpectedChunkTree(b3c1, false));
    ContainerProtos.BlockMerkleTree blockTree5 = buildExpectedBlockTree(blockID5);
    ContainerProtos.ContainerMerkleTree expectedUpdatedTree = buildExpectedContainerTree(blockTree1,
        blockTree2, blockTree3, blockTree4, blockTree5);
    assertTreesSortedAndMatch(expectedUpdatedTree, treeWriter.toProto());
  }
  /**
   * Tests adding deleted blocks to an empty tree for cases where the final tree checksum should and should not be
   * computed.
   */
  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  public void testAddDeletedBlocksToEmptyTree(boolean computeChecksum) {
    final long containerId = 1L;
    final long blockID1 = 1L;
    final long blockID2 = 2L;
    ContainerMerkleTreeWriter treeWriter = new ContainerMerkleTreeWriter();
    // Create deleted blocks with chunks - always use 2 blocks
    List<BlockData> deletedBlocks = Arrays.asList(
        ContainerMerkleTreeTestUtils.buildBlockData(config, containerId, blockID1),
        ContainerMerkleTreeTestUtils.buildBlockData(config, containerId, blockID2)
    );
    ContainerProtos.ContainerMerkleTree result = treeWriter.addDeletedBlocks(deletedBlocks, computeChecksum);
    // Verify container has 2 blocks
    assertEquals(2, result.getBlockMerkleTreeCount());
    // Verify both blocks are marked as deleted with no chunks
    ContainerProtos.BlockMerkleTree block1 = result.getBlockMerkleTreeList().stream()
        .filter(b -> b.getBlockID() == blockID1)
        .findFirst()
        .orElseThrow(() -> new AssertionError("Block 1 not found"));
    assertTrue(block1.getDeleted());
    assertTrue(block1.getChunkMerkleTreeList().isEmpty());
    ContainerProtos.BlockMerkleTree block2 = result.getBlockMerkleTreeList().stream()
        .filter(b -> b.getBlockID() == blockID2)
        .findFirst()
        .orElseThrow(() -> new AssertionError("Block 2 not found"));
    assertTrue(block2.getDeleted());
    assertTrue(block2.getChunkMerkleTreeList().isEmpty());
    if (computeChecksum) {
      // When requested, the top-level tree and each block carry a nonzero checksum.
      assertTrue(result.hasDataChecksum());
      assertNotEquals(0, result.getDataChecksum());
      assertTrue(block1.hasDataChecksum());
      assertNotEquals(0, block1.getDataChecksum());
      assertTrue(block2.hasDataChecksum());
      assertNotEquals(0, block2.getDataChecksum());
    } else {
      // Top level tree checksum should not be populated, but individual blocks will have checksums.
      assertFalse(result.hasDataChecksum());
      assertTrue(block1.hasDataChecksum());
      assertTrue(block2.hasDataChecksum());
    }
  }
/**
* Test adding deleted blocks to a tree that already has data, including overwriting existing blocks.
*/
@Test
public void testAddDeletedBlocksWithExistingData() {
final long containerId = 1L;
final long blockID1 = 1L;
final long blockID2 = 2L;
final long blockID3 = 3L;
ContainerMerkleTreeWriter treeWriter = new ContainerMerkleTreeWriter();
// Add some existing live blocks
ContainerProtos.ChunkInfo chunk1 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
ContainerProtos.ChunkInfo chunk2 = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{4, 5, 6}));
treeWriter.addChunks(blockID1, true, chunk1); // This will be overwritten
treeWriter.addChunks(blockID2, true, chunk2); // This will remain
// Create deleted blocks - one overlapping, one new
List<BlockData> deletedBlocks = Arrays.asList(
ContainerMerkleTreeTestUtils.buildBlockData(config, containerId, blockID1), // Overwrite existing block
ContainerMerkleTreeTestUtils.buildBlockData(config, containerId, blockID3) // New deleted block
);
ContainerProtos.ContainerMerkleTree result = treeWriter.addDeletedBlocks(deletedBlocks, true);
// Verify we have 3 blocks total
assertEquals(3, result.getBlockMerkleTreeCount());
// Verify block1 was overwritten and is now deleted
ContainerProtos.BlockMerkleTree block1 = result.getBlockMerkleTreeList().stream()
.filter(b -> b.getBlockID() == blockID1)
.findFirst()
.orElseThrow(() -> new AssertionError("Block 1 not found"));
assertTrue(block1.getDeleted());
assertTrue(block1.getChunkMerkleTreeList().isEmpty());
// Verify block2 remains live with its chunks
ContainerProtos.BlockMerkleTree block2 = result.getBlockMerkleTreeList().stream()
.filter(b -> b.getBlockID() == blockID2)
.findFirst()
.orElseThrow(() -> new AssertionError("Block 2 not found"));
assertFalse(block2.getDeleted());
assertEquals(1, block2.getChunkMerkleTreeCount());
// Verify block3 is the new deleted block
ContainerProtos.BlockMerkleTree block3 = result.getBlockMerkleTreeList().stream()
.filter(b -> b.getBlockID() == blockID3)
.findFirst()
.orElseThrow(() -> new AssertionError("Block 3 not found"));
assertTrue(block3.getDeleted());
assertTrue(block3.getChunkMerkleTreeList().isEmpty());
}
/**
* Test that deleted blocks take precedence when the same block exists in both live and deleted states.
*/
@Test
public void testDeletedBlocksTakePrecedence() {
final long containerId = 1L;
final long blockID = 1L;
ContainerMerkleTreeWriter treeWriter = new ContainerMerkleTreeWriter();
// First add a live block
ContainerProtos.ChunkInfo chunk = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
treeWriter.addChunks(blockID, true, chunk);
// Get the checksum of the live block
ContainerProtos.ContainerMerkleTree initialTree = treeWriter.toProto();
long liveBlockChecksum = initialTree.getBlockMerkleTree(0).getDataChecksum();
// Now add the same block as deleted - it should overwrite
List<BlockData> deletedBlocks = Collections.singletonList(
ContainerMerkleTreeTestUtils.buildBlockData(config, containerId, blockID)
);
ContainerProtos.ContainerMerkleTree result = treeWriter.addDeletedBlocks(deletedBlocks, true);
assertEquals(1, result.getBlockMerkleTreeCount());
ContainerProtos.BlockMerkleTree finalBlock = result.getBlockMerkleTree(0);
assertTrue(finalBlock.getDeleted());
assertTrue(finalBlock.getChunkMerkleTreeList().isEmpty());
// The checksum should be different since it's computed from the deleted block's data
assertNotEquals(liveBlockChecksum, finalBlock.getDataChecksum());
}
/**
* If both trees contain a block and ours is live while existing is deleted,
* the deleted one supersedes and its checksum should be used.
*/
@Test
public void testUpdateConflictExistingDeleted() {
final long blockID = 1L;
// Our writer has a live block
ContainerMerkleTreeWriter writer = new ContainerMerkleTreeWriter();
ContainerProtos.ChunkInfo chunk = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{1, 2, 3}));
writer.addChunks(blockID, true, chunk);
// Existing tree marks the same block as deleted with a specific checksum
final long deletedChecksum = 987654321L;
ContainerProtos.BlockMerkleTree existingDeleted = buildExpectedDeletedBlockTree(blockID, deletedChecksum);
ContainerProtos.ContainerMerkleTree existingTree = ContainerProtos.ContainerMerkleTree.newBuilder()
.addBlockMerkleTree(existingDeleted)
.build();
ContainerProtos.ContainerMerkleTree result = writer.update(existingTree);
// Expect the deleted state from existing to override the live state in writer
ContainerProtos.ContainerMerkleTree expected = buildExpectedContainerTree(
buildExpectedDeletedBlockTree(blockID, deletedChecksum));
assertTreesSortedAndMatch(expected, result);
}
/**
* If both trees contain the same live block, our writer's value wins.
*/
@Test
public void testUpdateConflictBothLive() {
final long blockID = 1L;
// Our writer live block with one set of chunks
ContainerMerkleTreeWriter writer = new ContainerMerkleTreeWriter();
ContainerProtos.ChunkInfo ourChunk = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{10, 20, 30}));
writer.addChunks(blockID, true, ourChunk);
// Existing tree has same blockID but different content
ContainerProtos.ChunkInfo existingChunk = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{7, 8, 9}));
ContainerProtos.BlockMerkleTree existingLive = buildExpectedBlockTree(blockID,
buildExpectedChunkTree(existingChunk));
ContainerProtos.ContainerMerkleTree existingTree = ContainerProtos.ContainerMerkleTree.newBuilder()
.addBlockMerkleTree(existingLive)
.build();
ContainerProtos.ContainerMerkleTree result = writer.update(existingTree);
// Expect our writer's live block to be preserved
ContainerProtos.ContainerMerkleTree expected = buildExpectedContainerTree(
buildExpectedBlockTree(blockID, buildExpectedChunkTree(ourChunk)));
assertTreesSortedAndMatch(expected, result);
}
/**
* If our writer has a deleted block and the existing tree has it as live,
* our deleted value wins since we have the latest information.
*/
@Test
public void testUpdateConflictExistingLive() {
final long blockID = 3L;
// Our writer marks the block as deleted
final long ourDeletedChecksum = 12345L;
ContainerMerkleTreeWriter writer = new ContainerMerkleTreeWriter();
writer.setDeletedBlock(blockID, ourDeletedChecksum);
// Existing tree has a live version of the block
ContainerProtos.ChunkInfo existingChunk = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{4, 5, 6}));
ContainerProtos.BlockMerkleTree existingLive = buildExpectedBlockTree(blockID,
buildExpectedChunkTree(existingChunk));
ContainerProtos.ContainerMerkleTree existingTree = ContainerProtos.ContainerMerkleTree.newBuilder()
.addBlockMerkleTree(existingLive)
.build();
ContainerProtos.ContainerMerkleTree result = writer.update(existingTree);
// Expect our deleted entry to be preserved
ContainerProtos.ContainerMerkleTree expected = buildExpectedContainerTree(
buildExpectedDeletedBlockTree(blockID, ourDeletedChecksum));
assertTreesSortedAndMatch(expected, result);
}
/**
* If both the writer's tree and existing tree have deleted versions of a block, our writer's checksum wins.
*/
@Test
public void testUpdateConflictBothDeleted() {
final long blockID = 4L;
final long ourDeletedChecksum = 111L;
final long existingDeletedChecksum = 222L;
ContainerMerkleTreeWriter writer = new ContainerMerkleTreeWriter();
writer.setDeletedBlock(blockID, ourDeletedChecksum);
ContainerProtos.BlockMerkleTree existingDeleted = buildExpectedDeletedBlockTree(blockID, existingDeletedChecksum);
ContainerProtos.ContainerMerkleTree existingTree = ContainerProtos.ContainerMerkleTree.newBuilder()
.addBlockMerkleTree(existingDeleted)
.build();
ContainerProtos.ContainerMerkleTree result = writer.update(existingTree);
ContainerProtos.ContainerMerkleTree expected = buildExpectedContainerTree(
buildExpectedDeletedBlockTree(blockID, ourDeletedChecksum));
assertTreesSortedAndMatch(expected, result);
}
/**
* Merge the existing tree with the tree writer by:
* - including deleted blocks from the existing tree into our tree writer.
* - ignoring live blocks from the existing tree and overwriting them with our tree writer.
*/
@Test
public void testUpdateMergesTrees() {
final long existingLiveBlockID = 5L;
final long existingDeletedBlockID = 6L;
final long existingDeletedChecksum = 555L;
final long ourLiveBlockID = 7L;
final long ourDeletedBlockID = 8L;
final long ourDeletedChecksum = 444L;
// Our writer contains a live block not present in the existing tree
ContainerMerkleTreeWriter writer = new ContainerMerkleTreeWriter();
ContainerProtos.ChunkInfo ourLiveChunk = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{9, 9, 9}));
writer.addChunks(ourLiveBlockID, true, ourLiveChunk);
// Our writer also includes a deleted block not present in the existing tree
writer.setDeletedBlock(ourDeletedBlockID, ourDeletedChecksum);
// Existing tree contains a deleted block (should be included) and a live block (should be ignored)
ContainerProtos.BlockMerkleTree existingDeleted = buildExpectedDeletedBlockTree(existingDeletedBlockID,
existingDeletedChecksum);
ContainerProtos.ChunkInfo existingLiveChunk = buildChunk(config, 0, ByteBuffer.wrap(new byte[]{7, 7, 7}));
ContainerProtos.BlockMerkleTree existingLiveBlock = buildExpectedBlockTree(existingLiveBlockID,
buildExpectedChunkTree(existingLiveChunk));
ContainerProtos.ContainerMerkleTree existingTree = ContainerProtos.ContainerMerkleTree.newBuilder()
.addBlockMerkleTree(existingDeleted)
.addBlockMerkleTree(existingLiveBlock)
.build();
ContainerProtos.ContainerMerkleTree result = writer.update(existingTree);
// Expect union: our live block + existing deleted block, but not the existing live block
ContainerProtos.ContainerMerkleTree expected = buildExpectedContainerTree(
buildExpectedDeletedBlockTree(existingDeletedBlockID, existingDeletedChecksum),
buildExpectedBlockTree(ourLiveBlockID, buildExpectedChunkTree(ourLiveChunk)),
buildExpectedDeletedBlockTree(ourDeletedBlockID, ourDeletedChecksum));
assertTreesSortedAndMatch(expected, result);
}
private ContainerProtos.ContainerMerkleTree buildExpectedContainerTree(
ContainerProtos.BlockMerkleTree... blocks) {
List<ContainerProtos.BlockMerkleTree> blockList = Arrays.asList(blocks);
return ContainerProtos.ContainerMerkleTree.newBuilder()
.addAllBlockMerkleTree(blockList)
.setDataChecksum(computeExpectedChecksum(
blockList.stream()
.map(ContainerProtos.BlockMerkleTree::getDataChecksum)
.collect(Collectors.toList())))
.build();
}
private ContainerProtos.BlockMerkleTree buildExpectedBlockTree(long blockID,
ContainerProtos.ChunkMerkleTree... chunks) {
List<ContainerProtos.ChunkMerkleTree> chunkList = Arrays.asList(chunks);
List<Long> itemsToChecksum = chunkList.stream().map(ContainerProtos.ChunkMerkleTree::getDataChecksum)
.collect(Collectors.toList());
itemsToChecksum.add(0, blockID);
return ContainerProtos.BlockMerkleTree.newBuilder()
.setBlockID(blockID)
.setDataChecksum(computeExpectedChecksum(itemsToChecksum))
.addAllChunkMerkleTree(chunkList)
.setDeleted(false)
.build();
}
private ContainerProtos.BlockMerkleTree buildExpectedDeletedBlockTree(long blockID, long dataChecksum) {
return ContainerProtos.BlockMerkleTree.newBuilder()
.setBlockID(blockID)
.setDataChecksum(dataChecksum)
.setDeleted(true)
// Deleted blocks should not have chunk merkle trees
.build();
}
private ContainerProtos.ChunkMerkleTree buildExpectedChunkTree(ContainerProtos.ChunkInfo chunk) {
return buildExpectedChunkTree(chunk, true);
}
private ContainerProtos.ChunkMerkleTree buildExpectedChunkTree(ContainerProtos.ChunkInfo chunk,
boolean checksumMatches) {
return ContainerProtos.ChunkMerkleTree.newBuilder()
.setOffset(chunk.getOffset())
.setLength(chunk.getLen())
.setDataChecksum(computeExpectedChunkChecksum(chunk.getChecksumData().getChecksumsList()))
.setChecksumMatches(checksumMatches)
.build();
}
private long computeExpectedChecksum(List<Long> checksums) {
// Use the same checksum implementation as the tree writer under test.
ChecksumByteBuffer checksumImpl = ContainerMerkleTreeWriter.CHECKSUM_BUFFER_SUPPLIER.get();
ByteBuffer longBuffer = ByteBuffer.allocate(Long.BYTES * checksums.size());
checksums.forEach(longBuffer::putLong);
longBuffer.flip();
checksumImpl.update(longBuffer);
return checksumImpl.getValue();
}
private long computeExpectedChunkChecksum(List<ByteString> checksums) {
// Use the same checksum implementation as the tree writer under test.
ChecksumByteBuffer checksumImpl = ContainerMerkleTreeWriter.CHECKSUM_BUFFER_SUPPLIER.get();
checksums.forEach(b -> checksumImpl.update(b.asReadOnlyByteBuffer()));
return checksumImpl.getValue();
}
}
|
apache/rocketmq | 37,977 | controller/src/main/java/org/apache/rocketmq/controller/impl/manager/ReplicasInfoManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.controller.impl.manager;
import com.caucho.hessian.io.Hessian2Input;
import com.caucho.hessian.io.Hessian2Output;
import com.caucho.hessian.io.SerializerFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import org.apache.commons.lang3.StringUtils;
import org.apache.rocketmq.common.ControllerConfig;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.constant.LoggerName;
import org.apache.rocketmq.controller.elect.ElectPolicy;
import org.apache.rocketmq.controller.helper.BrokerValidPredicate;
import org.apache.rocketmq.controller.impl.event.AlterSyncStateSetEvent;
import org.apache.rocketmq.controller.impl.event.ApplyBrokerIdEvent;
import org.apache.rocketmq.controller.impl.event.CleanBrokerDataEvent;
import org.apache.rocketmq.controller.impl.event.ControllerResult;
import org.apache.rocketmq.controller.impl.event.ElectMasterEvent;
import org.apache.rocketmq.controller.impl.event.EventMessage;
import org.apache.rocketmq.controller.impl.event.EventType;
import org.apache.rocketmq.controller.impl.event.UpdateBrokerAddressEvent;
import org.apache.rocketmq.logging.org.slf4j.Logger;
import org.apache.rocketmq.logging.org.slf4j.LoggerFactory;
import org.apache.rocketmq.remoting.protocol.ResponseCode;
import org.apache.rocketmq.remoting.protocol.body.BrokerMemberGroup;
import org.apache.rocketmq.remoting.protocol.body.BrokerReplicasInfo;
import org.apache.rocketmq.remoting.protocol.body.ElectMasterResponseBody;
import org.apache.rocketmq.remoting.protocol.body.SyncStateSet;
import org.apache.rocketmq.remoting.protocol.header.controller.AlterSyncStateSetRequestHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.AlterSyncStateSetResponseHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.ElectMasterRequestHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.ElectMasterResponseHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.GetReplicaInfoRequestHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.GetReplicaInfoResponseHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.admin.CleanControllerBrokerDataRequestHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.register.ApplyBrokerIdRequestHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.register.ApplyBrokerIdResponseHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.register.GetNextBrokerIdRequestHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.register.GetNextBrokerIdResponseHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.register.RegisterBrokerToControllerRequestHeader;
import org.apache.rocketmq.remoting.protocol.header.controller.register.RegisterBrokerToControllerResponseHeader;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * The manager that manages the replicas info for all brokers. We can think of this class as the controller's
 * in-memory state machine. If the upper layer wants to update the state machine, it must call its methods
 * sequentially.
 */
public class ReplicasInfoManager {
    private static final Logger LOGGER = LoggerFactory.getLogger(LoggerName.CONTROLLER_LOGGER_NAME);
    // Shared Hessian serializer factory, reused by hessianSerialize (and intended for hessianDeserialize).
    protected static final SerializerFactory SERIALIZER_FACTORY = new SerializerFactory();
    protected final ControllerConfig controllerConfig;
    // brokerName -> replica metadata (broker ids, addresses, register check codes) for that broker set.
    private final Map<String/* brokerName */, BrokerReplicaInfo> replicaInfoTable;
    // brokerName -> master / sync-state-set election state for that broker set.
    private final Map<String/* brokerName */, SyncStateInfo> syncStateSetInfoTable;
protected static byte[] hessianSerialize(Object object) throws IOException {
try (ByteArrayOutputStream bout = new ByteArrayOutputStream()) {
Hessian2Output hessianOut = new Hessian2Output(bout);
hessianOut.setSerializerFactory(SERIALIZER_FACTORY);
hessianOut.writeObject(object);
hessianOut.close();
return bout.toByteArray();
}
}
protected static Object hessianDeserialize(byte[] data) throws IOException {
try (ByteArrayInputStream bin = new ByteArrayInputStream(data, 0, data.length)) {
Hessian2Input hin = new Hessian2Input(bin);
hin.setSerializerFactory(new SerializerFactory());
Object o = hin.readObject();
hin.close();
return o;
}
}
public ReplicasInfoManager(final ControllerConfig config) {
this.controllerConfig = config;
this.replicaInfoTable = new ConcurrentHashMap<String, BrokerReplicaInfo>();
this.syncStateSetInfoTable = new ConcurrentHashMap<String, SyncStateInfo>();
}
    /**
     * Validates and applies a master broker's request to change the in-sync replica set (syncStateSet) of its
     * broker set.
     * <p>
     * The request is rejected unless: the broker set is registered, the new set actually differs from the old
     * one, the request comes from the current master with the current master epoch and syncStateSet epoch, every
     * replica in the new set is registered and alive, and the new set still contains the master itself. On
     * success an {@link AlterSyncStateSetEvent} is attached to the result so the caller can replicate and apply
     * the change to the state machine.
     *
     * @param request carries the broker name plus the requester's claimed master id and master epoch
     * @param syncStateSet the proposed new sync state set together with its epoch
     * @param brokerAlivePredicate used to verify each proposed replica is currently alive
     * @return the controller result; on success its body holds the new {@link SyncStateSet} with a bumped epoch
     */
    public ControllerResult<AlterSyncStateSetResponseHeader> alterSyncStateSet(
        final AlterSyncStateSetRequestHeader request, final SyncStateSet syncStateSet,
        final BrokerValidPredicate brokerAlivePredicate) {
        final String brokerName = request.getBrokerName();
        final ControllerResult<AlterSyncStateSetResponseHeader> result = new ControllerResult<>(new AlterSyncStateSetResponseHeader());
        final AlterSyncStateSetResponseHeader response = result.getResponse();
        if (!isContainsBroker(brokerName)) {
            result.setCodeAndRemark(ResponseCode.CONTROLLER_ALTER_SYNC_STATE_SET_FAILED, "Broker metadata is not existed");
            return result;
        }
        final Set<Long> newSyncStateSet = syncStateSet.getSyncStateSet();
        final SyncStateInfo syncStateInfo = this.syncStateSetInfoTable.get(brokerName);
        final BrokerReplicaInfo brokerReplicaInfo = this.replicaInfoTable.get(brokerName);
        // Check whether the oldSyncStateSet is equal with newSyncStateSet
        final Set<Long> oldSyncStateSet = syncStateInfo.getSyncStateSet();
        if (oldSyncStateSet.size() == newSyncStateSet.size() && oldSyncStateSet.containsAll(newSyncStateSet)) {
            String err = "The newSyncStateSet is equal with oldSyncStateSet, no needed to update syncStateSet";
            LOGGER.warn("{}", err);
            result.setCodeAndRemark(ResponseCode.CONTROLLER_ALTER_SYNC_STATE_SET_FAILED, err);
            return result;
        }
        // Check master: only the current master may alter the sync state set.
        if (syncStateInfo.getMasterBrokerId() == null || !syncStateInfo.getMasterBrokerId().equals(request.getMasterBrokerId())) {
            String err = String.format("Rejecting alter syncStateSet request because the current leader is:{%s}, not {%s}",
                syncStateInfo.getMasterBrokerId(), request.getMasterBrokerId());
            LOGGER.error("{}", err);
            result.setCodeAndRemark(ResponseCode.CONTROLLER_INVALID_MASTER, err);
            return result;
        }
        // Check master epoch: fence off requests from a stale master term.
        if (request.getMasterEpoch() != syncStateInfo.getMasterEpoch()) {
            String err = String.format("Rejecting alter syncStateSet request because the current master epoch is:{%d}, not {%d}",
                syncStateInfo.getMasterEpoch(), request.getMasterEpoch());
            LOGGER.error("{}", err);
            result.setCodeAndRemark(ResponseCode.CONTROLLER_FENCED_MASTER_EPOCH, err);
            return result;
        }
        // Check syncStateSet epoch: the proposal must be based on the latest known set.
        if (syncStateSet.getSyncStateSetEpoch() != syncStateInfo.getSyncStateSetEpoch()) {
            String err = String.format("Rejecting alter syncStateSet request because the current syncStateSet epoch is:{%d}, not {%d}",
                syncStateInfo.getSyncStateSetEpoch(), syncStateSet.getSyncStateSetEpoch());
            LOGGER.error("{}", err);
            result.setCodeAndRemark(ResponseCode.CONTROLLER_FENCED_SYNC_STATE_SET_EPOCH, err);
            return result;
        }
        // Check newSyncStateSet correctness: every proposed replica must be registered and alive.
        for (Long replica : newSyncStateSet) {
            if (!brokerReplicaInfo.isBrokerExist(replica)) {
                String err = String.format("Rejecting alter syncStateSet request because the replicas {%s} don't exist", replica);
                LOGGER.error("{}", err);
                result.setCodeAndRemark(ResponseCode.CONTROLLER_INVALID_REPLICAS, err);
                return result;
            }
            if (!brokerAlivePredicate.check(brokerReplicaInfo.getClusterName(), brokerReplicaInfo.getBrokerName(), replica)) {
                String err = String.format("Rejecting alter syncStateSet request because the replicas {%s} don't alive", replica);
                LOGGER.error(err);
                result.setCodeAndRemark(ResponseCode.CONTROLLER_BROKER_NOT_ALIVE, err);
                return result;
            }
        }
        // The master itself must always remain a member of its sync state set.
        if (!newSyncStateSet.contains(syncStateInfo.getMasterBrokerId())) {
            String err = String.format("Rejecting alter syncStateSet request because the newSyncStateSet don't contains origin leader {%s}", syncStateInfo.getMasterBrokerId());
            LOGGER.error(err);
            result.setCodeAndRemark(ResponseCode.CONTROLLER_ALTER_SYNC_STATE_SET_FAILED, err);
            return result;
        }
        // Generate event: the change takes effect with a bumped syncStateSet epoch.
        int epoch = syncStateInfo.getSyncStateSetEpoch() + 1;
        response.setNewSyncStateSetEpoch(epoch);
        result.setBody(new SyncStateSet(newSyncStateSet, epoch).encode());
        final AlterSyncStateSetEvent event = new AlterSyncStateSetEvent(brokerName, newSyncStateSet);
        result.addEvent(event);
        return result;
    }
    /**
     * Elects a master for the requested broker set.
     * <p>
     * Election order: if no master has ever existed for this broker set, the requesting broker becomes the first
     * master; otherwise the supplied {@link ElectPolicy} chooses from the sync state set (or from all replicas
     * when unclean election is enabled in the controller config). If the elected broker equals the current
     * master, nothing changes and {@code CONTROLLER_MASTER_STILL_EXIST} is returned. A successful election
     * attaches an {@link ElectMasterEvent}; a failed, controller-triggered election (brokerId null or -1)
     * attaches an "un-elect" event so the state machine learns the master is gone.
     *
     * @param request carries the broker set name, the requesting broker id, and the designate-elect flag
     * @param electPolicy the policy used to pick the new master when one must be chosen
     * @return the controller result; on success its body holds an {@link ElectMasterResponseBody}
     */
    public ControllerResult<ElectMasterResponseHeader> electMaster(final ElectMasterRequestHeader request,
        final ElectPolicy electPolicy) {
        final String brokerName = request.getBrokerName();
        final Long brokerId = request.getBrokerId();
        final ControllerResult<ElectMasterResponseHeader> result = new ControllerResult<>(new ElectMasterResponseHeader());
        final ElectMasterResponseHeader response = result.getResponse();
        if (!isContainsBroker(brokerName)) {
            // this broker set hasn't been registered
            result.setCodeAndRemark(ResponseCode.CONTROLLER_BROKER_NEED_TO_BE_REGISTERED, "Broker hasn't been registered");
            return result;
        }
        final SyncStateInfo syncStateInfo = this.syncStateSetInfoTable.get(brokerName);
        final BrokerReplicaInfo brokerReplicaInfo = this.replicaInfoTable.get(brokerName);
        final Set<Long> syncStateSet = syncStateInfo.getSyncStateSet();
        final Long oldMaster = syncStateInfo.getMasterBrokerId();
        // Unclean election may pick any replica, not just members of the sync state set.
        Set<Long> allReplicaBrokers = controllerConfig.isEnableElectUncleanMaster() ? brokerReplicaInfo.getAllBroker() : null;
        Long newMaster = null;
        if (syncStateInfo.isFirstTimeForElect()) {
            // If never have a master in this broker set, in other words, it is the first time to elect a master
            // elect it as the first master
            newMaster = brokerId;
        }
        // elect by policy
        if (newMaster == null || newMaster == -1) {
            // we should assign this assignedBrokerId when the brokerAddress need to be elected by force
            Long assignedBrokerId = request.getDesignateElect() ? brokerId : null;
            newMaster = electPolicy.elect(brokerReplicaInfo.getClusterName(), brokerReplicaInfo.getBrokerName(), syncStateSet, allReplicaBrokers, oldMaster, assignedBrokerId);
        }
        if (newMaster != null && newMaster.equals(oldMaster)) {
            // old master still valid, change nothing
            String err = String.format("The old master %s is still alive, not need to elect new master for broker %s", oldMaster, brokerReplicaInfo.getBrokerName());
            LOGGER.warn("{}", err);
            // the master still exist
            response.setMasterEpoch(syncStateInfo.getMasterEpoch());
            response.setSyncStateSetEpoch(syncStateInfo.getSyncStateSetEpoch());
            response.setMasterBrokerId(oldMaster);
            response.setMasterAddress(brokerReplicaInfo.getBrokerAddress(oldMaster));
            result.setBody(new ElectMasterResponseBody(syncStateSet).encode());
            result.setCodeAndRemark(ResponseCode.CONTROLLER_MASTER_STILL_EXIST, err);
            return result;
        }
        // a new master is elected
        if (newMaster != null) {
            final int masterEpoch = syncStateInfo.getMasterEpoch();
            final int syncStateSetEpoch = syncStateInfo.getSyncStateSetEpoch();
            // The new master starts with a sync state set containing only itself.
            final HashSet<Long> newSyncStateSet = new HashSet<>();
            newSyncStateSet.add(newMaster);
            response.setMasterBrokerId(newMaster);
            response.setMasterAddress(brokerReplicaInfo.getBrokerAddress(newMaster));
            response.setMasterEpoch(masterEpoch + 1);
            response.setSyncStateSetEpoch(syncStateSetEpoch + 1);
            ElectMasterResponseBody responseBody = new ElectMasterResponseBody(newSyncStateSet);
            BrokerMemberGroup brokerMemberGroup = buildBrokerMemberGroup(brokerReplicaInfo);
            if (null != brokerMemberGroup) {
                responseBody.setBrokerMemberGroup(brokerMemberGroup);
            }
            result.setBody(responseBody.encode());
            final ElectMasterEvent event = new ElectMasterEvent(brokerName, newMaster);
            result.addEvent(event);
            LOGGER.info("Elect new master {} for broker {}", newMaster, brokerName);
            return result;
        }
        // If elect failed and the electMaster is triggered by controller (we can figure it out by brokerAddress),
        // we still need to apply an ElectMasterEvent to tell the statemachine
        // that the master was shutdown and no new master was elected.
        if (request.getBrokerId() == null || request.getBrokerId() == -1) {
            final ElectMasterEvent event = new ElectMasterEvent(false, brokerName);
            result.addEvent(event);
            result.setCodeAndRemark(ResponseCode.CONTROLLER_MASTER_NOT_AVAILABLE, "Old master has down and failed to elect a new broker master");
        } else {
            result.setCodeAndRemark(ResponseCode.CONTROLLER_ELECT_MASTER_FAILED, "Failed to elect a new master");
        }
        LOGGER.warn("Failed to elect a new master for broker {}", brokerName);
        return result;
    }
private BrokerMemberGroup buildBrokerMemberGroup(final BrokerReplicaInfo brokerReplicaInfo) {
if (brokerReplicaInfo != null) {
final BrokerMemberGroup group = new BrokerMemberGroup(brokerReplicaInfo.getClusterName(), brokerReplicaInfo.getBrokerName());
final Map<Long, String> brokerIdTable = brokerReplicaInfo.getBrokerIdTable();
final Map<Long, String> memberGroup = new HashMap<>();
brokerIdTable.forEach((id, addr) -> memberGroup.put(id, addr));
group.setBrokerAddrs(memberGroup);
return group;
}
return null;
}
public ControllerResult<GetNextBrokerIdResponseHeader> getNextBrokerId(final GetNextBrokerIdRequestHeader request) {
final String clusterName = request.getClusterName();
final String brokerName = request.getBrokerName();
BrokerReplicaInfo brokerReplicaInfo = this.replicaInfoTable.get(brokerName);
final ControllerResult<GetNextBrokerIdResponseHeader> result = new ControllerResult<>(new GetNextBrokerIdResponseHeader(clusterName, brokerName));
final GetNextBrokerIdResponseHeader response = result.getResponse();
if (brokerReplicaInfo == null) {
// means that none of brokers in this broker-set are registered
response.setNextBrokerId(MixAll.FIRST_BROKER_CONTROLLER_ID);
} else {
response.setNextBrokerId(brokerReplicaInfo.getNextAssignBrokerId());
}
return result;
}
public ControllerResult<ApplyBrokerIdResponseHeader> applyBrokerId(final ApplyBrokerIdRequestHeader request) {
final String clusterName = request.getClusterName();
final String brokerName = request.getBrokerName();
final Long brokerId = request.getAppliedBrokerId();
final String registerCheckCode = request.getRegisterCheckCode();
final String brokerAddress = registerCheckCode.split(";")[0];
BrokerReplicaInfo brokerReplicaInfo = this.replicaInfoTable.get(brokerName);
final ControllerResult<ApplyBrokerIdResponseHeader> result = new ControllerResult<>(new ApplyBrokerIdResponseHeader(clusterName, brokerName));
final ApplyBrokerIdEvent event = new ApplyBrokerIdEvent(clusterName, brokerName, brokerAddress, brokerId, registerCheckCode);
// broker-set unregistered
if (brokerReplicaInfo == null) {
// first brokerId
if (brokerId == MixAll.FIRST_BROKER_CONTROLLER_ID) {
result.addEvent(event);
} else {
result.setCodeAndRemark(ResponseCode.CONTROLLER_BROKER_ID_INVALID, String.format("Broker-set: %s hasn't been registered in controller, but broker try to apply brokerId: %d", brokerName, brokerId));
}
return result;
}
// broker-set registered
if (!brokerReplicaInfo.isBrokerExist(brokerId) || registerCheckCode.equals(brokerReplicaInfo.getBrokerRegisterCheckCode(brokerId))) {
// if brokerId hasn't been assigned or brokerId was assigned to this broker
result.addEvent(event);
return result;
}
result.setCodeAndRemark(ResponseCode.CONTROLLER_BROKER_ID_INVALID, String.format("Fail to apply brokerId: %d in broker-set: %s", brokerId, brokerName));
return result;
}
    /**
     * Handles a broker's registration (or re-registration) with the controller.
     * <p>
     * The broker set and the broker id must already be known (established via getNextBrokerId/applyBrokerId).
     * If a live master exists, its id/address/epochs are copied into the response. The current sync state set is
     * always returned in the body, and an {@link UpdateBrokerAddressEvent} is attached when the broker's address
     * has changed since it was last seen.
     *
     * @param request carries the broker's cluster/broker-set names, broker id, and current address
     * @param alivePredicate used to check whether the recorded master is still alive
     * @return the controller result; its body always holds the current {@link SyncStateSet}
     */
    public ControllerResult<RegisterBrokerToControllerResponseHeader> registerBroker(
        final RegisterBrokerToControllerRequestHeader request, final BrokerValidPredicate alivePredicate) {
        final String brokerAddress = request.getBrokerAddress();
        final String brokerName = request.getBrokerName();
        final String clusterName = request.getClusterName();
        final Long brokerId = request.getBrokerId();
        final ControllerResult<RegisterBrokerToControllerResponseHeader> result = new ControllerResult<>(new RegisterBrokerToControllerResponseHeader(clusterName, brokerName));
        final RegisterBrokerToControllerResponseHeader response = result.getResponse();
        if (!isContainsBroker(brokerName)) {
            result.setCodeAndRemark(ResponseCode.CONTROLLER_BROKER_NEED_TO_BE_REGISTERED, String.format("Broker-set: %s hasn't been registered in controller", brokerName));
            return result;
        }
        final BrokerReplicaInfo brokerReplicaInfo = this.replicaInfoTable.get(brokerName);
        final SyncStateInfo syncStateInfo = this.syncStateSetInfoTable.get(brokerName);
        if (!brokerReplicaInfo.isBrokerExist(brokerId)) {
            result.setCodeAndRemark(ResponseCode.CONTROLLER_BROKER_NEED_TO_BE_REGISTERED, String.format("BrokerId: %d hasn't been registered in broker-set: %s", brokerId, brokerName));
            return result;
        }
        if (syncStateInfo.isMasterExist() && alivePredicate.check(clusterName, brokerName, syncStateInfo.getMasterBrokerId())) {
            // if master still exist
            response.setMasterBrokerId(syncStateInfo.getMasterBrokerId());
            response.setMasterAddress(brokerReplicaInfo.getBrokerAddress(response.getMasterBrokerId()));
            response.setMasterEpoch(syncStateInfo.getMasterEpoch());
            response.setSyncStateSetEpoch(syncStateInfo.getSyncStateSetEpoch());
        }
        result.setBody(new SyncStateSet(syncStateInfo.getSyncStateSet(), syncStateInfo.getSyncStateSetEpoch()).encode());
        // if this broker's address has been changed, we need to update it
        if (!brokerAddress.equals(brokerReplicaInfo.getBrokerAddress(brokerId))) {
            final UpdateBrokerAddressEvent event = new UpdateBrokerAddressEvent(clusterName, brokerName, brokerAddress, brokerId);
            result.addEvent(event);
        }
        return result;
    }
/**
 * Looks up the replica metadata of a single broker-set: master id, master
 * address, master epoch, and the encoded sync-state set (as the result body).
 *
 * @param request carries the broker-set name to look up
 * @return metadata on success; CONTROLLER_BROKER_METADATA_NOT_EXIST when the
 *         broker-set is unknown
 */
public ControllerResult<GetReplicaInfoResponseHeader> getReplicaInfo(final GetReplicaInfoRequestHeader request) {
    final String brokerSet = request.getBrokerName();
    final ControllerResult<GetReplicaInfoResponseHeader> result =
        new ControllerResult<>(new GetReplicaInfoResponseHeader());
    if (!isContainsBroker(brokerSet)) {
        result.setCodeAndRemark(ResponseCode.CONTROLLER_BROKER_METADATA_NOT_EXIST, "Broker metadata is not existed");
        return result;
    }
    final SyncStateInfo stateInfo = this.syncStateSetInfoTable.get(brokerSet);
    final BrokerReplicaInfo replicaInfo = this.replicaInfoTable.get(brokerSet);
    final GetReplicaInfoResponseHeader header = result.getResponse();
    final Long masterId = stateInfo.getMasterBrokerId();
    header.setMasterBrokerId(masterId);
    header.setMasterAddress(replicaInfo.getBrokerAddress(masterId));
    header.setMasterEpoch(stateInfo.getMasterEpoch());
    result.setBody(new SyncStateSet(stateInfo.getSyncStateSet(), stateInfo.getSyncStateSetEpoch()).encode());
    return result;
}
/**
 * Collects, for each requested broker-set, its replicas partitioned into
 * in-sync and out-of-sync groups (each annotated with liveness), and encodes
 * the aggregate BrokerReplicasInfo into the result body.
 *
 * Fix: the original null-checked {@code brokerReplicaInfo} only AFTER already
 * dereferencing {@code syncStateInfo}, and never null-checked
 * {@code syncStateInfo} at all. Both lookups are now validated before any use,
 * so a half-removed entry (e.g. racing with cleanup) is skipped instead of
 * throwing NPE.
 *
 * @param brokerNames          broker-sets to report on; unknown sets are skipped
 * @param brokerAlivePredicate liveness probe applied to every replica
 * @return result whose body is the encoded BrokerReplicasInfo
 */
public ControllerResult<Void> getSyncStateData(final List<String> brokerNames,
    final BrokerValidPredicate brokerAlivePredicate) {
    final ControllerResult<Void> result = new ControllerResult<>();
    final BrokerReplicasInfo brokerReplicasInfo = new BrokerReplicasInfo();
    for (String brokerName : brokerNames) {
        if (!isContainsBroker(brokerName)) {
            continue;
        }
        final SyncStateInfo syncStateInfo = this.syncStateSetInfoTable.get(brokerName);
        final BrokerReplicaInfo brokerReplicaInfo = this.replicaInfoTable.get(brokerName);
        // Defensive: validate BOTH entries before touching either one.
        if (syncStateInfo == null || brokerReplicaInfo == null) {
            continue;
        }
        final Set<Long> syncStateSet = syncStateInfo.getSyncStateSet();
        final Long masterBrokerId = syncStateInfo.getMasterBrokerId();
        final ArrayList<BrokerReplicasInfo.ReplicaIdentity> inSyncReplicas = new ArrayList<>();
        final ArrayList<BrokerReplicasInfo.ReplicaIdentity> notInSyncReplicas = new ArrayList<>();
        // Partition every known replica by membership in the sync-state set.
        brokerReplicaInfo.getBrokerIdTable().forEach((brokerId, brokerAddress) -> {
            Boolean isAlive = brokerAlivePredicate.check(brokerReplicaInfo.getClusterName(), brokerName, brokerId);
            BrokerReplicasInfo.ReplicaIdentity replica = new BrokerReplicasInfo.ReplicaIdentity(brokerName, brokerId, brokerAddress);
            replica.setAlive(isAlive);
            if (syncStateSet.contains(brokerId)) {
                inSyncReplicas.add(replica);
            } else {
                notInSyncReplicas.add(replica);
            }
        });
        final BrokerReplicasInfo.ReplicasInfo inSyncState = new BrokerReplicasInfo.ReplicasInfo(masterBrokerId, brokerReplicaInfo.getBrokerAddress(masterBrokerId), syncStateInfo.getMasterEpoch(), syncStateInfo.getSyncStateSetEpoch(),
            inSyncReplicas, notInSyncReplicas);
        brokerReplicasInfo.addReplicaInfo(brokerName, inSyncState);
    }
    result.setBody(brokerReplicasInfo.encode());
    return result;
}
/**
 * Validates a request to purge broker metadata and, when legal, emits a
 * CleanBrokerDataEvent for the state machine to apply.
 *
 * Unless the caller explicitly allows cleaning living brokers, the request is
 * rejected while (a) the whole broker-set still has a recorded master, or
 * (b) any individually listed broker id is still alive.
 *
 * @param requestHeader  clean request (cluster, broker-set, optional ';'-separated id list)
 * @param validPredicate liveness probe for individual broker ids
 * @return empty result with a CleanBrokerDataEvent on success; otherwise
 *         CONTROLLER_INVALID_CLEAN_BROKER_METADATA with a reason
 */
public ControllerResult<Void> cleanBrokerData(final CleanControllerBrokerDataRequestHeader requestHeader,
    final BrokerValidPredicate validPredicate) {
    final ControllerResult<Void> result = new ControllerResult<>();
    final String cluster = requestHeader.getClusterName();
    final String brokerSet = requestHeader.getBrokerName();
    final String idsToClean = requestHeader.getBrokerControllerIdsToClean();
    Set<Long> idSet = null;
    if (!requestHeader.isCleanLivingBroker()) {
        //if SyncStateInfo.masterAddress is not empty, at least one broker with the same BrokerName is alive
        final SyncStateInfo stateInfo = this.syncStateSetInfoTable.get(brokerSet);
        final boolean noExplicitIds = StringUtils.isBlank(idsToClean);
        if (noExplicitIds && null != stateInfo && stateInfo.getMasterBrokerId() != null) {
            result.setCodeAndRemark(ResponseCode.CONTROLLER_INVALID_CLEAN_BROKER_METADATA,
                String.format("Broker %s is still alive, clean up failure", requestHeader.getBrokerName()));
            return result;
        }
        if (!noExplicitIds) {
            try {
                idSet = Stream.of(idsToClean.split(";")).map(Long::valueOf).collect(Collectors.toSet());
            } catch (NumberFormatException numberFormatException) {
                result.setCodeAndRemark(ResponseCode.CONTROLLER_INVALID_CLEAN_BROKER_METADATA,
                    String.format("Please set the option <brokerControllerIdsToClean> according to the format, exception: %s", numberFormatException));
                return result;
            }
            for (final Long id : idSet) {
                if (validPredicate.check(cluster, brokerSet, id)) {
                    result.setCodeAndRemark(ResponseCode.CONTROLLER_INVALID_CLEAN_BROKER_METADATA,
                        String.format("Broker [%s, %s] is still alive, clean up failure", requestHeader.getBrokerName(), id));
                    return result;
                }
            }
        }
    }
    if (!isContainsBroker(brokerSet)) {
        result.setCodeAndRemark(ResponseCode.CONTROLLER_INVALID_CLEAN_BROKER_METADATA,
            String.format("Broker %s is not existed,clean broker data failure.", brokerSet));
        return result;
    }
    result.addEvent(new CleanBrokerDataEvent(brokerSet, idSet));
    return result;
}
/**
 * Scans every broker-set and returns those whose recorded master is no longer
 * alive while at least one replica in the set still is — i.e. re-election is
 * both needed and possible.
 *
 * @param validPredicate liveness probe for individual broker ids
 * @return broker-set names that require a new master election
 */
public List<String/*BrokerName*/> scanNeedReelectBrokerSets(final BrokerValidPredicate validPredicate) {
    final List<String> candidates = new LinkedList<>();
    this.syncStateSetInfoTable.forEach((brokerSet, stateInfo) -> {
        final Long masterId = stateInfo.getMasterBrokerId();
        final String cluster = stateInfo.getClusterName();
        if (masterId == null) {
            return; // no master recorded, nothing to re-elect
        }
        if (validPredicate.check(cluster, brokerSet, masterId)) {
            return; // master still alive, no action needed
        }
        // Master is down; re-elect only if some replica in the set is reachable.
        final boolean anyAlive = this.replicaInfoTable.get(brokerSet)
            .getBrokerIdTable()
            .keySet()
            .stream()
            .anyMatch(id -> validPredicate.check(cluster, brokerSet, id));
        if (anyAlive) {
            candidates.add(brokerSet);
        }
    });
    return candidates;
}
/**
 * Apply events to memory statemachine.
 *
 * Dispatches each replicated event to its type-specific handler. Event types
 * without a case fall through to the no-op default, so unrecognized events are
 * silently ignored.
 *
 * @param event event message
 */
public void applyEvent(final EventMessage event) {
final EventType type = event.getEventType();
switch (type) {
case ALTER_SYNC_STATE_SET_EVENT:
handleAlterSyncStateSet((AlterSyncStateSetEvent) event);
break;
case APPLY_BROKER_ID_EVENT:
handleApplyBrokerId((ApplyBrokerIdEvent) event);
break;
case ELECT_MASTER_EVENT:
handleElectMaster((ElectMasterEvent) event);
break;
case CLEAN_BROKER_DATA_EVENT:
handleCleanBrokerDataEvent((CleanBrokerDataEvent) event);
break;
case UPDATE_BROKER_ADDRESS:
handleUpdateBrokerAddress((UpdateBrokerAddressEvent) event);
break;
default:
// Unknown event type: deliberately ignored.
break;
}
}
/**
 * State-machine handler: replaces the sync-state set of a registered
 * broker-set. Events referencing an unknown broker-set are ignored.
 */
private void handleAlterSyncStateSet(final AlterSyncStateSetEvent event) {
    final String brokerSet = event.getBrokerName();
    if (!isContainsBroker(brokerSet)) {
        return;
    }
    this.syncStateSetInfoTable.get(brokerSet).updateSyncStateSetInfo(event.getNewSyncStateSet());
}
/**
 * State-machine handler: records a newly applied broker id.
 *
 * For an already-known broker-set the id is added only if it is not present
 * (making replays idempotent). For a brand-new broker-set both metadata
 * tables are bootstrapped: a replica table entry holding the new id, and an
 * empty SyncStateInfo.
 */
private void handleApplyBrokerId(final ApplyBrokerIdEvent event) {
    final String brokerSet = event.getBrokerName();
    if (isContainsBroker(brokerSet)) {
        final BrokerReplicaInfo replicas = this.replicaInfoTable.get(brokerSet);
        if (replicas.isBrokerExist(event.getNewBrokerId())) {
            return; // id already applied: event is a no-op replay
        }
        replicas.addBroker(event.getNewBrokerId(), event.getBrokerAddress(), event.getRegisterCheckCode());
        return;
    }
    // First replica of this broker-set: bootstrap both metadata tables.
    final String cluster = event.getClusterName();
    final BrokerReplicaInfo replicas = new BrokerReplicaInfo(cluster, brokerSet);
    replicas.addBroker(event.getNewBrokerId(), event.getBrokerAddress(), event.getRegisterCheckCode());
    this.replicaInfoTable.put(brokerSet, replicas);
    // Initialize an empty syncStateInfo for this broker set.
    this.syncStateSetInfoTable.put(brokerSet, new SyncStateInfo(cluster, brokerSet));
}
/**
 * State-machine handler: updates the address recorded for a broker id.
 *
 * Fix: every sibling handler guards against events that reference an
 * unregistered broker-set, but this one dereferenced the table entry
 * unconditionally — a stale or replayed event would throw NPE and crash
 * event application. Such events are now logged and ignored, mirroring
 * handleElectMaster.
 */
private void handleUpdateBrokerAddress(final UpdateBrokerAddressEvent event) {
    final String brokerName = event.getBrokerName();
    final BrokerReplicaInfo brokerReplicaInfo = this.replicaInfoTable.get(brokerName);
    if (brokerReplicaInfo == null) {
        LOGGER.error("Receive an UpdateBrokerAddressEvent which contains the un-registered broker, event = {}", event);
        return;
    }
    brokerReplicaInfo.updateBrokerAddress(event.getBrokerId(), event.getBrokerAddress());
}
/**
 * State-machine handler: records the outcome of a master election.
 *
 * When a new master was elected, it becomes both the recorded master and the
 * sole member of the sync-state set. When no master could be elected, the old
 * master is cleared while the existing sync-state set is retained.
 */
private void handleElectMaster(final ElectMasterEvent event) {
    final String brokerSet = event.getBrokerName();
    if (!isContainsBroker(brokerSet)) {
        LOGGER.error("Receive an ElectMasterEvent which contains the un-registered broker, event = {}", event);
        return;
    }
    final SyncStateInfo stateInfo = this.syncStateSetInfoTable.get(brokerSet);
    if (!event.getNewMasterElected()) {
        // If new master was not elected, which means old master was shutdown and the newSyncStateSet list had no more replicas
        // So we should delete old master, but retain newSyncStateSet list.
        stateInfo.updateMasterInfo(null);
        return;
    }
    final Long newMaster = event.getNewMasterBrokerId();
    stateInfo.updateMasterInfo(newMaster);
    // A fresh election shrinks the sync-state set to just the new master.
    final HashSet<Long> soleMember = new HashSet<>();
    soleMember.add(newMaster);
    stateInfo.updateSyncStateSetInfo(soleMember);
}
/**
 * State-machine handler: removes broker metadata.
 *
 * A null or empty id set wipes the whole broker-set from both tables.
 * Otherwise only the listed ids are removed, and each table entry is dropped
 * once it becomes empty.
 */
private void handleCleanBrokerDataEvent(final CleanBrokerDataEvent event) {
    final String brokerSet = event.getBrokerName();
    final Set<Long> idsToClean = event.getBrokerIdSetToClean();
    final boolean wipeAll = (idsToClean == null || idsToClean.isEmpty());
    if (wipeAll) {
        this.replicaInfoTable.remove(brokerSet);
        this.syncStateSetInfoTable.remove(brokerSet);
        return;
    }
    if (!isContainsBroker(brokerSet)) {
        return;
    }
    final BrokerReplicaInfo replicas = this.replicaInfoTable.get(brokerSet);
    final SyncStateInfo stateInfo = this.syncStateSetInfoTable.get(brokerSet);
    for (final Long id : idsToClean) {
        replicas.removeBrokerId(id);
        stateInfo.removeFromSyncState(id);
    }
    if (replicas.getBrokerIdTable().isEmpty()) {
        this.replicaInfoTable.remove(brokerSet);
    }
    if (stateInfo.getSyncStateSet().isEmpty()) {
        this.syncStateSetInfoTable.remove(brokerSet);
    }
}
/**
 * Is the broker existed in the memory metadata
 *
 * @param brokerName name of the broker-set to look up
 * @return true if both existed in replicaInfoTable and syncStateSetInfoTable
 */
private boolean isContainsBroker(final String brokerName) {
return this.replicaInfoTable.containsKey(brokerName) && this.syncStateSetInfoTable.containsKey(brokerName);
}
/**
 * Writes {@code value} to the stream as four big-endian bytes
 * (most-significant byte first). Inverse of {@link #getInt(byte[], int)}.
 */
protected void putInt(ByteArrayOutputStream outputStream, int value) {
    for (int shift = 24; shift >= 0; shift -= 8) {
        outputStream.write((byte) (value >>> shift));
    }
}
/**
 * Reads four big-endian bytes starting at {@code index} and assembles an int.
 * Inverse of {@link #putInt(ByteArrayOutputStream, int)}.
 */
protected int getInt(byte[] memory, int index) {
    int value = 0;
    for (int i = 0; i < 4; i++) {
        value = (value << 8) | (memory[index + i] & 0xFF);
    }
    return value;
}
/**
 * Serializes both metadata tables into a single byte[] snapshot.
 *
 * Wire format (all ints big-endian, written via putInt):
 *   [replicaInfoTable.size]
 *     repeated: [nameLen][name UTF-8][blobLen][hessian(BrokerReplicaInfo)]
 *   [syncStateSetInfoTable.size]
 *     repeated: [nameLen][name UTF-8][blobLen][hessian(SyncStateInfo)]
 * This layout must stay in lock-step with {@link #deserializeFrom(byte[])}.
 *
 * @return the encoded snapshot
 * @throws Throwable if hessian serialization or stream writing fails
 *                   (logged here, then rethrown to the caller)
 */
public byte[] serialize() throws Throwable {
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
// Section 1: replica info table.
putInt(outputStream, this.replicaInfoTable.size());
for (Map.Entry<String, BrokerReplicaInfo> entry : replicaInfoTable.entrySet()) {
final byte[] brokerName = entry.getKey().getBytes(StandardCharsets.UTF_8);
byte[] brokerReplicaInfo = hessianSerialize(entry.getValue());
putInt(outputStream, brokerName.length);
outputStream.write(brokerName);
putInt(outputStream, brokerReplicaInfo.length);
outputStream.write(brokerReplicaInfo);
}
// Section 2: sync-state set info table (same length-prefixed layout).
putInt(outputStream, this.syncStateSetInfoTable.size());
for (Map.Entry<String, SyncStateInfo> entry : syncStateSetInfoTable.entrySet()) {
final byte[] brokerName = entry.getKey().getBytes(StandardCharsets.UTF_8);
byte[] syncStateInfo = hessianSerialize(entry.getValue());
putInt(outputStream, brokerName.length);
outputStream.write(brokerName);
putInt(outputStream, syncStateInfo.length);
outputStream.write(syncStateInfo);
}
return outputStream.toByteArray();
} catch (Throwable e) {
LOGGER.error("serialize replicaInfoTable or syncStateSetInfoTable error", e);
throw e;
}
}
/**
 * Restores both metadata tables from a snapshot produced by
 * {@link #serialize()}. Existing in-memory state is cleared first, so a
 * failed parse leaves the tables partially populated — callers are expected
 * to treat the thrown error as fatal for this state machine.
 *
 * @param data snapshot bytes in the exact format written by serialize()
 * @throws Throwable if the data cannot be parsed (logged here, then rethrown)
 */
public void deserializeFrom(byte[] data) throws Throwable {
// Cursor into data; advanced past every field as it is consumed.
int index = 0;
this.replicaInfoTable.clear();
this.syncStateSetInfoTable.clear();
try {
// Section 1: replica info table.
int replicaInfoTableSize = getInt(data, index);
index += 4;
for (int i = 0; i < replicaInfoTableSize; i++) {
int brokerNameLength = getInt(data, index);
index += 4;
String brokerName = new String(data, index, brokerNameLength, StandardCharsets.UTF_8);
index += brokerNameLength;
int brokerReplicaInfoLength = getInt(data, index);
index += 4;
byte[] brokerReplicaInfoArray = new byte[brokerReplicaInfoLength];
System.arraycopy(data, index, brokerReplicaInfoArray, 0, brokerReplicaInfoLength);
BrokerReplicaInfo brokerReplicaInfo = (BrokerReplicaInfo) hessianDeserialize(brokerReplicaInfoArray);
index += brokerReplicaInfoLength;
this.replicaInfoTable.put(brokerName, brokerReplicaInfo);
}
// Section 2: sync-state set info table (same length-prefixed layout).
int syncStateSetInfoTableSize = getInt(data, index);
index += 4;
for (int i = 0; i < syncStateSetInfoTableSize; i++) {
int brokerNameLength = getInt(data, index);
index += 4;
String brokerName = new String(data, index, brokerNameLength, StandardCharsets.UTF_8);
index += brokerNameLength;
int syncStateInfoLength = getInt(data, index);
index += 4;
byte[] syncStateInfoArray = new byte[syncStateInfoLength];
System.arraycopy(data, index, syncStateInfoArray, 0, syncStateInfoLength);
SyncStateInfo syncStateInfo = (SyncStateInfo) hessianDeserialize(syncStateInfoArray);
index += syncStateInfoLength;
this.syncStateSetInfoTable.put(brokerName, syncStateInfo);
}
} catch (Throwable e) {
LOGGER.error("deserialize replicaInfoTable or syncStateSetInfoTable error", e);
throw e;
}
}
}
|
googleapis/google-cloud-java | 37,499 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/SearchDataItemsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/dataset_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [DatasetService.SearchDataItems][google.cloud.aiplatform.v1.DatasetService.SearchDataItems].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.SearchDataItemsResponse}
*/
public final class SearchDataItemsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.SearchDataItemsResponse)
SearchDataItemsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchDataItemsResponse.newBuilder() to construct.
private SearchDataItemsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchDataItemsResponse() {
dataItemViews_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SearchDataItemsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_SearchDataItemsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_SearchDataItemsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.SearchDataItemsResponse.class,
com.google.cloud.aiplatform.v1.SearchDataItemsResponse.Builder.class);
}
public static final int DATA_ITEM_VIEWS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1.DataItemView> dataItemViews_;
/**
*
*
* <pre>
* The DataItemViews read.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.DataItemView> getDataItemViewsList() {
return dataItemViews_;
}
/**
*
*
* <pre>
* The DataItemViews read.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.DataItemViewOrBuilder>
getDataItemViewsOrBuilderList() {
return dataItemViews_;
}
/**
*
*
* <pre>
* The DataItemViews read.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
*/
@java.lang.Override
public int getDataItemViewsCount() {
return dataItemViews_.size();
}
/**
*
*
* <pre>
* The DataItemViews read.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.DataItemView getDataItemViews(int index) {
return dataItemViews_.get(index);
}
/**
*
*
* <pre>
* The DataItemViews read.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.DataItemViewOrBuilder getDataItemViewsOrBuilder(int index) {
return dataItemViews_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to
* [SearchDataItemsRequest.page_token][google.cloud.aiplatform.v1.SearchDataItemsRequest.page_token]
* to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Field still holds the lazily-parsed ByteString: decode it once and
// cache the resulting String back into the field for later calls.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to
* [SearchDataItemsRequest.page_token][google.cloud.aiplatform.v1.SearchDataItemsRequest.page_token]
* to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Fields are emitted in field-number order: 1 = data_item_views, 2 = next_page_token.
for (int i = 0; i < dataItemViews_.size(); i++) {
output.writeMessage(1, dataItemViews_.get(i));
}
// Proto3 default: an empty string is not serialized at all.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Size is memoized; -1 is the "not yet computed" sentinel. Safe because the
// message is immutable once built.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < dataItemViews_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, dataItemViews_.get(i));
}
// Empty string is skipped on the wire, so it contributes no bytes here either.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.SearchDataItemsResponse)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.SearchDataItemsResponse other =
(com.google.cloud.aiplatform.v1.SearchDataItemsResponse) obj;
if (!getDataItemViewsList().equals(other.getDataItemViewsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Hash is memoized; 0 is the "not yet computed" sentinel. Safe because the
// message is immutable once built.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// Repeated field is folded in only when non-empty, mirroring equals().
if (getDataItemViewsCount() > 0) {
hash = (37 * hash) + DATA_ITEM_VIEWS_FIELD_NUMBER;
hash = (53 * hash) + getDataItemViewsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.SearchDataItemsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [DatasetService.SearchDataItems][google.cloud.aiplatform.v1.DatasetService.SearchDataItems].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.SearchDataItemsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.SearchDataItemsResponse)
com.google.cloud.aiplatform.v1.SearchDataItemsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_SearchDataItemsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_SearchDataItemsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.SearchDataItemsResponse.class,
com.google.cloud.aiplatform.v1.SearchDataItemsResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.SearchDataItemsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (dataItemViewsBuilder_ == null) {
dataItemViews_ = java.util.Collections.emptyList();
} else {
dataItemViews_ = null;
dataItemViewsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_SearchDataItemsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.SearchDataItemsResponse getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.SearchDataItemsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.SearchDataItemsResponse build() {
com.google.cloud.aiplatform.v1.SearchDataItemsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.SearchDataItemsResponse buildPartial() {
com.google.cloud.aiplatform.v1.SearchDataItemsResponse result =
new com.google.cloud.aiplatform.v1.SearchDataItemsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.aiplatform.v1.SearchDataItemsResponse result) {
if (dataItemViewsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
dataItemViews_ = java.util.Collections.unmodifiableList(dataItemViews_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.dataItemViews_ = dataItemViews_;
} else {
result.dataItemViews_ = dataItemViewsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.aiplatform.v1.SearchDataItemsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.SearchDataItemsResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1.SearchDataItemsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.SearchDataItemsResponse other) {
if (other == com.google.cloud.aiplatform.v1.SearchDataItemsResponse.getDefaultInstance())
return this;
// Repeated field merge: two code paths depending on whether this builder is
// in "plain list" mode (no field builder yet) or "field builder" mode.
if (dataItemViewsBuilder_ == null) {
if (!other.dataItemViews_.isEmpty()) {
if (dataItemViews_.isEmpty()) {
// Adopt the other message's immutable list directly (no copy needed).
dataItemViews_ = other.dataItemViews_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDataItemViewsIsMutable();
dataItemViews_.addAll(other.dataItemViews_);
}
onChanged();
}
} else {
if (!other.dataItemViews_.isEmpty()) {
if (dataItemViewsBuilder_.isEmpty()) {
// Builder holds nothing: drop it and adopt the other list, re-creating
// the field builder only if the runtime forces builder mode.
dataItemViewsBuilder_.dispose();
dataItemViewsBuilder_ = null;
dataItemViews_ = other.dataItemViews_;
bitField0_ = (bitField0_ & ~0x00000001);
dataItemViewsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDataItemViewsFieldBuilder()
: null;
} else {
dataItemViewsBuilder_.addAllMessages(other.dataItemViews_);
}
}
}
// Scalar merge: proto3 semantics, a non-default value in other wins.
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.aiplatform.v1.DataItemView m =
input.readMessage(
com.google.cloud.aiplatform.v1.DataItemView.parser(), extensionRegistry);
if (dataItemViewsBuilder_ == null) {
ensureDataItemViewsIsMutable();
dataItemViews_.add(m);
} else {
dataItemViewsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
    // Bit 0x1 tracks whether dataItemViews_ holds a private mutable copy;
    // bit 0x2 tracks whether next_page_token has been explicitly set.
    private int bitField0_;
    // Backing list for field 1; starts as the shared immutable empty list and
    // is copy-on-write via ensureDataItemViewsIsMutable().
    private java.util.List<com.google.cloud.aiplatform.v1.DataItemView> dataItemViews_ =
        java.util.Collections.emptyList();
    private void ensureDataItemViewsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        dataItemViews_ =
            new java.util.ArrayList<com.google.cloud.aiplatform.v1.DataItemView>(dataItemViews_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily-created nested builder support; while null, accessors operate on
    // dataItemViews_ directly.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.DataItemView,
            com.google.cloud.aiplatform.v1.DataItemView.Builder,
            com.google.cloud.aiplatform.v1.DataItemViewOrBuilder>
        dataItemViewsBuilder_;
    // ---------------------------------------------------------------------
    // Generated accessors for repeated field 1 `data_item_views`. Every
    // accessor follows the standard protobuf-java pattern: operate on the
    // inline list while dataItemViewsBuilder_ is null, otherwise delegate to
    // the RepeatedFieldBuilderV3. NOTE(generated): do not edit by hand.
    // ---------------------------------------------------------------------
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.DataItemView> getDataItemViewsList() {
      if (dataItemViewsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(dataItemViews_);
      } else {
        return dataItemViewsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public int getDataItemViewsCount() {
      if (dataItemViewsBuilder_ == null) {
        return dataItemViews_.size();
      } else {
        return dataItemViewsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.DataItemView getDataItemViews(int index) {
      if (dataItemViewsBuilder_ == null) {
        return dataItemViews_.get(index);
      } else {
        return dataItemViewsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public Builder setDataItemViews(int index, com.google.cloud.aiplatform.v1.DataItemView value) {
      if (dataItemViewsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDataItemViewsIsMutable();
        dataItemViews_.set(index, value);
        onChanged();
      } else {
        dataItemViewsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public Builder setDataItemViews(
        int index, com.google.cloud.aiplatform.v1.DataItemView.Builder builderForValue) {
      if (dataItemViewsBuilder_ == null) {
        ensureDataItemViewsIsMutable();
        dataItemViews_.set(index, builderForValue.build());
        onChanged();
      } else {
        dataItemViewsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public Builder addDataItemViews(com.google.cloud.aiplatform.v1.DataItemView value) {
      if (dataItemViewsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDataItemViewsIsMutable();
        dataItemViews_.add(value);
        onChanged();
      } else {
        dataItemViewsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public Builder addDataItemViews(int index, com.google.cloud.aiplatform.v1.DataItemView value) {
      if (dataItemViewsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDataItemViewsIsMutable();
        dataItemViews_.add(index, value);
        onChanged();
      } else {
        dataItemViewsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public Builder addDataItemViews(
        com.google.cloud.aiplatform.v1.DataItemView.Builder builderForValue) {
      if (dataItemViewsBuilder_ == null) {
        ensureDataItemViewsIsMutable();
        dataItemViews_.add(builderForValue.build());
        onChanged();
      } else {
        dataItemViewsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public Builder addDataItemViews(
        int index, com.google.cloud.aiplatform.v1.DataItemView.Builder builderForValue) {
      if (dataItemViewsBuilder_ == null) {
        ensureDataItemViewsIsMutable();
        dataItemViews_.add(index, builderForValue.build());
        onChanged();
      } else {
        dataItemViewsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public Builder addAllDataItemViews(
        java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.DataItemView> values) {
      if (dataItemViewsBuilder_ == null) {
        ensureDataItemViewsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, dataItemViews_);
        onChanged();
      } else {
        dataItemViewsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public Builder clearDataItemViews() {
      if (dataItemViewsBuilder_ == null) {
        dataItemViews_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        dataItemViewsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public Builder removeDataItemViews(int index) {
      if (dataItemViewsBuilder_ == null) {
        ensureDataItemViewsIsMutable();
        dataItemViews_.remove(index);
        onChanged();
      } else {
        dataItemViewsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.DataItemView.Builder getDataItemViewsBuilder(int index) {
      return getDataItemViewsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.DataItemViewOrBuilder getDataItemViewsOrBuilder(
        int index) {
      if (dataItemViewsBuilder_ == null) {
        return dataItemViews_.get(index);
      } else {
        return dataItemViewsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.aiplatform.v1.DataItemViewOrBuilder>
        getDataItemViewsOrBuilderList() {
      if (dataItemViewsBuilder_ != null) {
        return dataItemViewsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(dataItemViews_);
      }
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.DataItemView.Builder addDataItemViewsBuilder() {
      return getDataItemViewsFieldBuilder()
          .addBuilder(com.google.cloud.aiplatform.v1.DataItemView.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.DataItemView.Builder addDataItemViewsBuilder(int index) {
      return getDataItemViewsFieldBuilder()
          .addBuilder(index, com.google.cloud.aiplatform.v1.DataItemView.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The DataItemViews read.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.DataItemView data_item_views = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.DataItemView.Builder>
        getDataItemViewsBuilderList() {
      return getDataItemViewsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3; from then on the inline list
    // reference is handed off to the builder and nulled out here.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.DataItemView,
            com.google.cloud.aiplatform.v1.DataItemView.Builder,
            com.google.cloud.aiplatform.v1.DataItemViewOrBuilder>
        getDataItemViewsFieldBuilder() {
      if (dataItemViewsBuilder_ == null) {
        dataItemViewsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1.DataItemView,
                com.google.cloud.aiplatform.v1.DataItemView.Builder,
                com.google.cloud.aiplatform.v1.DataItemViewOrBuilder>(
                dataItemViews_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        dataItemViews_ = null;
      }
      return dataItemViewsBuilder_;
    }
    // Field 2 `next_page_token`: stored as Object so it can hold either a
    // String or a lazily-decoded ByteString (standard generated-code caching).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     * Pass to
     * [SearchDataItemsRequest.page_token][google.cloud.aiplatform.v1.SearchDataItemsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and memoize the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     * Pass to
     * [SearchDataItemsRequest.page_token][google.cloud.aiplatform.v1.SearchDataItemsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Encode the cached String once and memoize the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     * Pass to
     * [SearchDataItemsRequest.page_token][google.cloud.aiplatform.v1.SearchDataItemsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     * Pass to
     * [SearchDataItemsRequest.page_token][google.cloud.aiplatform.v1.SearchDataItemsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     * Pass to
     * [SearchDataItemsRequest.page_token][google.cloud.aiplatform.v1.SearchDataItemsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Final overrides that pin unknown-field handling to the superclass
    // implementation (prevents subclasses from altering wire compatibility).
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.SearchDataItemsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.SearchDataItemsResponse)
  // Singleton default instance plus the message PARSER, shared by all
  // SearchDataItemsResponse objects. NOTE(generated): do not edit by hand.
  private static final com.google.cloud.aiplatform.v1.SearchDataItemsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.SearchDataItemsResponse();
  }
  public static com.google.cloud.aiplatform.v1.SearchDataItemsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<SearchDataItemsResponse> PARSER =
      new com.google.protobuf.AbstractParser<SearchDataItemsResponse>() {
        @java.lang.Override
        public SearchDataItemsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so callers can inspect partial data.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<SearchDataItemsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<SearchDataItemsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.SearchDataItemsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hadoop | 37,576 | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestQuotaByStorageType.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.SafeModeAction;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.QuotaByStorageTypeExceededException;
import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotTestHelper;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import java.io.IOException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.assertj.core.api.Assertions.assertThat;
public class TestQuotaByStorageType {
  // Small block size so quota limits can be hit with tiny files.
  private static final int BLOCKSIZE = 1024;
  // Default replication factor; matches the 3-node mini cluster.
  private static final short REPLICATION = 3;
  // Fixed seed so generated file contents are deterministic across runs.
  private static final long seed = 0L;
  // Root directory for all per-test paths.
  private static final Path dir = new Path("/TestQuotaByStorageType");
  // Refreshed via refreshClusterState() after every cluster (re)start.
  private MiniDFSCluster cluster;
  private FSDirectory fsdir;
  private DistributedFileSystem dfs;
  private FSNamesystem fsn;
  protected static final Logger LOG =
      LoggerFactory.getLogger(TestQuotaByStorageType.class);
@BeforeEach
public void setUp() throws Exception {
Configuration conf = new Configuration();
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCKSIZE);
// Setup a 3-node cluster and configure
// each node with 1 SSD and 1 DISK without capacity limitation
cluster = new MiniDFSCluster
.Builder(conf)
.numDataNodes(REPLICATION)
.storageTypes(new StorageType[]{StorageType.SSD, StorageType.DEFAULT})
.build();
cluster.waitActive();
refreshClusterState();
}
@AfterEach
public void tearDown() throws Exception {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
// Cluster state must be refreshed after each start/restart in the test
private void refreshClusterState() throws IOException{
fsdir = cluster.getNamesystem().getFSDirectory();
dfs = cluster.getFileSystem();
fsn = cluster.getNamesystem();
}
@Test
@Timeout(value = 60)
public void testQuotaByStorageTypeWithFileCreateOneSSD() throws Exception {
testQuotaByStorageTypeWithFileCreateCase(
HdfsConstants.ONESSD_STORAGE_POLICY_NAME,
StorageType.SSD,
(short)1);
}
@Test
@Timeout(value = 60)
public void testQuotaByStorageTypeWithFileCreateAllSSD() throws Exception {
testQuotaByStorageTypeWithFileCreateCase(
HdfsConstants.ALLSSD_STORAGE_POLICY_NAME,
StorageType.SSD,
(short)3);
}
  /**
   * Shared scenario: apply {@code storagePolicy} and a quota for
   * {@code storageType} on a directory, create one file under it, and verify
   * the consumed space of that storage type equals
   * file length * {@code replication} (the number of replicas the policy
   * places on that storage type).
   */
  void testQuotaByStorageTypeWithFileCreateCase(
      String storagePolicy, StorageType storageType, short replication) throws Exception {
    final Path foo = new Path(dir, "foo");
    Path createdFile1 = new Path(foo, "created_file1.data");
    dfs.mkdirs(foo);
    // set storage policy on directory "foo" to storagePolicy
    dfs.setStoragePolicy(foo, storagePolicy);
    // set quota by storage type on directory "foo"
    dfs.setQuotaByStorageType(foo, storageType, BLOCKSIZE * 10);
    INode fnode = fsdir.getINode4Write(foo.toString());
    assertTrue(fnode.isDirectory());
    assertTrue(fnode.isQuotaSet());
    // Create file of size 2 * BLOCKSIZE under directory "foo"
    long file1Len = BLOCKSIZE * 2 + BLOCKSIZE / 2;
    int bufLen = BLOCKSIZE / 16;
    DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
    // Verify space consumed and remaining quota
    long storageTypeConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
        .getSpaceConsumed().getTypeSpaces().get(storageType);
    assertEquals(file1Len * replication, storageTypeConsumed);
  }
  /**
   * Create then append to a file under a ONE_SSD directory and verify the
   * SSD type-space consumption tracks the growing file length, and that
   * ContentSummary agrees (1 SSD replica + 2 DISK replicas per byte).
   */
  @Test
  @Timeout(value = 60)
  public void testQuotaByStorageTypeWithFileCreateAppend() throws Exception {
    final Path foo = new Path(dir, "foo");
    Path createdFile1 = new Path(foo, "created_file1.data");
    dfs.mkdirs(foo);
    // set storage policy on directory "foo" to ONESSD
    dfs.setStoragePolicy(foo, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
    // set quota by storage type on directory "foo"
    dfs.setQuotaByStorageType(foo, StorageType.SSD, BLOCKSIZE * 4);
    INode fnode = fsdir.getINode4Write(foo.toString());
    assertTrue(fnode.isDirectory());
    assertTrue(fnode.isQuotaSet());
    // Create file of size 2 * BLOCKSIZE under directory "foo"
    long file1Len = BLOCKSIZE * 2;
    int bufLen = BLOCKSIZE / 16;
    DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
    // Verify space consumed and remaining quota
    long ssdConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
        .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
    assertEquals(file1Len, ssdConsumed);
    // append several blocks
    int appendLen = BLOCKSIZE * 2;
    DFSTestUtil.appendFile(dfs, createdFile1, appendLen);
    file1Len += appendLen;
    ssdConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
        .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
    assertEquals(file1Len, ssdConsumed);
    ContentSummary cs = dfs.getContentSummary(foo);
    assertEquals(cs.getSpaceConsumed(), file1Len * REPLICATION);
    // ONE_SSD places 1 replica on SSD and the remaining 2 on DISK.
    assertEquals(cs.getTypeConsumed(StorageType.SSD), file1Len);
    assertEquals(cs.getTypeConsumed(StorageType.DISK), file1Len * 2);
  }
  /**
   * Create a file under a ONE_SSD directory with an SSD quota, delete it, and
   * verify both the cached quota usage and a fresh computeQuotaUsage() walk
   * report zero SSD consumption afterwards.
   */
  @Test
  @Timeout(value = 60)
  public void testQuotaByStorageTypeWithFileCreateDelete() throws Exception {
    final Path foo = new Path(dir, "foo");
    Path createdFile1 = new Path(foo, "created_file1.data");
    dfs.mkdirs(foo);
    dfs.setStoragePolicy(foo, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
    // set quota by storage type on directory "foo"
    dfs.setQuotaByStorageType(foo, StorageType.SSD, BLOCKSIZE * 10);
    INode fnode = fsdir.getINode4Write(foo.toString());
    assertTrue(fnode.isDirectory());
    assertTrue(fnode.isQuotaSet());
    // Create file of size 2.5 * BLOCKSIZE under directory "foo"
    long file1Len = BLOCKSIZE * 2 + BLOCKSIZE / 2;
    int bufLen = BLOCKSIZE / 16;
    DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
    // Verify space consumed and remaining quota
    long storageTypeConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
        .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
    assertEquals(file1Len, storageTypeConsumed);
    // Delete file and verify the consumed space of the storage type is updated
    dfs.delete(createdFile1, false);
    storageTypeConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
        .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
    assertEquals(0, storageTypeConsumed);
    // Recompute from scratch (not the cached counters) and cross-check.
    QuotaCounts counts = fnode.computeQuotaUsage(
        fsn.getBlockManager().getStoragePolicySuite(), true);
    assertEquals(0, counts.getTypeSpaces().get(StorageType.SSD),
        fnode.dumpTreeRecursively().toString());
    ContentSummary cs = dfs.getContentSummary(foo);
    assertEquals(cs.getSpaceConsumed(), 0);
    assertEquals(cs.getTypeConsumed(StorageType.SSD), 0);
    assertEquals(cs.getTypeConsumed(StorageType.DISK), 0);
  }
@Test
@Timeout(value = 60)
public void testQuotaByStorageTypeWithFileCreateRename() throws Exception {
final Path foo = new Path(dir, "foo");
dfs.mkdirs(foo);
Path createdFile1foo = new Path(foo, "created_file1.data");
final Path bar = new Path(dir, "bar");
dfs.mkdirs(bar);
Path createdFile1bar = new Path(bar, "created_file1.data");
// set storage policy on directory "foo" and "bar" to ONESSD
dfs.setStoragePolicy(foo, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
dfs.setStoragePolicy(bar, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
// set quota by storage type on directory "foo"
dfs.setQuotaByStorageType(foo, StorageType.SSD, BLOCKSIZE * 4);
dfs.setQuotaByStorageType(bar, StorageType.SSD, BLOCKSIZE * 2);
INode fnode = fsdir.getINode4Write(foo.toString());
assertTrue(fnode.isDirectory());
assertTrue(fnode.isQuotaSet());
// Create file of size 3 * BLOCKSIZE under directory "foo"
long file1Len = BLOCKSIZE * 3;
int bufLen = BLOCKSIZE / 16;
DFSTestUtil.createFile(dfs, createdFile1foo, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
// Verify space consumed and remaining quota
long ssdConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
.getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
assertEquals(file1Len, ssdConsumed);
// move file from foo to bar
try {
dfs.rename(createdFile1foo, createdFile1bar);
fail("Should have failed with QuotaByStorageTypeExceededException ");
} catch (Throwable t) {
LOG.info("Got expected exception ", t);
}
ContentSummary cs = dfs.getContentSummary(foo);
assertEquals(cs.getSpaceConsumed(), file1Len * REPLICATION);
assertEquals(cs.getTypeConsumed(StorageType.SSD), file1Len);
assertEquals(cs.getTypeConsumed(StorageType.DISK), file1Len * 2);
}
/**
* Test if the quota can be correctly updated for create file even
* QuotaByStorageTypeExceededException is thrown
*/
@Test
@Timeout(value = 60)
public void testQuotaByStorageTypeExceptionWithFileCreate() throws Exception {
final Path foo = new Path(dir, "foo");
Path createdFile1 = new Path(foo, "created_file1.data");
dfs.mkdirs(foo);
dfs.setStoragePolicy(foo, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
dfs.setQuotaByStorageType(foo, StorageType.SSD, BLOCKSIZE * 4);
INode fnode = fsdir.getINode4Write(foo.toString());
assertTrue(fnode.isDirectory());
assertTrue(fnode.isQuotaSet());
// Create the 1st file of size 2 * BLOCKSIZE under directory "foo" and expect no exception
long file1Len = BLOCKSIZE * 2;
int bufLen = BLOCKSIZE / 16;
DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
long currentSSDConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
.getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
assertEquals(file1Len, currentSSDConsumed);
// Create the 2nd file of size 1.5 * BLOCKSIZE under directory "foo" and expect no exception
Path createdFile2 = new Path(foo, "created_file2.data");
long file2Len = BLOCKSIZE + BLOCKSIZE / 2;
DFSTestUtil.createFile(dfs, createdFile2, bufLen, file2Len, BLOCKSIZE, REPLICATION, seed);
currentSSDConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
.getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
assertEquals(file1Len + file2Len, currentSSDConsumed);
// Create the 3rd file of size BLOCKSIZE under directory "foo" and expect quota exceeded exception
Path createdFile3 = new Path(foo, "created_file3.data");
long file3Len = BLOCKSIZE;
try {
DFSTestUtil.createFile(dfs, createdFile3, bufLen, file3Len, BLOCKSIZE, REPLICATION, seed);
fail("Should have failed with QuotaByStorageTypeExceededException ");
} catch (Throwable t) {
LOG.info("Got expected exception ", t);
currentSSDConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
.getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
assertEquals(file1Len + file2Len, currentSSDConsumed);
}
}
  /**
   * Neither parent nor child has a storage-type quota: creates must succeed,
   * and the SSD usage is only tracked at the root (the nearest ancestor with
   * a DirectoryWithQuotaFeature).
   */
  @Test
  @Timeout(value = 60)
  public void testQuotaByStorageTypeParentOffChildOff() throws Exception {
    final Path parent = new Path(dir, "parent");
    final Path child = new Path(parent, "child");
    dfs.mkdirs(parent);
    dfs.mkdirs(child);
    dfs.setStoragePolicy(parent, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
    // Create file of size 2.5 * BLOCKSIZE under child directory.
    // Since both parent and child directory do not have SSD quota set,
    // expect succeed without exception
    Path createdFile1 = new Path(child, "created_file1.data");
    long file1Len = BLOCKSIZE * 2 + BLOCKSIZE / 2;
    int bufLen = BLOCKSIZE / 16;
    DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE,
        REPLICATION, seed);
    // Verify SSD usage at the root level as both parent/child don't have DirectoryWithQuotaFeature
    INode fnode = fsdir.getINode4Write("/");
    long ssdConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
        .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
    assertEquals(file1Len, ssdConsumed);
  }
@Test
@Timeout(value = 60)
public void testQuotaByStorageTypeParentOffChildOn() throws Exception {
final Path parent = new Path(dir, "parent");
final Path child = new Path(parent, "child");
dfs.mkdirs(parent);
dfs.mkdirs(child);
dfs.setStoragePolicy(parent, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
dfs.setQuotaByStorageType(child, StorageType.SSD, 2 * BLOCKSIZE);
// Create file of size 2.5 * BLOCKSIZE under child directory
// Since child directory have SSD quota of 2 * BLOCKSIZE,
// expect an exception when creating files under child directory.
Path createdFile1 = new Path(child, "created_file1.data");
long file1Len = BLOCKSIZE * 2 + BLOCKSIZE / 2;
int bufLen = BLOCKSIZE / 16;
try {
DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE,
REPLICATION, seed);
fail("Should have failed with QuotaByStorageTypeExceededException ");
} catch (Throwable t) {
LOG.info("Got expected exception ", t);
}
}
@Test
@Timeout(value = 60)
public void testQuotaByStorageTypeParentOnChildOff() throws Exception {
short replication = 1;
final Path parent = new Path(dir, "parent");
final Path child = new Path(parent, "child");
dfs.mkdirs(parent);
dfs.mkdirs(child);
dfs.setStoragePolicy(parent, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
dfs.setQuotaByStorageType(parent, StorageType.SSD, 3 * BLOCKSIZE);
// Create file of size 2.5 * BLOCKSIZE under child directory
// Verify parent Quota applies
Path createdFile1 = new Path(child, "created_file1.data");
long file1Len = BLOCKSIZE * 2 + BLOCKSIZE / 2;
int bufLen = BLOCKSIZE / 16;
DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE,
replication, seed);
INode fnode = fsdir.getINode4Write(parent.toString());
assertTrue(fnode.isDirectory());
assertTrue(fnode.isQuotaSet());
long currentSSDConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
.getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
assertEquals(file1Len, currentSSDConsumed);
// Create the 2nd file of size BLOCKSIZE under child directory and expect quota exceeded exception
Path createdFile2 = new Path(child, "created_file2.data");
long file2Len = BLOCKSIZE;
try {
DFSTestUtil.createFile(dfs, createdFile2, bufLen, file2Len, BLOCKSIZE, replication, seed);
fail("Should have failed with QuotaByStorageTypeExceededException ");
} catch (Throwable t) {
LOG.info("Got expected exception ", t);
currentSSDConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
.getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
assertEquals(file1Len, currentSSDConsumed);
}
}
@Test
@Timeout(value = 60)
public void testQuotaByStorageTypeParentOnChildOn() throws Exception {
final Path parent = new Path(dir, "parent");
final Path child = new Path(parent, "child");
dfs.mkdirs(parent);
dfs.mkdirs(child);
dfs.setStoragePolicy(parent, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
dfs.setQuotaByStorageType(parent, StorageType.SSD, 2 * BLOCKSIZE);
dfs.setQuotaByStorageType(child, StorageType.SSD, 3 * BLOCKSIZE);
// Create file of size 2.5 * BLOCKSIZE under child directory
// Verify parent Quota applies
Path createdFile1 = new Path(child, "created_file1.data");
long file1Len = BLOCKSIZE * 2 + BLOCKSIZE / 2;
int bufLen = BLOCKSIZE / 16;
try {
DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE,
REPLICATION, seed);
fail("Should have failed with QuotaByStorageTypeExceededException ");
} catch (Throwable t) {
LOG.info("Got expected exception ", t);
}
}
  /**
   * Both traditional space quota and the storage type quota for SSD are set and
   * not exceeded.
   */
  @Test
  @Timeout(value = 60)
  public void testQuotaByStorageTypeWithTraditionalQuota() throws Exception {
    final Path foo = new Path(dir, "foo");
    dfs.mkdirs(foo);
    dfs.setStoragePolicy(foo, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
    dfs.setQuotaByStorageType(foo, StorageType.SSD, BLOCKSIZE * 10);
    dfs.setQuota(foo, Long.MAX_VALUE - 1, REPLICATION * BLOCKSIZE * 10);
    INode fnode = fsdir.getINode4Write(foo.toString());
    assertTrue(fnode.isDirectory());
    assertTrue(fnode.isQuotaSet());
    Path createdFile = new Path(foo, "created_file.data");
    long fileLen = BLOCKSIZE * 2 + BLOCKSIZE / 2;
    DFSTestUtil.createFile(dfs, createdFile, BLOCKSIZE / 16,
        fileLen, BLOCKSIZE, REPLICATION, seed);
    QuotaCounts cnt = fnode.asDirectory().getDirectoryWithQuotaFeature()
        .getSpaceConsumed();
    // Namespace = directory "foo" itself + the one file.
    assertEquals(2, cnt.getNameSpace());
    assertEquals(fileLen * REPLICATION, cnt.getStorageSpace());
    dfs.delete(createdFile, true);
    QuotaCounts cntAfterDelete = fnode.asDirectory().getDirectoryWithQuotaFeature()
        .getSpaceConsumed();
    // Only the directory inode remains after the delete.
    assertEquals(1, cntAfterDelete.getNameSpace());
    assertEquals(0, cntAfterDelete.getStorageSpace());
    // Validate the computeQuotaUsage()
    QuotaCounts counts = fnode.computeQuotaUsage(
        fsn.getBlockManager().getStoragePolicySuite(), true);
    assertEquals(1, counts.getNameSpace(), fnode.dumpTreeRecursively().toString());
    assertEquals(0, counts.getStorageSpace(), fnode.dumpTreeRecursively().toString());
  }
  /**
   * Both traditional space quota and the storage type quota for SSD are set and
   * exceeded. expect DSQuotaExceededException is thrown as we check traditional
   * space quota first and then storage type quota.
   */
  @Test
  @Timeout(value = 60)
  public void testQuotaByStorageTypeAndTraditionalQuotaException1()
      throws Exception {
    // storageSpaceQuota = 4 blocks * replication, ssdQuota = 4 blocks,
    // file = 5 blocks: both quotas are violated.
    testQuotaByStorageTypeOrTraditionalQuotaExceededCase(
        4 * REPLICATION, 4, 5, REPLICATION);
  }
  /**
   * Both traditional space quota and the storage type quota for SSD are set and
   * SSD quota is exceeded but traditional space quota is not exceeded.
   */
  @Test
  @Timeout(value = 60)
  public void testQuotaByStorageTypeAndTraditionalQuotaException2()
      throws Exception {
    // storageSpaceQuota covers 5 blocks, ssdQuota only 4: SSD quota trips.
    testQuotaByStorageTypeOrTraditionalQuotaExceededCase(
        5 * REPLICATION, 4, 5, REPLICATION);
  }
  /**
   * Both traditional space quota and the storage type quota for SSD are set and
   * traditional space quota is exceeded but SSD quota is not exceeded.
   */
  @Test
  @Timeout(value = 60)
  public void testQuotaByStorageTypeAndTraditionalQuotaException3()
      throws Exception {
    // storageSpaceQuota covers only 4 blocks, ssdQuota 5: space quota trips.
    testQuotaByStorageTypeOrTraditionalQuotaExceededCase(
        4 * REPLICATION, 5, 5, REPLICATION);
  }
/*
 * Shared driver for the three quota-exception tests above. Sets both a
 * traditional space quota and an SSD storage-type quota on a fresh ONE_SSD
 * directory, writes a file large enough to violate at least one of them,
 * and then verifies how much SSD space was consumed before the failure.
 */
private void testQuotaByStorageTypeOrTraditionalQuotaExceededCase(
long storageSpaceQuotaInBlocks, long ssdQuotaInBlocks,
long testFileLenInBlocks, short replication) throws Exception {
final String METHOD_NAME = GenericTestUtils.getMethodName();
final Path testDir = new Path(dir, METHOD_NAME);
dfs.mkdirs(testDir);
// ONE_SSD policy: one replica goes to SSD, the remaining replicas to DISK.
dfs.setStoragePolicy(testDir, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
final long ssdQuota = BLOCKSIZE * ssdQuotaInBlocks;
final long storageSpaceQuota = BLOCKSIZE * storageSpaceQuotaInBlocks;
// Namespace quota is effectively unlimited; only the space quotas matter.
dfs.setQuota(testDir, Long.MAX_VALUE - 1, storageSpaceQuota);
dfs.setQuotaByStorageType(testDir, StorageType.SSD, ssdQuota);
INode testDirNode = fsdir.getINode4Write(testDir.toString());
assertTrue(testDirNode.isDirectory());
assertTrue(testDirNode.isQuotaSet());
Path createdFile = new Path(testDir, "created_file.data");
long fileLen = testFileLenInBlocks * BLOCKSIZE;
try {
DFSTestUtil.createFile(dfs, createdFile, BLOCKSIZE / 16,
fileLen, BLOCKSIZE, replication, seed);
fail("Should have failed with DSQuotaExceededException or " +
"QuotaByStorageTypeExceededException ");
} catch (Throwable t) {
LOG.info("Got expected exception ", t);
// Whichever quota tripped first, the SSD consumption at failure time is
// bounded by the smaller of the SSD quota and the per-replica share of
// the traditional space quota.
long currentSSDConsumed = testDirNode.asDirectory().getDirectoryWithQuotaFeature()
.getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
assertEquals(Math.min(ssdQuota, storageSpaceQuota / replication), currentSSDConsumed);
}
}
/**
 * Verifies SSD quota-usage accounting around snapshots: usage is unchanged
 * by snapshot creation, retained while a snapshot still references deleted
 * blocks, and fully reclaimed once the snapshot is removed.
 */
@Test
@Timeout(value = 60)
public void testQuotaByStorageTypeWithSnapshot() throws Exception {
  final Path sub1 = new Path(dir, "Sub1");
  dfs.mkdirs(sub1);
  // Setup ONE_SSD policy and SSD quota of 4 * BLOCKSIZE on sub1
  dfs.setStoragePolicy(sub1, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
  dfs.setQuotaByStorageType(sub1, StorageType.SSD, 4 * BLOCKSIZE);
  INode sub1Node = fsdir.getINode4Write(sub1.toString());
  assertTrue(sub1Node.isDirectory());
  assertTrue(sub1Node.isQuotaSet());
  // Create file1 of size 2 * BLOCKSIZE under sub1
  Path file1 = new Path(sub1, "file1");
  long file1Len = 2 * BLOCKSIZE;
  DFSTestUtil.createFile(dfs, file1, file1Len, REPLICATION, seed);
  // Create snapshot on sub1 named s1
  SnapshotTestHelper.createSnapshot(dfs, sub1, "s1");
  // Verify sub1 SSD usage is unchanged after creating snapshot s1
  long ssdConsumed = sub1Node.asDirectory().getDirectoryWithQuotaFeature()
      .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
  assertEquals(file1Len, ssdConsumed);
  // Delete file1
  dfs.delete(file1, false);
  // Verify sub1 SSD usage is unchanged due to the existence of snapshot s1
  ssdConsumed = sub1Node.asDirectory().getDirectoryWithQuotaFeature()
      .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
  assertEquals(file1Len, ssdConsumed);
  QuotaCounts counts1 = sub1Node.computeQuotaUsage(
      fsn.getBlockManager().getStoragePolicySuite(), true);
  assertEquals(file1Len, counts1.getTypeSpaces().get(StorageType.SSD),
      sub1Node.dumpTreeRecursively().toString());
  // NOTE: JUnit's assertEquals is (expected, actual); the expectation goes
  // first so failure messages are not misleading.
  ContentSummary cs1 = dfs.getContentSummary(sub1);
  assertEquals(file1Len * REPLICATION, cs1.getSpaceConsumed());
  assertEquals(file1Len, cs1.getTypeConsumed(StorageType.SSD));
  assertEquals(file1Len * 2, cs1.getTypeConsumed(StorageType.DISK));
  // Delete the snapshot s1
  dfs.deleteSnapshot(sub1, "s1");
  // Verify sub1 SSD usage is fully reclaimed and changed to 0
  ssdConsumed = sub1Node.asDirectory().getDirectoryWithQuotaFeature()
      .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
  assertEquals(0, ssdConsumed);
  QuotaCounts counts2 = sub1Node.computeQuotaUsage(
      fsn.getBlockManager().getStoragePolicySuite(), true);
  assertEquals(0, counts2.getTypeSpaces().get(StorageType.SSD),
      sub1Node.dumpTreeRecursively().toString());
  ContentSummary cs2 = dfs.getContentSummary(sub1);
  assertEquals(0, cs2.getSpaceConsumed());
  assertEquals(0, cs2.getTypeConsumed(StorageType.SSD));
  assertEquals(0, cs2.getTypeConsumed(StorageType.DISK));
}
/**
 * Verifies that SSD quota consumption tracks file truncation: usage drops
 * from the original file length to the truncated length.
 */
@Test
@Timeout(value = 60)
public void testQuotaByStorageTypeWithFileCreateTruncate() throws Exception {
  final Path foo = new Path(dir, "foo");
  Path createdFile1 = new Path(foo, "created_file1.data");
  dfs.mkdirs(foo);
  // set storage policy on directory "foo" to ONESSD
  dfs.setStoragePolicy(foo, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
  // set quota by storage type on directory "foo"
  dfs.setQuotaByStorageType(foo, StorageType.SSD, BLOCKSIZE * 4);
  INode fnode = fsdir.getINode4Write(foo.toString());
  assertTrue(fnode.isDirectory());
  assertTrue(fnode.isQuotaSet());
  // Create file of size 2 * BLOCKSIZE under directory "foo"
  long file1Len = BLOCKSIZE * 2;
  int bufLen = BLOCKSIZE / 16;
  DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
  // Verify SSD consumed before truncate
  long ssdConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
      .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
  assertEquals(file1Len, ssdConsumed);
  // Truncate file to 1 * BLOCKSIZE
  int newFile1Len = BLOCKSIZE;
  dfs.truncate(createdFile1, newFile1Len);
  // Verify SSD consumed after truncate
  ssdConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
      .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
  assertEquals(newFile1Len, ssdConsumed);
  // assertEquals is (expected, actual): expectation first so a failure
  // report reads correctly.
  ContentSummary cs = dfs.getContentSummary(foo);
  assertEquals(newFile1Len * REPLICATION, cs.getSpaceConsumed());
  assertEquals(newFile1Len, cs.getTypeConsumed(StorageType.SSD));
  assertEquals(newFile1Len * 2, cs.getTypeConsumed(StorageType.DISK));
}
/**
 * Verifies that quota-by-storage-type settings survive a namenode restart,
 * i.e. are correctly persisted to and replayed from the edit log.
 */
@Test
public void testQuotaByStorageTypePersistenceInEditLog() throws IOException {
  final String METHOD_NAME = GenericTestUtils.getMethodName();
  final Path testDir = new Path(dir, METHOD_NAME);
  Path createdFile1 = new Path(testDir, "created_file1.data");
  dfs.mkdirs(testDir);
  // set storage policy on testDir to ONESSD
  dfs.setStoragePolicy(testDir, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
  // set quota by storage type on testDir
  final long SSD_QUOTA = BLOCKSIZE * 4;
  dfs.setQuotaByStorageType(testDir, StorageType.SSD, SSD_QUOTA);
  INode testDirNode = fsdir.getINode4Write(testDir.toString());
  assertTrue(testDirNode.isDirectory());
  assertTrue(testDirNode.isQuotaSet());
  // Create file of size 2 * BLOCKSIZE under testDir
  long file1Len = BLOCKSIZE * 2;
  int bufLen = BLOCKSIZE / 16;
  DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
  // Verify SSD consumed before namenode restart
  long ssdConsumed = testDirNode.asDirectory().getDirectoryWithQuotaFeature()
      .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
  assertEquals(file1Len, ssdConsumed);
  // Restart namenode to make sure the editlog is correct
  cluster.restartNameNode(true);
  refreshClusterState();
  INode testDirNodeAfterNNRestart = fsdir.getINode4Write(testDir.toString());
  // Verify quota is still set on the INode re-read AFTER the restart.
  // (Asserting on the pre-restart reference would only re-check stale
  // in-memory state, not what was replayed from the edit log.)
  assertTrue(testDirNodeAfterNNRestart.isDirectory());
  assertTrue(testDirNodeAfterNNRestart.isQuotaSet());
  QuotaCounts qc = testDirNodeAfterNNRestart.getQuotaCounts();
  assertEquals(SSD_QUOTA, qc.getTypeSpace(StorageType.SSD));
  for (StorageType t : StorageType.getTypesSupportingQuota()) {
    if (t != StorageType.SSD) {
      assertEquals(HdfsConstants.QUOTA_RESET, qc.getTypeSpace(t));
    }
  }
  long ssdConsumedAfterNNRestart = testDirNodeAfterNNRestart.asDirectory()
      .getDirectoryWithQuotaFeature()
      .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
  assertEquals(file1Len, ssdConsumedAfterNNRestart);
}
/**
 * Verifies that quota-by-storage-type settings survive a checkpoint plus
 * namenode restart, i.e. are correctly persisted in the fsimage.
 */
@Test
public void testQuotaByStorageTypePersistenceInFsImage() throws IOException {
  final String METHOD_NAME = GenericTestUtils.getMethodName();
  final Path testDir = new Path(dir, METHOD_NAME);
  Path createdFile1 = new Path(testDir, "created_file1.data");
  dfs.mkdirs(testDir);
  // set storage policy on testDir to ONESSD
  dfs.setStoragePolicy(testDir, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
  // set quota by storage type on testDir
  final long SSD_QUOTA = BLOCKSIZE * 4;
  dfs.setQuotaByStorageType(testDir, StorageType.SSD, SSD_QUOTA);
  INode testDirNode = fsdir.getINode4Write(testDir.toString());
  assertTrue(testDirNode.isDirectory());
  assertTrue(testDirNode.isQuotaSet());
  // Create file of size 2 * BLOCKSIZE under testDir
  long file1Len = BLOCKSIZE * 2;
  int bufLen = BLOCKSIZE / 16;
  DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
  // Verify SSD consumed before namenode restart
  long ssdConsumed = testDirNode.asDirectory().getDirectoryWithQuotaFeature()
      .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
  assertEquals(file1Len, ssdConsumed);
  // Restart the namenode with checkpoint to make sure fsImage is correct
  dfs.setSafeMode(SafeModeAction.ENTER);
  dfs.saveNamespace();
  dfs.setSafeMode(SafeModeAction.LEAVE);
  cluster.restartNameNode(true);
  refreshClusterState();
  INode testDirNodeAfterNNRestart = fsdir.getINode4Write(testDir.toString());
  // Assert on the INode re-read AFTER the restart; the pre-restart
  // reference would only re-check stale in-memory state, not the fsimage.
  assertTrue(testDirNodeAfterNNRestart.isDirectory());
  assertTrue(testDirNodeAfterNNRestart.isQuotaSet());
  QuotaCounts qc = testDirNodeAfterNNRestart.getQuotaCounts();
  assertEquals(SSD_QUOTA, qc.getTypeSpace(StorageType.SSD));
  for (StorageType t : StorageType.getTypesSupportingQuota()) {
    if (t != StorageType.SSD) {
      assertEquals(HdfsConstants.QUOTA_RESET, qc.getTypeSpace(t));
    }
  }
  long ssdConsumedAfterNNRestart = testDirNodeAfterNNRestart.asDirectory()
      .getDirectoryWithQuotaFeature()
      .getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
  assertEquals(file1Len, ssdConsumedAfterNNRestart);
}
/**
 * Verifies getContentSummary reports per-storage-type consumption for a
 * directory that has a storage policy but no quota of any kind set.
 */
@Test
@Timeout(value = 60)
public void testContentSummaryWithoutQuotaByStorageType() throws Exception {
  final Path foo = new Path(dir, "foo");
  Path createdFile1 = new Path(foo, "created_file1.data");
  dfs.mkdirs(foo);
  // set storage policy on directory "foo" to ONESSD
  dfs.setStoragePolicy(foo, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
  INode fnode = fsdir.getINode4Write(foo.toString());
  assertTrue(fnode.isDirectory());
  assertTrue(!fnode.isQuotaSet());
  // Create file of size 2 * BLOCKSIZE under directory "foo"
  long file1Len = BLOCKSIZE * 2;
  int bufLen = BLOCKSIZE / 16;
  DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
  // Verify getContentSummary without any quota set.
  // assertEquals is (expected, actual): expectation first so failure
  // messages are not misleading.
  ContentSummary cs = dfs.getContentSummary(foo);
  assertEquals(file1Len * REPLICATION, cs.getSpaceConsumed());
  assertEquals(file1Len, cs.getTypeConsumed(StorageType.SSD));
  assertEquals(file1Len * 2, cs.getTypeConsumed(StorageType.DISK));
}
/**
 * Verifies getContentSummary for a directory with neither a storage policy
 * nor any quota set: no per-type usage is tracked and all type quotas
 * report as unset (-1).
 */
@Test
@Timeout(value = 60)
public void testContentSummaryWithoutStoragePolicy() throws Exception {
  final Path foo = new Path(dir, "foo");
  Path createdFile1 = new Path(foo, "created_file1.data");
  dfs.mkdirs(foo);
  INode fnode = fsdir.getINode4Write(foo.toString());
  assertTrue(fnode.isDirectory());
  assertTrue(!fnode.isQuotaSet());
  // Create file of size 2 * BLOCKSIZE under directory "foo"
  long file1Len = BLOCKSIZE * 2;
  int bufLen = BLOCKSIZE / 16;
  DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
  // Verify getContentSummary without any quota set.
  // Expect no type quota and usage information available.
  // assertEquals is (expected, actual): expectation first.
  ContentSummary cs = dfs.getContentSummary(foo);
  assertEquals(file1Len * REPLICATION, cs.getSpaceConsumed());
  for (StorageType t : StorageType.values()) {
    assertEquals(0, cs.getTypeConsumed(t));
    assertEquals(-1, cs.getTypeQuota(t));
  }
}
/**
 * Tests space quota for storage policy = WARM: a file written under HOT
 * fills the DISK type, then a small DISK type-quota plus a switch to WARM
 * makes the next write fail with QuotaByStorageTypeExceededException.
 */
@Test
public void testStorageSpaceQuotaWithWarmPolicy() throws IOException {
final Path testDir = new Path(dir,
GenericTestUtils.getMethodName());
assertTrue(dfs.mkdirs(testDir));
/* set policy to HOT: all replicas are placed on DISK */
dfs.setStoragePolicy(testDir, HdfsConstants.HOT_STORAGE_POLICY_NAME);
/* init space quota */
final long storageSpaceQuota = BLOCKSIZE * 6;
final long storageTypeSpaceQuota = BLOCKSIZE * 1;
/* set the traditional (aggregate) space quota only, for now */
dfs.setQuota(testDir, HdfsConstants.QUOTA_DONT_SET, storageSpaceQuota);
/* init vars */
Path createdFile;
final long fileLen = BLOCKSIZE;
// create one file with 3 replicas; REPLICATION * BLOCKSIZE go to DISK due
// to the HOT policy
createdFile = new Path(testDir, "file1.data");
DFSTestUtil.createFile(dfs, createdFile, BLOCKSIZE / 16, fileLen, BLOCKSIZE,
REPLICATION, seed);
assertTrue(dfs.exists(createdFile));
assertTrue(dfs.isFile(createdFile));
/* set space quota for DISK, smaller than what is already consumed */
dfs.setQuotaByStorageType(testDir, StorageType.DISK, storageTypeSpaceQuota);
/* set policy to WARM so DISK consumption is now checked per type */
dfs.setStoragePolicy(testDir, HdfsConstants.WARM_STORAGE_POLICY_NAME);
/* create another file with 3 replicas */
try {
createdFile = new Path(testDir, "file2.data");
// This will fail since quota on DISK is 1 block but space consumed on
// DISK is already 3 blocks due to the first file creation.
DFSTestUtil.createFile(dfs, createdFile, BLOCKSIZE / 16, fileLen,
BLOCKSIZE, REPLICATION, seed);
fail("should fail on QuotaByStorageTypeExceededException");
} catch (QuotaByStorageTypeExceededException e) {
LOG.info("Got expected exception ", e);
// The exception message must identify the storage type and the path.
assertThat(e.toString())
.contains("Quota by storage type",
"DISK on path",
testDir.toString());
}
}
/**
 * Verifies the interaction of replication factor and storage-space quota:
 * writing with the full replication factor is rejected once it would
 * exceed the quota, while a smaller replication factor that fits under the
 * same quota succeeds.
 */
@Test
@Timeout(value = 30)
public void testStorageSpaceQuotaWithRepFactor() throws IOException {
  final Path testDir = new Path(dir, GenericTestUtils.getMethodName());
  assertTrue(dfs.mkdirs(testDir));
  final long spaceQuota = BLOCKSIZE * 2;
  // HOT policy: every replica lands on DISK.
  dfs.setStoragePolicy(testDir, HdfsConstants.HOT_STORAGE_POLICY_NAME);
  dfs.setQuota(testDir, HdfsConstants.QUOTA_DONT_SET, spaceQuota);
  final long fileLen = BLOCKSIZE;
  Path target = new Path(testDir, "file1.data");
  try {
    // Three replicas need 3 * BLOCKSIZE, exceeding the 2-block quota.
    DFSTestUtil.createFile(dfs, target, BLOCKSIZE / 16, fileLen,
        BLOCKSIZE, REPLICATION, seed);
    fail("should fail on DSQuotaExceededException");
  } catch (DSQuotaExceededException e) {
    LOG.info("Got expected exception ", e);
    assertThat(e.toString())
        .contains("DiskSpace quota", testDir.toString());
  }
  // Two replicas consume exactly 2 * BLOCKSIZE and fit under the quota.
  target = new Path(testDir, "file2.data");
  DFSTestUtil.createFile(dfs, target, BLOCKSIZE / 16, fileLen, BLOCKSIZE,
      (short) 2, seed);
  assertTrue(dfs.exists(target));
  assertTrue(dfs.isFile(target));
}
/**
 * Tests that clearing the quota for one heterogeneous storage type (DISK)
 * does not clear the quota set for another storage type (SSD).
 *
 * @throws IOException on cluster or file-system errors
 */
@Test
@Timeout(value = 30)
public void testStorageSpaceQuotaPerQuotaClear() throws IOException {
  final Path testDir = new Path(dir,
      GenericTestUtils.getMethodName());
  assertTrue(dfs.mkdirs(testDir));
  final long diskSpaceQuota = BLOCKSIZE * 1;
  final long ssdSpaceQuota = BLOCKSIZE * 2;
  /* set space quota */
  dfs.setQuotaByStorageType(testDir, StorageType.DISK, diskSpaceQuota);
  dfs.setQuotaByStorageType(testDir, StorageType.SSD, ssdSpaceQuota);
  final INode testDirNode = fsdir.getINode4Write(testDir.toString());
  assertTrue(testDirNode.isDirectory());
  assertTrue(testDirNode.isQuotaSet());
  /* verify space quota by storage type */
  assertEquals(diskSpaceQuota,
      testDirNode.asDirectory().getDirectoryWithQuotaFeature().getQuota()
          .getTypeSpace(StorageType.DISK));
  assertEquals(ssdSpaceQuota,
      testDirNode.asDirectory().getDirectoryWithQuotaFeature().getQuota()
          .getTypeSpace(StorageType.SSD));
  /* clear DISK space quota */
  dfs.setQuotaByStorageType(
      testDir,
      StorageType.DISK,
      HdfsConstants.QUOTA_RESET);
  /* verify space quota by storage type after clearing DISK's; use the
     named QUOTA_RESET constant rather than the magic literal -1, matching
     the rest of this file */
  assertEquals(HdfsConstants.QUOTA_RESET,
      testDirNode.asDirectory().getDirectoryWithQuotaFeature().getQuota()
          .getTypeSpace(StorageType.DISK));
  assertEquals(ssdSpaceQuota,
      testDirNode.asDirectory().getDirectoryWithQuotaFeature().getQuota()
          .getTypeSpace(StorageType.SSD));
}
}
|
googleapis/google-cloud-java | 37,458 | java-datalabeling/proto-google-cloud-datalabeling-v1beta1/src/main/java/com/google/cloud/datalabeling/v1beta1/ListExamplesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datalabeling/v1beta1/data_labeling_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datalabeling.v1beta1;
/**
*
*
* <pre>
* Request message for ListExamples.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ListExamplesRequest}
*/
public final class ListExamplesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datalabeling.v1beta1.ListExamplesRequest)
ListExamplesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListExamplesRequest.newBuilder() to construct.
private ListExamplesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListExamplesRequest() {
parent_ = "";
filter_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListExamplesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListExamplesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListExamplesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ListExamplesRequest.class,
com.google.cloud.datalabeling.v1beta1.ListExamplesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Example resource parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
// Generated accessor: decodes the lazily-held ByteString form of parent_
// into a String on first access and memoizes the decoded value.
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * Required. Example resource parent.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
// Converse of getParent(): memoizes the UTF-8 ByteString encoding when the
// field is currently held as a String.
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. An expression for filtering Examples. For annotated datasets that
* have annotation spec set, filter by
* annotation_spec.display_name is supported. Format
* "annotation_spec.display_name = {display_name}"
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
// Generated accessor pair for the `filter` field: same lazy
// String/ByteString memoization pattern as getParent()/getParentBytes().
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * Optional. An expression for filtering Examples. For annotated datasets that
 * have annotation spec set, filter by
 * annotation_spec.display_name is supported. Format
 * "annotation_spec.display_name = {display_name}"
 * </pre>
 *
 * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for filter.
 */
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. Requested page size. Server may return fewer results than
* requested. Default value is 100.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
// Generated scalar accessor for the `page_size` field (proto3 default 0).
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListExamplesResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListExamplesResponse.next_page_token] of the previous
* [DataLabelingService.ListExamples] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
// Generated accessor pair for the `page_token` field: same lazy
// String/ByteString memoization pattern as the other string fields.
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * Optional. A token identifying a page of results for the server to return.
 * Typically obtained by
 * [ListExamplesResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListExamplesResponse.next_page_token] of the previous
 * [DataLabelingService.ListExamples] call.
 * Return first page if empty.
 * </pre>
 *
 * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
// Generated initialization check, memoized in memoizedIsInitialized
// (-1 = unknown, 0 = not initialized, 1 = initialized). This message has
// no required proto2 fields, so it always ends up initialized.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Generated wire serialization: emits each field only when it differs from
// its proto3 default (empty string / 0), then appends unknown fields.
// Field numbers must match the .proto definition exactly.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
}
if (pageSize_ != 0) {
output.writeInt32(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
}
getUnknownFields().writeTo(output);
}
// Generated size computation, memoized in memoizedSize; must mirror the
// conditions in writeTo() exactly so the computed size matches the bytes
// actually written.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Generated field-by-field equality, including unknown fields; paired with
// the generated hashCode() below.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datalabeling.v1beta1.ListExamplesRequest)) {
return super.equals(obj);
}
com.google.cloud.datalabeling.v1beta1.ListExamplesRequest other =
(com.google.cloud.datalabeling.v1beta1.ListExamplesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Generated hash over the descriptor, every field (tag number mixed in),
// and unknown fields; memoized in memoizedHashCode. Consistent with
// equals() above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Generated parse entry points: thin wrappers that feed each supported
// input kind (ByteBuffer, ByteString, byte[], InputStream,
// CodedInputStream, plus length-delimited stream variants) through the
// shared PARSER, with and without an extension registry.
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Generated builder factories: fresh builders come from the default
// instance; toBuilder() pre-populates from this message (the default
// instance short-circuits to an empty builder).
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datalabeling.v1beta1.ListExamplesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
// Internal variant used by the runtime to attach a parent for change
// notifications on nested builders.
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for ListExamples.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ListExamplesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datalabeling.v1beta1.ListExamplesRequest)
com.google.cloud.datalabeling.v1beta1.ListExamplesRequestOrBuilder {
// Generated descriptor plumbing for the Builder: both methods resolve to
// the statics initialized in DataLabelingServiceOuterClass.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListExamplesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListExamplesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ListExamplesRequest.class,
com.google.cloud.datalabeling.v1beta1.ListExamplesRequest.Builder.class);
}
// Construct using com.google.cloud.datalabeling.v1beta1.ListExamplesRequest.newBuilder()
private Builder() {}
// Parent-aware constructor used by the runtime for nested-builder change
// notification.
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets every field to its proto3 default and clears the has-bits.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
filter_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
// Generated type accessors: the message descriptor and the shared default
// (empty) instance for this message type.
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListExamplesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListExamplesRequest getDefaultInstanceForType() {
return com.google.cloud.datalabeling.v1beta1.ListExamplesRequest.getDefaultInstance();
}
// build() enforces the initialization contract on top of buildPartial(),
// which copies only the fields whose has-bits are set (via buildPartial0)
// and notifies the runtime through onBuilt().
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListExamplesRequest build() {
com.google.cloud.datalabeling.v1beta1.ListExamplesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListExamplesRequest buildPartial() {
com.google.cloud.datalabeling.v1beta1.ListExamplesRequest result =
new com.google.cloud.datalabeling.v1beta1.ListExamplesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.datalabeling.v1beta1.ListExamplesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.pageToken_ = pageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datalabeling.v1beta1.ListExamplesRequest) {
return mergeFrom((com.google.cloud.datalabeling.v1beta1.ListExamplesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datalabeling.v1beta1.ListExamplesRequest other) {
if (other == com.google.cloud.datalabeling.v1beta1.ListExamplesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Example resource parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Example resource parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Example resource parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Example resource parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Example resource parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. An expression for filtering Examples. For annotated datasets that
* have annotation spec set, filter by
* annotation_spec.display_name is supported. Format
* "annotation_spec.display_name = {display_name}"
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. An expression for filtering Examples. For annotated datasets that
* have annotation spec set, filter by
* annotation_spec.display_name is supported. Format
* "annotation_spec.display_name = {display_name}"
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. An expression for filtering Examples. For annotated datasets that
* have annotation spec set, filter by
* annotation_spec.display_name is supported. Format
* "annotation_spec.display_name = {display_name}"
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An expression for filtering Examples. For annotated datasets that
* have annotation spec set, filter by
* annotation_spec.display_name is supported. Format
* "annotation_spec.display_name = {display_name}"
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An expression for filtering Examples. For annotated datasets that
* have annotation spec set, filter by
* annotation_spec.display_name is supported. Format
* "annotation_spec.display_name = {display_name}"
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. Requested page size. Server may return fewer results than
* requested. Default value is 100.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. Requested page size. Server may return fewer results than
* requested. Default value is 100.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Requested page size. Server may return fewer results than
* requested. Default value is 100.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListExamplesResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListExamplesResponse.next_page_token] of the previous
* [DataLabelingService.ListExamples] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListExamplesResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListExamplesResponse.next_page_token] of the previous
* [DataLabelingService.ListExamples] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListExamplesResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListExamplesResponse.next_page_token] of the previous
* [DataLabelingService.ListExamples] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListExamplesResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListExamplesResponse.next_page_token] of the previous
* [DataLabelingService.ListExamples] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListExamplesResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListExamplesResponse.next_page_token] of the previous
* [DataLabelingService.ListExamples] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datalabeling.v1beta1.ListExamplesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListExamplesRequest)
// Singleton default instance; all fields at their proto3 defaults.
private static final com.google.cloud.datalabeling.v1beta1.ListExamplesRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.datalabeling.v1beta1.ListExamplesRequest();
}
/** Returns the shared immutable default instance of this message. */
public static com.google.cloud.datalabeling.v1beta1.ListExamplesRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. Delegates to Builder.mergeFrom and, on failure, attaches
// the partially-built message to the thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<ListExamplesRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListExamplesRequest>() {
      @java.lang.Override
      public ListExamplesRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so callers see a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the wire-format parser for this message type. */
public static com.google.protobuf.Parser<ListExamplesRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListExamplesRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListExamplesRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/druid | 36,971 | extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/data/input/kinesis/KinesisInputFormatTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.data.input.kinesis;
import com.amazonaws.services.kinesis.model.Record;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.apache.druid.data.input.ColumnsFilter;
import org.apache.druid.data.input.InputEntityReader;
import org.apache.druid.data.input.InputFormat;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.InputRowListPlusRawValues;
import org.apache.druid.data.input.InputRowSchema;
import org.apache.druid.data.input.impl.CsvInputFormat;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.JsonInputFormat;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.indexing.common.TestUtils;
import org.apache.druid.indexing.seekablestream.SettableByteEntity;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.parsers.CloseableIterator;
import org.apache.druid.java.util.common.parsers.JSONPathFieldSpec;
import org.apache.druid.java.util.common.parsers.JSONPathFieldType;
import org.apache.druid.java.util.common.parsers.JSONPathSpec;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
public class KinesisInputFormatTest
{
// Approximate-arrival date attached to test Kinesis records (the "header"
// timestamp), distinct from the payload's own "timestamp" field below so tests
// can tell which one a TimestampSpec picked up.
// NOTE(review): "APPROXOIMATE" and "TIMSTAMP" are pre-existing typos in the
// identifier names; kept as-is because renaming would touch every test method.
private static final String KINESIS_APPROXIMATE_TIME_DATE = "2024-07-29";
private static final long KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS = DateTimes.of(KINESIS_APPROXIMATE_TIME_DATE).getMillis();
// Date carried inside the JSON payload's "timestamp" field.
private static final String DATA_TIMSTAMP_DATE = "2024-07-30";
private static final String PARTITION_KEY = "partition_key_1";
// Minimal JSON payload used by most tests; the nested "o" object exercises
// the flatten-spec PATH/JQ expressions configured in setUp().
private static final byte[] SIMPLE_JSON_VALUE_BYTES = StringUtils.toUtf8(
    TestUtils.singleQuoteToStandardJson(
        "{"
        + " 'timestamp': '" + DATA_TIMSTAMP_DATE + "',"
        + " 'bar': null,"
        + " 'foo': 'x',"
        + " 'baz': 4,"
        + " 'o': {'mg': 1}"
        + "}"
    )
);
// Format under test; rebuilt fresh for each test in setUp().
private KinesisInputFormat format;
// Builds the KinesisInputFormat under test: a JSON value format with a flatten
// spec covering ROOT/PATH/JQ field types (including "*2" variants that are
// absent from the payload, to verify missing paths resolve empty), plus the
// header column names for the Kinesis partition key and arrival timestamp.
@Before
public void setUp()
{
  format = new KinesisInputFormat(
      // Value Format
      new JsonInputFormat(
          new JSONPathSpec(
              true,
              ImmutableList.of(
                  new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz", "baz"),
                  new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz2", "baz2"),
                  new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg", "$.o.mg"),
                  new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg2", "$.o.mg2"),
                  new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg", ".o.mg"),
                  new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg2", ".o.mg2")
              )
          ),
          null,
          null,
          false,
          false
      ),
      "kinesis.newts.partitionKey",
      "kinesis.newts.timestamp"
  );
}
/**
 * An independently constructed, identically configured format must be equal to
 * the one from {@link #setUp()} and must serialize to identical JSON bytes.
 */
@Test
public void testSerde() throws JsonProcessingException
{
  final ObjectMapper mapper = new ObjectMapper();
  // Same flatten spec as setUp(), built separately so equality is meaningful.
  final JSONPathSpec flattenSpec = new JSONPathSpec(
      true,
      ImmutableList.of(
          new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz", "baz"),
          new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz2", "baz2"),
          new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg", "$.o.mg"),
          new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg2", "$.o.mg2"),
          new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg", ".o.mg"),
          new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg2", ".o.mg2")
      )
  );
  final KinesisInputFormat kif = new KinesisInputFormat(
      // Value Format
      new JsonInputFormat(flattenSpec, null, null, false, false),
      "kinesis.newts.partitionKey",
      "kinesis.newts.timestamp"
  );

  Assert.assertEquals(format, kif);

  final byte[] formatBytes = mapper.writeValueAsBytes(format);
  final byte[] kifBytes = mapper.writeValueAsBytes(kif);
  Assert.assertArrayEquals(formatBytes, kifBytes);
}
/**
 * When the TimestampSpec points at the Kinesis header column
 * ("kinesis.newts.timestamp"), the row timestamp must come from the record's
 * approximate-arrival time, not the payload's "timestamp" field.
 *
 * <p>Fix: removed a duplicated {@code jq_omg2} emptiness assertion.
 */
@Test
public void testTimestampFromHeader() throws IOException
{
  KinesisRecordEntity inputEntity = makeInputEntity(SIMPLE_JSON_VALUE_BYTES, KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS);

  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec("kinesis.newts.timestamp", "iso", null),
          new DimensionsSpec(
              DimensionsSpec.getDefaultSchemas(
                  ImmutableList.of(
                      "bar",
                      "foo"
                  )
              )
          ),
          ColumnsFilter.all()
      ),
      newSettableByteEntity(inputEntity),
      null
  );

  final int numExpectedIterations = 1;
  try (CloseableIterator<InputRow> iterator = reader.read()) {
    int numActualIterations = 0;
    while (iterator.hasNext()) {

      final InputRow row = iterator.next();

      // Payload verifications
      // this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
      // but test reading them anyway since it isn't technically illegal
      Assert.assertEquals(DateTimes.of(KINESIS_APPROXIMATE_TIME_DATE), row.getTimestamp());
      Assert.assertEquals(
          String.valueOf(KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS),
          Iterables.getOnlyElement(row.getDimension("kinesis.newts.timestamp"))
      );
      Assert.assertEquals(PARTITION_KEY, Iterables.getOnlyElement(row.getDimension("kinesis.newts.partitionKey")));
      Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
      Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));

      // Flatten-spec paths absent from the payload must resolve to empty.
      Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
      Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
      Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());

      numActualIterations++;
    }

    Assert.assertEquals(numExpectedIterations, numActualIterations);
  }
}
/**
 * Sampling (reader.sample()) must surface the same flattened columns and
 * Kinesis header columns as a regular read.
 *
 * <p>Fix: removed a duplicated {@code jq_omg2} emptiness assertion.
 */
@Test
public void testRawSample() throws IOException
{
  KinesisRecordEntity inputEntity = makeInputEntity(SIMPLE_JSON_VALUE_BYTES, KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS);

  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec(KinesisInputFormat.DEFAULT_AUTO_TIMESTAMP_STRING, "auto", DateTimes.EPOCH),
          new DimensionsSpec(
              DimensionsSpec.getDefaultSchemas(
                  ImmutableList.of(
                      "bar",
                      "foo"
                  )
              )
          ),
          ColumnsFilter.all()
      ),
      newSettableByteEntity(inputEntity),
      null
  );

  final int numExpectedIterations = 1;
  try (CloseableIterator<InputRowListPlusRawValues> iterator = reader.sample()) {
    int numActualIterations = 0;
    while (iterator.hasNext()) {

      final InputRowListPlusRawValues rawValues = iterator.next();
      Assert.assertEquals(1, rawValues.getInputRows().size());
      InputRow row = rawValues.getInputRows().get(0);

      // Payload verifications
      // this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
      // but test reading them anyway since it isn't technically illegal
      Assert.assertEquals(
          String.valueOf(KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS),
          Iterables.getOnlyElement(row.getDimension("kinesis.newts.timestamp"))
      );
      Assert.assertEquals(PARTITION_KEY, Iterables.getOnlyElement(row.getDimension("kinesis.newts.partitionKey")));
      Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
      Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));

      // Flatten-spec paths absent from the payload must resolve to empty.
      Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
      Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
      Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());

      numActualIterations++;
    }

    Assert.assertEquals(numExpectedIterations, numActualIterations);
  }
}
/**
 * Sampling with a TimestampSpec pointed at the Kinesis header column must
 * timestamp rows from the record's approximate-arrival time.
 *
 * <p>Fix: removed a duplicated {@code jq_omg2} emptiness assertion.
 */
@Test
public void testProcessesSampleTimestampFromHeader() throws IOException
{
  KinesisRecordEntity inputEntity = makeInputEntity(SIMPLE_JSON_VALUE_BYTES, KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS);

  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec("kinesis.newts.timestamp", "iso", null),
          new DimensionsSpec(
              DimensionsSpec.getDefaultSchemas(
                  ImmutableList.of(
                      "bar",
                      "foo"
                  )
              )
          ),
          ColumnsFilter.all()
      ),
      newSettableByteEntity(inputEntity),
      null
  );

  final int numExpectedIterations = 1;
  try (CloseableIterator<InputRowListPlusRawValues> iterator = reader.sample()) {
    int numActualIterations = 0;
    while (iterator.hasNext()) {

      final InputRowListPlusRawValues rawValues = iterator.next();
      Assert.assertEquals(1, rawValues.getInputRows().size());
      InputRow row = rawValues.getInputRows().get(0);

      // Payload verifications
      // this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
      // but test reading them anyway since it isn't technically illegal
      Assert.assertEquals(DateTimes.of(String.valueOf(KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS)), row.getTimestamp());
      Assert.assertEquals(
          String.valueOf(KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS),
          Iterables.getOnlyElement(row.getDimension("kinesis.newts.timestamp"))
      );
      Assert.assertEquals(PARTITION_KEY, Iterables.getOnlyElement(row.getDimension("kinesis.newts.partitionKey")));
      Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
      Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));

      // Flatten-spec paths absent from the payload must resolve to empty.
      Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
      Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
      Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());

      numActualIterations++;
    }

    Assert.assertEquals(numExpectedIterations, numActualIterations);
  }
}
// Feeds five records through ONE reader by swapping the underlying entity via
// SettableByteEntity between reads. Each record i carries a distinct payload
// timestamp (2024-07-2i) and a distinct header arrival time (2024-07-1i); the
// TimestampSpec targets the header column, so the row timestamp must track the
// header value, per record.
@Test
public void testWithMultipleMixedRecordsTimestampFromHeader() throws IOException
{
  final byte[][] values = new byte[5][];
  for (int i = 0; i < values.length; i++) {
    values[i] = StringUtils.toUtf8(
        "{\n"
        + " \"timestamp\": \"2024-07-2" + i + "\",\n"
        + " \"bar\": null,\n"
        + " \"foo\": \"x\",\n"
        + " \"baz\": 4,\n"
        + " \"index\": " + i + ",\n"
        + " \"o\": {\n"
        + " \"mg\": 1\n"
        + " }\n"
        + "}"
    );
  }

  SettableByteEntity<KinesisRecordEntity> settableByteEntity = new SettableByteEntity<>();

  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec("kinesis.newts.timestamp", "iso", null),
          new DimensionsSpec(
              DimensionsSpec.getDefaultSchemas(
                  ImmutableList.of(
                      "bar",
                      "foo",
                      "kinesis.newts.timestamp"
                  )
              )
          ),
          ColumnsFilter.all()
      ),
      settableByteEntity,
      null
  );

  for (int i = 0; i < values.length; i++) {
    // Point the (single) reader at record i's bytes and header timestamp.
    KinesisRecordEntity inputEntity = makeInputEntity(values[i], DateTimes.of("2024-07-1" + i).getMillis());
    settableByteEntity.setEntity(inputEntity);

    final int numExpectedIterations = 1;
    try (CloseableIterator<InputRow> iterator = reader.read()) {
      int numActualIterations = 0;
      while (iterator.hasNext()) {

        final InputRow row = iterator.next();

        // Payload verification
        // this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
        // but test reading them anyway since it isn't technically illegal
        // Row timestamp follows the per-record header value, not the payload's.
        Assert.assertEquals(DateTimes.of("2024-07-1" + i), row.getTimestamp());
        Assert.assertEquals(
            String.valueOf(DateTimes.of("2024-07-1" + i).getMillis()),
            Iterables.getOnlyElement(row.getDimension("kinesis.newts.timestamp"))
        );
        Assert.assertEquals(PARTITION_KEY, Iterables.getOnlyElement(row.getDimension("kinesis.newts.partitionKey")));
        Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
        Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
        Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
        Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));
        Assert.assertEquals(String.valueOf(i), Iterables.getOnlyElement(row.getDimension("index")));

        Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
        Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
        Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());

        numActualIterations++;
      }

      Assert.assertEquals(numExpectedIterations, numActualIterations);
    }
  }
}
/**
 * When the TimestampSpec points at the payload's "timestamp" field, the row
 * timestamp must come from the JSON data, while the Kinesis header column is
 * still readable as a dimension.
 *
 * <p>Fix: removed a duplicated {@code jq_omg2} emptiness assertion.
 */
@Test
public void testTimestampFromData() throws IOException
{
  KinesisRecordEntity inputEntity = makeInputEntity(SIMPLE_JSON_VALUE_BYTES, KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS);

  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec("timestamp", "iso", null),
          new DimensionsSpec(
              DimensionsSpec.getDefaultSchemas(
                  ImmutableList.of(
                      "bar",
                      "foo"
                  )
              )
          ),
          ColumnsFilter.all()
      ),
      newSettableByteEntity(inputEntity),
      null
  );

  final int numExpectedIterations = 1;
  try (CloseableIterator<InputRow> iterator = reader.read()) {
    int numActualIterations = 0;
    while (iterator.hasNext()) {

      final InputRow row = iterator.next();

      // Payload verifications
      // this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
      // but test reading them anyway since it isn't technically illegal
      Assert.assertEquals(DateTimes.of(DATA_TIMSTAMP_DATE), row.getTimestamp());
      Assert.assertEquals(
          String.valueOf(KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS),
          Iterables.getOnlyElement(row.getDimension("kinesis.newts.timestamp"))
      );
      Assert.assertEquals(PARTITION_KEY, Iterables.getOnlyElement(row.getDimension("kinesis.newts.partitionKey")));
      Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
      Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));

      // Flatten-spec paths absent from the payload must resolve to empty.
      Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
      Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
      Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());

      numActualIterations++;
    }

    Assert.assertEquals(numExpectedIterations, numActualIterations);
  }
}
@Test
public void testWithMultipleMixedRecordsTimestampFromData() throws IOException
{
  // Five payloads with distinct data timestamps (2024-07-20 .. 2024-07-24) and an "index"
  // field so each iteration's row can be told apart.
  final byte[][] values = new byte[5][];
  for (int i = 0; i < values.length; i++) {
    values[i] = StringUtils.toUtf8(
        "{\n"
        + "  \"timestamp\": \"2024-07-2" + i + "\",\n"
        + "  \"bar\": null,\n"
        + "  \"foo\": \"x\",\n"
        + "  \"baz\": 4,\n"
        + "  \"index\": " + i + ",\n"
        + "  \"o\": {\n"
        + "    \"mg\": 1\n"
        + "  }\n"
        + "}"
    );
  }
  // A single settable entity is reused: each loop iteration swaps in the next record and
  // re-reads through the same reader, mirroring how streaming ingestion feeds records.
  SettableByteEntity<KinesisRecordEntity> settableByteEntity = new SettableByteEntity<>();
  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec("timestamp", "iso", null),
          new DimensionsSpec(
              DimensionsSpec.getDefaultSchemas(
                  ImmutableList.of(
                      "bar",
                      "foo",
                      "kinesis.newts.timestamp"
                  )
              )
          ),
          ColumnsFilter.all()
      ),
      settableByteEntity,
      null
  );
  for (int i = 0; i < values.length; i++) {
    KinesisRecordEntity inputEntity = makeInputEntity(values[i], KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS);
    settableByteEntity.setEntity(inputEntity);
    final int numExpectedIterations = 1;
    try (CloseableIterator<InputRow> iterator = reader.read()) {
      int numActualIterations = 0;
      while (iterator.hasNext()) {
        final InputRow row = iterator.next();
        // Payload verification
        // this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
        // but test reading them anyway since it isn't technically illegal
        Assert.assertEquals(DateTimes.of("2024-07-2" + i), row.getTimestamp());
        // Consistency fix: use the shared KINESIS_APPROXIMATE_TIME_DATE constant instead of a
        // hard-coded "2024-07-29" literal — the record header carries
        // KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS here exactly as in the sibling tests, which
        // all assert via the constant.
        Assert.assertEquals(
            String.valueOf(DateTimes.of(KINESIS_APPROXIMATE_TIME_DATE).getMillis()),
            Iterables.getOnlyElement(row.getDimension("kinesis.newts.timestamp"))
        );
        Assert.assertEquals(PARTITION_KEY, Iterables.getOnlyElement(row.getDimension("kinesis.newts.partitionKey")));
        Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
        Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
        Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
        Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));
        Assert.assertEquals(String.valueOf(i), Iterables.getOnlyElement(row.getDimension("index")));
        Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
        Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
        Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());
        numActualIterations++;
      }
      Assert.assertEquals(numExpectedIterations, numActualIterations);
    }
  }
}
@Test
public void testMissingTimestampThrowsException() throws IOException
{
  // The schema points the timestamp at a field ("time") that the payload does not contain,
  // so parsing each row must fail with an unparseable-timestamp error.
  final KinesisRecordEntity entity =
      makeInputEntity(SIMPLE_JSON_VALUE_BYTES, KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS);
  final InputRowSchema schema = new InputRowSchema(
      new TimestampSpec("time", "iso", null),
      new DimensionsSpec(
          DimensionsSpec.getDefaultSchemas(
              ImmutableList.of(
                  "bar",
                  "foo",
                  "kinesis.newts.timestamp"
              )
          )
      ),
      ColumnsFilter.all()
  );
  final InputEntityReader reader = format.createReader(schema, newSettableByteEntity(entity), null);
  try (CloseableIterator<InputRow> iterator = reader.read()) {
    while (iterator.hasNext()) {
      final Throwable thrown = Assert.assertThrows(ParseException.class, iterator::next);
      Assert.assertTrue(thrown.getMessage().startsWith("Timestamp[null] is unparseable! Event: {"));
    }
  }
}
@Test
public void testWithSchemaDiscoveryKinesisTimestampExcluded() throws IOException
{
  KinesisRecordEntity inputEntity =
      makeInputEntity(SIMPLE_JSON_VALUE_BYTES, KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS);
  // Schema discovery is enabled but the synthetic Kinesis timestamp column is explicitly
  // excluded, so it must not show up in the discovered dimension list asserted below.
  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec("timestamp", "iso", null),
          DimensionsSpec.builder()
              .useSchemaDiscovery(true)
              .setDimensionExclusions(ImmutableList.of("kinesis.newts.timestamp"))
              .build(),
          ColumnsFilter.all()
      ),
      newSettableByteEntity(inputEntity),
      null
  );
  final int numExpectedIterations = 1;
  try (CloseableIterator<InputRow> iterator = reader.read()) {
    int numActualIterations = 0;
    while (iterator.hasNext()) {
      final InputRow row = iterator.next();
      // All discovered columns except the excluded kinesis.newts.timestamp.
      List<String> expectedDimensions = Arrays.asList(
          "foo",
          "root_baz",
          "o",
          "bar",
          "path_omg",
          "jq_omg",
          "jq_omg2",
          "baz",
          "root_baz2",
          "path_omg2",
          "kinesis.newts.partitionKey"
      );
      // Sort both sides so the comparison is order-insensitive.
      Collections.sort(expectedDimensions);
      Collections.sort(row.getDimensions());
      Assert.assertEquals(
          expectedDimensions,
          row.getDimensions()
      );
      // Payload verifications
      Assert.assertEquals(DateTimes.of(DATA_TIMSTAMP_DATE), row.getTimestamp());
      // NOTE(review): the excluded column is absent from the dimension list yet its value is
      // still readable from the row — presumably exclusion only affects discovery, not the
      // underlying event map; confirm against the reader implementation.
      Assert.assertEquals(
          String.valueOf(DateTimes.of(KINESIS_APPROXIMATE_TIME_DATE).getMillis()),
          Iterables.getOnlyElement(row.getDimension("kinesis.newts.timestamp"))
      );
      Assert.assertEquals(PARTITION_KEY, Iterables.getOnlyElement(row.getDimension("kinesis.newts.partitionKey")));
      Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
      Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));
      Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
      Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
      Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());
      numActualIterations++;
    }
    Assert.assertEquals(numExpectedIterations, numActualIterations);
  }
}
@Test
public void testWithSchemaDiscoveryTimestampFromHeader() throws IOException
{
  KinesisRecordEntity inputEntity =
      makeInputEntity(SIMPLE_JSON_VALUE_BYTES, KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS);
  // Here the row timestamp is taken from the synthetic Kinesis header column rather than
  // from the payload; the payload's own "timestamp" field then surfaces as an ordinary
  // discovered dimension (see expectedDimensions below).
  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec("kinesis.newts.timestamp", "iso", null),
          DimensionsSpec.builder()
              .useSchemaDiscovery(true)
              .build(),
          ColumnsFilter.all()
      ),
      newSettableByteEntity(inputEntity),
      null
  );
  final int numExpectedIterations = 1;
  try (CloseableIterator<InputRow> iterator = reader.read()) {
    int numActualIterations = 0;
    while (iterator.hasNext()) {
      final InputRow row = iterator.next();
      List<String> expectedDimensions = Arrays.asList(
          "foo",
          "timestamp",
          "root_baz",
          "o",
          "bar",
          "path_omg",
          "jq_omg",
          "jq_omg2",
          "baz",
          "root_baz2",
          "path_omg2",
          "kinesis.newts.partitionKey"
      );
      // Sort both sides so the comparison is order-insensitive.
      Collections.sort(expectedDimensions);
      Collections.sort(row.getDimensions());
      Assert.assertEquals(
          expectedDimensions,
          row.getDimensions()
      );
      // Payload verifications
      Assert.assertEquals(DateTimes.of(KINESIS_APPROXIMATE_TIME_DATE), row.getTimestamp());
      Assert.assertEquals(
          String.valueOf(DateTimes.of(KINESIS_APPROXIMATE_TIME_DATE).getMillis()),
          Iterables.getOnlyElement(row.getDimension("kinesis.newts.timestamp"))
      );
      Assert.assertEquals(PARTITION_KEY, Iterables.getOnlyElement(row.getDimension("kinesis.newts.partitionKey")));
      Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
      Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));
      Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
      Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
      Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());
      numActualIterations++;
    }
    Assert.assertEquals(numExpectedIterations, numActualIterations);
  }
}
@Test
public void testValueInCsvFormat() throws IOException
{
  // Replace the JSON value format with CSV; the column list must be declared explicitly
  // because the payload carries no header row.
  format = new KinesisInputFormat(
      // Value Format
      new CsvInputFormat(
          Arrays.asList("foo", "bar", "timestamp", "baz"),
          null,
          false,
          false,
          0,
          null
      ),
      "kinesis.newts.partitionKey",
      "kinesis.newts.timestamp"
  );
  // CSV payload "x,,2024-07-30,4": foo=x, bar=<empty>, timestamp=2024-07-30, baz=4.
  KinesisRecordEntity inputEntity =
      makeInputEntity(StringUtils.toUtf8("x,,2024-07-30,4"), KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS);
  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec("timestamp", "iso", null),
          new DimensionsSpec(
              DimensionsSpec.getDefaultSchemas(
                  ImmutableList.of(
                      "bar",
                      "foo",
                      "kinesis.newts.timestamp",
                      "kinesis.newts.partitionKey"
                  )
              )
          ),
          ColumnsFilter.all()
      ),
      newSettableByteEntity(inputEntity),
      null
  );
  final int numExpectedIterations = 1;
  try (CloseableIterator<InputRow> iterator = reader.read()) {
    int numActualIterations = 0;
    while (iterator.hasNext()) {
      final InputRow row = iterator.next();
      // With explicit (non-discovered) dimensions the declared order is preserved.
      Assert.assertEquals(
          Arrays.asList(
              "bar",
              "foo",
              "kinesis.newts.timestamp",
              "kinesis.newts.partitionKey"
          ),
          row.getDimensions()
      );
      // Payload verifications
      // this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
      // but test reading them anyway since it isn't technically illegal
      Assert.assertEquals(DateTimes.of("2024-07-30"), row.getTimestamp());
      Assert.assertEquals(
          String.valueOf(DateTimes.of(KINESIS_APPROXIMATE_TIME_DATE).getMillis()),
          Iterables.getOnlyElement(row.getDimension("kinesis.newts.timestamp"))
      );
      Assert.assertEquals(PARTITION_KEY, Iterables.getOnlyElement(row.getDimension("kinesis.newts.partitionKey")));
      Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
      // "bar" maps to the empty CSV field, so the dimension has no value.
      Assert.assertTrue(row.getDimension("bar").isEmpty());
      numActualIterations++;
    }
    Assert.assertEquals(numExpectedIterations, numActualIterations);
  }
}
@Test
public void testWithPartialDeclarationSchemaDiscovery() throws IOException
{
  KinesisRecordEntity inputEntity =
      makeInputEntity(SIMPLE_JSON_VALUE_BYTES, KINESIS_APPROXOIMATE_TIMESTAMP_MILLIS);
  // "bar" is declared explicitly while schema discovery fills in the remaining columns,
  // including the two synthetic kinesis.newts.* columns.
  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec("timestamp", "iso", null),
          DimensionsSpec.builder().setDimensions(
              DimensionsSpec.getDefaultSchemas(ImmutableList.of("bar"))
          ).useSchemaDiscovery(true).build(),
          ColumnsFilter.all()
      ),
      newSettableByteEntity(inputEntity),
      null
  );
  final int numExpectedIterations = 1;
  try (CloseableIterator<InputRow> iterator = reader.read()) {
    int numActualIterations = 0;
    while (iterator.hasNext()) {
      final InputRow row = iterator.next();
      List<String> expectedDimensions = Arrays.asList(
          "bar",
          "foo",
          "kinesis.newts.timestamp",
          "kinesis.newts.partitionKey",
          "root_baz",
          "o",
          "path_omg",
          "jq_omg",
          "jq_omg2",
          "baz",
          "root_baz2",
          "path_omg2"
      );
      // Sort both sides so the comparison is order-insensitive.
      Collections.sort(expectedDimensions);
      Collections.sort(row.getDimensions());
      Assert.assertEquals(
          expectedDimensions,
          row.getDimensions()
      );
      // Payload verifications
      Assert.assertEquals(DateTimes.of(DATA_TIMSTAMP_DATE), row.getTimestamp());
      Assert.assertEquals(
          String.valueOf(DateTimes.of(KINESIS_APPROXIMATE_TIME_DATE).getMillis()),
          Iterables.getOnlyElement(row.getDimension("kinesis.newts.timestamp"))
      );
      Assert.assertEquals(PARTITION_KEY, Iterables.getOnlyElement(row.getDimension("kinesis.newts.partitionKey")));
      Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
      Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));
      Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
      Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
      Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());
      numActualIterations++;
    }
    Assert.assertEquals(numExpectedIterations, numActualIterations);
  }
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void testValidInputFormatConstruction()
{
  // A representative JSON value format; the flattener fields mirror those used by the
  // read tests in this class.
  final InputFormat valueFormat = new JsonInputFormat(
      new JSONPathSpec(
          true,
          ImmutableList.of(
              new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz", "baz"),
              new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz2", "baz2"),
              new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg", "$.o.mg"),
              new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg2", "$.o.mg2"),
              new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg", ".o.mg"),
              new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg2", ".o.mg2")
          )
      ),
      null,
      null,
      false,
      false
  );
  // Every combination of absent/present partition-key and timestamp column names is legal,
  // so none of these constructor calls may throw.
  final String[] partitionKeyNames = {null, "kinesis.partitionKey"};
  final String[] timestampNames = {null, "kinesis.timestamp"};
  for (String partitionKeyName : partitionKeyNames) {
    for (String timestampName : timestampNames) {
      new KinesisInputFormat(valueFormat, partitionKeyName, timestampName);
    }
  }
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void testInvalidInputFormatConstruction()
{
  // A null value format is rejected outright.
  Assert.assertThrows(
      "valueFormat must not be null",
      NullPointerException.class,
      () -> new KinesisInputFormat(null, null, null)
  );
  final InputFormat jsonFormat = new JsonInputFormat(
      new JSONPathSpec(
          true,
          ImmutableList.of(
              new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz", "baz"),
              new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz2", "baz2"),
              new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg", "$.o.mg"),
              new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg2", "$.o.mg2"),
              new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg", ".o.mg"),
              new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg2", ".o.mg2")
          )
      ),
      null,
      null,
      false,
      false
  );
  // Re-using one column name for both the partition key and the timestamp is rejected.
  Assert.assertThrows(
      "timestampColumnName and partitionKeyColumnName must be different",
      IllegalStateException.class,
      () -> new KinesisInputFormat(jsonFormat, "kinesis.timestamp", "kinesis.timestamp")
  );
}
private KinesisRecordEntity makeInputEntity(
    byte[] payload,
    long kinesisTimestampMillis)
{
  // Wrap the payload in an AWS Record carrying the shared test partition key and the
  // supplied approximate-arrival timestamp, then adapt it to Druid's entity type.
  final Record record = new Record()
      .withData(ByteBuffer.wrap(payload))
      .withApproximateArrivalTimestamp(new Date(kinesisTimestampMillis))
      .withPartitionKey(PARTITION_KEY);
  return new KinesisRecordEntity(record);
}
private SettableByteEntity<KinesisRecordEntity> newSettableByteEntity(KinesisRecordEntity kinesisRecordEntity)
{
  // Convenience wrapper: a SettableByteEntity pre-loaded with a single record.
  final SettableByteEntity<KinesisRecordEntity> entity = new SettableByteEntity<>();
  entity.setEntity(kinesisRecordEntity);
  return entity;
}
}
|
googleapis/google-cloud-java | 37,495 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/SampleConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/data_labeling_job.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Active learning data sampling config. For every active learning labeling
* iteration, it will select a batch of data based on the sampling strategy.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.SampleConfig}
*/
public final class SampleConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.SampleConfig)
SampleConfigOrBuilder {
  // NOTE(review): generated by protoc from google/cloud/aiplatform/v1/data_labeling_job.proto —
  // regenerate from the .proto rather than editing by hand.
  private static final long serialVersionUID = 0L;

  // Use SampleConfig.newBuilder() to construct.
  private SampleConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default state: sample strategy at its wire default (0 = SAMPLE_STRATEGY_UNSPECIFIED).
  private SampleConfig() {
    sampleStrategy_ = 0;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SampleConfig();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.DataLabelingJobProto
        .internal_static_google_cloud_aiplatform_v1_SampleConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.DataLabelingJobProto
        .internal_static_google_cloud_aiplatform_v1_SampleConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.SampleConfig.class,
            com.google.cloud.aiplatform.v1.SampleConfig.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * Sample strategy decides which subset of DataItems should be selected for
   * human labeling in every batch.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.aiplatform.v1.SampleConfig.SampleStrategy}
   */
  public enum SampleStrategy implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Default will be treated as UNCERTAINTY.
     * </pre>
     *
     * <code>SAMPLE_STRATEGY_UNSPECIFIED = 0;</code>
     */
    SAMPLE_STRATEGY_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Sample the most uncertain data to label.
     * </pre>
     *
     * <code>UNCERTAINTY = 1;</code>
     */
    UNCERTAINTY(1),
    // Sentinel for wire values unknown to this generated code version.
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * Default will be treated as UNCERTAINTY.
     * </pre>
     *
     * <code>SAMPLE_STRATEGY_UNSPECIFIED = 0;</code>
     */
    public static final int SAMPLE_STRATEGY_UNSPECIFIED_VALUE = 0;

    /**
     *
     *
     * <pre>
     * Sample the most uncertain data to label.
     * </pre>
     *
     * <code>UNCERTAINTY = 1;</code>
     */
    public static final int UNCERTAINTY_VALUE = 1;

    // UNRECOGNIZED carries no wire number, so asking for one is an error by contract.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static SampleStrategy valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static SampleStrategy forNumber(int value) {
      switch (value) {
        case 0:
          return SAMPLE_STRATEGY_UNSPECIFIED;
        case 1:
          return UNCERTAINTY;
        default:
          // Unknown wire values map to null here; callers substitute UNRECOGNIZED.
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<SampleStrategy> internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<SampleStrategy> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<SampleStrategy>() {
          public SampleStrategy findValueByNumber(int number) {
            return SampleStrategy.forNumber(number);
          }
        };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.SampleConfig.getDescriptor().getEnumTypes().get(0);
    }

    private static final SampleStrategy[] VALUES = values();

    public static SampleStrategy valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private SampleStrategy(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1.SampleConfig.SampleStrategy)
  }
  // Oneof bookkeeping: the *_Case_ int holds the field number of the set member (0 = none),
  // and the Object field holds the boxed value of whichever member is set.
  private int initialBatchSampleSizeCase_ = 0;

  @SuppressWarnings("serial")
  private java.lang.Object initialBatchSampleSize_;

  public enum InitialBatchSampleSizeCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    INITIAL_BATCH_SAMPLE_PERCENTAGE(1),
    INITIALBATCHSAMPLESIZE_NOT_SET(0);
    private final int value;

    private InitialBatchSampleSizeCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static InitialBatchSampleSizeCase valueOf(int value) {
      return forNumber(value);
    }

    public static InitialBatchSampleSizeCase forNumber(int value) {
      switch (value) {
        case 1:
          return INITIAL_BATCH_SAMPLE_PERCENTAGE;
        case 0:
          return INITIALBATCHSAMPLESIZE_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  public InitialBatchSampleSizeCase getInitialBatchSampleSizeCase() {
    return InitialBatchSampleSizeCase.forNumber(initialBatchSampleSizeCase_);
  }

  private int followingBatchSampleSizeCase_ = 0;

  @SuppressWarnings("serial")
  private java.lang.Object followingBatchSampleSize_;

  public enum FollowingBatchSampleSizeCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    FOLLOWING_BATCH_SAMPLE_PERCENTAGE(3),
    FOLLOWINGBATCHSAMPLESIZE_NOT_SET(0);
    private final int value;

    private FollowingBatchSampleSizeCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static FollowingBatchSampleSizeCase valueOf(int value) {
      return forNumber(value);
    }

    public static FollowingBatchSampleSizeCase forNumber(int value) {
      switch (value) {
        case 3:
          return FOLLOWING_BATCH_SAMPLE_PERCENTAGE;
        case 0:
          return FOLLOWINGBATCHSAMPLESIZE_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  public FollowingBatchSampleSizeCase getFollowingBatchSampleSizeCase() {
    return FollowingBatchSampleSizeCase.forNumber(followingBatchSampleSizeCase_);
  }
  public static final int INITIAL_BATCH_SAMPLE_PERCENTAGE_FIELD_NUMBER = 1;

  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in the first batch.
   * </pre>
   *
   * <code>int32 initial_batch_sample_percentage = 1;</code>
   *
   * @return Whether the initialBatchSamplePercentage field is set.
   */
  @java.lang.Override
  public boolean hasInitialBatchSamplePercentage() {
    return initialBatchSampleSizeCase_ == 1;
  }

  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in the first batch.
   * </pre>
   *
   * <code>int32 initial_batch_sample_percentage = 1;</code>
   *
   * @return The initialBatchSamplePercentage.
   */
  @java.lang.Override
  public int getInitialBatchSamplePercentage() {
    // Returns 0 (the proto default) when this oneof member is not the one set.
    if (initialBatchSampleSizeCase_ == 1) {
      return (java.lang.Integer) initialBatchSampleSize_;
    }
    return 0;
  }

  public static final int FOLLOWING_BATCH_SAMPLE_PERCENTAGE_FIELD_NUMBER = 3;

  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in each following batch
   * (except the first batch).
   * </pre>
   *
   * <code>int32 following_batch_sample_percentage = 3;</code>
   *
   * @return Whether the followingBatchSamplePercentage field is set.
   */
  @java.lang.Override
  public boolean hasFollowingBatchSamplePercentage() {
    return followingBatchSampleSizeCase_ == 3;
  }

  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in each following batch
   * (except the first batch).
   * </pre>
   *
   * <code>int32 following_batch_sample_percentage = 3;</code>
   *
   * @return The followingBatchSamplePercentage.
   */
  @java.lang.Override
  public int getFollowingBatchSamplePercentage() {
    // Returns 0 (the proto default) when this oneof member is not the one set.
    if (followingBatchSampleSizeCase_ == 3) {
      return (java.lang.Integer) followingBatchSampleSize_;
    }
    return 0;
  }

  public static final int SAMPLE_STRATEGY_FIELD_NUMBER = 5;
  private int sampleStrategy_ = 0;

  /**
   *
   *
   * <pre>
   * Field to choose sampling strategy. Sampling strategy will decide which data
   * should be selected for human labeling in every batch.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy sample_strategy = 5;</code>
   *
   * @return The enum numeric value on the wire for sampleStrategy.
   */
  @java.lang.Override
  public int getSampleStrategyValue() {
    return sampleStrategy_;
  }

  /**
   *
   *
   * <pre>
   * Field to choose sampling strategy. Sampling strategy will decide which data
   * should be selected for human labeling in every batch.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy sample_strategy = 5;</code>
   *
   * @return The sampleStrategy.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy getSampleStrategy() {
    // Wire values without a matching enum constant surface as UNRECOGNIZED.
    com.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy result =
        com.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy.forNumber(sampleStrategy_);
    return result == null
        ? com.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy.UNRECOGNIZED
        : result;
  }
  // Memoized initialization check: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Oneof members are written only when their case is active; the enum field is skipped
    // at its default value, per proto3 serialization rules.
    if (initialBatchSampleSizeCase_ == 1) {
      output.writeInt32(1, (int) ((java.lang.Integer) initialBatchSampleSize_));
    }
    if (followingBatchSampleSizeCase_ == 3) {
      output.writeInt32(3, (int) ((java.lang.Integer) followingBatchSampleSize_));
    }
    if (sampleStrategy_
        != com.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy.SAMPLE_STRATEGY_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(5, sampleStrategy_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Size computation mirrors writeTo and is memoized in memoizedSize.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (initialBatchSampleSizeCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeInt32Size(
              1, (int) ((java.lang.Integer) initialBatchSampleSize_));
    }
    if (followingBatchSampleSizeCase_ == 3) {
      size +=
          com.google.protobuf.CodedOutputStream.computeInt32Size(
              3, (int) ((java.lang.Integer) followingBatchSampleSize_));
    }
    if (sampleStrategy_
        != com.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy.SAMPLE_STRATEGY_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(5, sampleStrategy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.SampleConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.SampleConfig other =
        (com.google.cloud.aiplatform.v1.SampleConfig) obj;

    if (sampleStrategy_ != other.sampleStrategy_) return false;
    // Oneofs compare by active case first, then by the set member's value.
    if (!getInitialBatchSampleSizeCase().equals(other.getInitialBatchSampleSizeCase()))
      return false;
    switch (initialBatchSampleSizeCase_) {
      case 1:
        if (getInitialBatchSamplePercentage() != other.getInitialBatchSamplePercentage())
          return false;
        break;
      case 0:
      default:
    }
    if (!getFollowingBatchSampleSizeCase().equals(other.getFollowingBatchSampleSizeCase()))
      return false;
    switch (followingBatchSampleSizeCase_) {
      case 3:
        if (getFollowingBatchSamplePercentage() != other.getFollowingBatchSamplePercentage())
          return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + SAMPLE_STRATEGY_FIELD_NUMBER;
    hash = (53 * hash) + sampleStrategy_;
    // Oneof members contribute to the hash only when set, keyed by field number.
    switch (initialBatchSampleSizeCase_) {
      case 1:
        hash = (37 * hash) + INITIAL_BATCH_SAMPLE_PERCENTAGE_FIELD_NUMBER;
        hash = (53 * hash) + getInitialBatchSamplePercentage();
        break;
      case 0:
      default:
    }
    switch (followingBatchSampleSizeCase_) {
      case 3:
        hash = (37 * hash) + FOLLOWING_BATCH_SAMPLE_PERCENTAGE_FIELD_NUMBER;
        hash = (53 * hash) + getFollowingBatchSamplePercentage();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an extension registry; the *Delimited* variants read a length prefix first.
  public static com.google.cloud.aiplatform.v1.SampleConfig parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.SampleConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.SampleConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.SampleConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.SampleConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.SampleConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.SampleConfig parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.SampleConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.SampleConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.SampleConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.SampleConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.SampleConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.aiplatform.v1.SampleConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh builder; any other instance is merged in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
 *
 *
 * <pre>
 * Active learning data sampling config. For every active learning labeling
 * iteration, it will select a batch of data based on the sampling strategy.
 * </pre>
 *
 * Protobuf type {@code google.cloud.aiplatform.v1.SampleConfig}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.SampleConfig)
    com.google.cloud.aiplatform.v1.SampleConfigOrBuilder {
  /** Returns the proto descriptor for {@code SampleConfig}. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.DataLabelingJobProto
        .internal_static_google_cloud_aiplatform_v1_SampleConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.DataLabelingJobProto
        .internal_static_google_cloud_aiplatform_v1_SampleConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.SampleConfig.class,
            com.google.cloud.aiplatform.v1.SampleConfig.Builder.class);
  }

  // Construct using com.google.cloud.aiplatform.v1.SampleConfig.newBuilder()
  private Builder() {}

  // Parent-aware constructor used by newBuilderForType(parent) for nested builders.
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  /** Resets every field: the strategy enum, its has-bit, and both oneof slots. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    sampleStrategy_ = 0;
    initialBatchSampleSizeCase_ = 0;
    initialBatchSampleSize_ = null;
    followingBatchSampleSizeCase_ = 0;
    followingBatchSampleSize_ = null;
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.aiplatform.v1.DataLabelingJobProto
        .internal_static_google_cloud_aiplatform_v1_SampleConfig_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.SampleConfig getDefaultInstanceForType() {
    return com.google.cloud.aiplatform.v1.SampleConfig.getDefaultInstance();
  }

  /** Builds the message, throwing if required fields are missing (none here, so never). */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.SampleConfig build() {
    com.google.cloud.aiplatform.v1.SampleConfig result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  /** Builds the message without the initialization check; copies set fields and oneofs. */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.SampleConfig buildPartial() {
    com.google.cloud.aiplatform.v1.SampleConfig result =
        new com.google.cloud.aiplatform.v1.SampleConfig(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    buildPartialOneofs(result);
    onBuilt();
    return result;
  }

  // Copies the plain (non-oneof) fields whose has-bits are set into result.
  private void buildPartial0(com.google.cloud.aiplatform.v1.SampleConfig result) {
    int from_bitField0_ = bitField0_;
    // 0x00000004 is the has-bit for sample_strategy (field 5).
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.sampleStrategy_ = sampleStrategy_;
    }
  }

  // Copies both oneof case markers and their payloads into result.
  private void buildPartialOneofs(com.google.cloud.aiplatform.v1.SampleConfig result) {
    result.initialBatchSampleSizeCase_ = initialBatchSampleSizeCase_;
    result.initialBatchSampleSize_ = this.initialBatchSampleSize_;
    result.followingBatchSampleSizeCase_ = followingBatchSampleSizeCase_;
    result.followingBatchSampleSize_ = this.followingBatchSampleSize_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dispatches to the typed merge when possible, else to the reflective merge.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.aiplatform.v1.SampleConfig) {
      return mergeFrom((com.google.cloud.aiplatform.v1.SampleConfig) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  /** Merges all set fields of {@code other} into this builder; the default instance is a no-op. */
  public Builder mergeFrom(com.google.cloud.aiplatform.v1.SampleConfig other) {
    if (other == com.google.cloud.aiplatform.v1.SampleConfig.getDefaultInstance()) return this;
    if (other.sampleStrategy_ != 0) {
      setSampleStrategyValue(other.getSampleStrategyValue());
    }
    switch (other.getInitialBatchSampleSizeCase()) {
      case INITIAL_BATCH_SAMPLE_PERCENTAGE:
        {
          setInitialBatchSamplePercentage(other.getInitialBatchSamplePercentage());
          break;
        }
      case INITIALBATCHSAMPLESIZE_NOT_SET:
        {
          break;
        }
    }
    switch (other.getFollowingBatchSampleSizeCase()) {
      case FOLLOWING_BATCH_SAMPLE_PERCENTAGE:
        {
          setFollowingBatchSamplePercentage(other.getFollowingBatchSamplePercentage());
          break;
        }
      case FOLLOWINGBATCHSAMPLESIZE_NOT_SET:
        {
          break;
        }
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  /**
   * Streaming merge: reads tag/value pairs until end of input. Wire tags: 8 = field 1
   * (initial_batch_sample_percentage, varint), 24 = field 3
   * (following_batch_sample_percentage, varint), 40 = field 5 (sample_strategy, enum).
   */
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8:
            {
              initialBatchSampleSize_ = input.readInt32();
              initialBatchSampleSizeCase_ = 1;
              break;
            } // case 8
          case 24:
            {
              followingBatchSampleSize_ = input.readInt32();
              followingBatchSampleSizeCase_ = 3;
              break;
            } // case 24
          case 40:
            {
              sampleStrategy_ = input.readEnum();
              bitField0_ |= 0x00000004;
              break;
            } // case 40
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      // Notify parent builders even on failure so partial state is observed.
      onChanged();
    } // finally
    return this;
  }

  // Which member of the initial_batch_sample_size oneof is set: 0 = none, 1 = percentage.
  private int initialBatchSampleSizeCase_ = 0;
  // Boxed payload of the initial_batch_sample_size oneof (Integer when case is 1).
  private java.lang.Object initialBatchSampleSize_;

  public InitialBatchSampleSizeCase getInitialBatchSampleSizeCase() {
    return InitialBatchSampleSizeCase.forNumber(initialBatchSampleSizeCase_);
  }

  /** Clears whichever member of the initial_batch_sample_size oneof is set. */
  public Builder clearInitialBatchSampleSize() {
    initialBatchSampleSizeCase_ = 0;
    initialBatchSampleSize_ = null;
    onChanged();
    return this;
  }

  // Which member of the following_batch_sample_size oneof is set: 0 = none, 3 = percentage.
  private int followingBatchSampleSizeCase_ = 0;
  // Boxed payload of the following_batch_sample_size oneof (Integer when case is 3).
  private java.lang.Object followingBatchSampleSize_;

  public FollowingBatchSampleSizeCase getFollowingBatchSampleSizeCase() {
    return FollowingBatchSampleSizeCase.forNumber(followingBatchSampleSizeCase_);
  }

  /** Clears whichever member of the following_batch_sample_size oneof is set. */
  public Builder clearFollowingBatchSampleSize() {
    followingBatchSampleSizeCase_ = 0;
    followingBatchSampleSize_ = null;
    onChanged();
    return this;
  }

  // Has-bits for non-oneof fields; bit 0x00000004 tracks sample_strategy.
  private int bitField0_;

  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in the first batch.
   * </pre>
   *
   * <code>int32 initial_batch_sample_percentage = 1;</code>
   *
   * @return Whether the initialBatchSamplePercentage field is set.
   */
  public boolean hasInitialBatchSamplePercentage() {
    return initialBatchSampleSizeCase_ == 1;
  }
  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in the first batch.
   * </pre>
   *
   * <code>int32 initial_batch_sample_percentage = 1;</code>
   *
   * @return The initialBatchSamplePercentage.
   */
  public int getInitialBatchSamplePercentage() {
    if (initialBatchSampleSizeCase_ == 1) {
      return (java.lang.Integer) initialBatchSampleSize_;
    }
    return 0;
  }
  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in the first batch.
   * </pre>
   *
   * <code>int32 initial_batch_sample_percentage = 1;</code>
   *
   * @param value The initialBatchSamplePercentage to set.
   * @return This builder for chaining.
   */
  public Builder setInitialBatchSamplePercentage(int value) {
    initialBatchSampleSizeCase_ = 1;
    initialBatchSampleSize_ = value;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in the first batch.
   * </pre>
   *
   * <code>int32 initial_batch_sample_percentage = 1;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearInitialBatchSamplePercentage() {
    if (initialBatchSampleSizeCase_ == 1) {
      initialBatchSampleSizeCase_ = 0;
      initialBatchSampleSize_ = null;
      onChanged();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in each following batch
   * (except the first batch).
   * </pre>
   *
   * <code>int32 following_batch_sample_percentage = 3;</code>
   *
   * @return Whether the followingBatchSamplePercentage field is set.
   */
  public boolean hasFollowingBatchSamplePercentage() {
    return followingBatchSampleSizeCase_ == 3;
  }
  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in each following batch
   * (except the first batch).
   * </pre>
   *
   * <code>int32 following_batch_sample_percentage = 3;</code>
   *
   * @return The followingBatchSamplePercentage.
   */
  public int getFollowingBatchSamplePercentage() {
    if (followingBatchSampleSizeCase_ == 3) {
      return (java.lang.Integer) followingBatchSampleSize_;
    }
    return 0;
  }
  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in each following batch
   * (except the first batch).
   * </pre>
   *
   * <code>int32 following_batch_sample_percentage = 3;</code>
   *
   * @param value The followingBatchSamplePercentage to set.
   * @return This builder for chaining.
   */
  public Builder setFollowingBatchSamplePercentage(int value) {
    followingBatchSampleSizeCase_ = 3;
    followingBatchSampleSize_ = value;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * The percentage of data needed to be labeled in each following batch
   * (except the first batch).
   * </pre>
   *
   * <code>int32 following_batch_sample_percentage = 3;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearFollowingBatchSamplePercentage() {
    if (followingBatchSampleSizeCase_ == 3) {
      followingBatchSampleSizeCase_ = 0;
      followingBatchSampleSize_ = null;
      onChanged();
    }
    return this;
  }

  // Raw wire value of the sample_strategy enum (field 5).
  private int sampleStrategy_ = 0;
  /**
   *
   *
   * <pre>
   * Field to choose sampling strategy. Sampling strategy will decide which data
   * should be selected for human labeling in every batch.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy sample_strategy = 5;</code>
   *
   * @return The enum numeric value on the wire for sampleStrategy.
   */
  @java.lang.Override
  public int getSampleStrategyValue() {
    return sampleStrategy_;
  }
  /**
   *
   *
   * <pre>
   * Field to choose sampling strategy. Sampling strategy will decide which data
   * should be selected for human labeling in every batch.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy sample_strategy = 5;</code>
   *
   * @param value The enum numeric value on the wire for sampleStrategy to set.
   * @return This builder for chaining.
   */
  public Builder setSampleStrategyValue(int value) {
    sampleStrategy_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Field to choose sampling strategy. Sampling strategy will decide which data
   * should be selected for human labeling in every batch.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy sample_strategy = 5;</code>
   *
   * @return The sampleStrategy.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy getSampleStrategy() {
    com.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy result =
        com.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy.forNumber(sampleStrategy_);
    // forNumber returns null for wire values unknown to this generated code version.
    return result == null
        ? com.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy.UNRECOGNIZED
        : result;
  }
  /**
   *
   *
   * <pre>
   * Field to choose sampling strategy. Sampling strategy will decide which data
   * should be selected for human labeling in every batch.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy sample_strategy = 5;</code>
   *
   * @param value The sampleStrategy to set.
   * @return This builder for chaining.
   */
  public Builder setSampleStrategy(
      com.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000004;
    sampleStrategy_ = value.getNumber();
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Field to choose sampling strategy. Sampling strategy will decide which data
   * should be selected for human labeling in every batch.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.SampleConfig.SampleStrategy sample_strategy = 5;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearSampleStrategy() {
    bitField0_ = (bitField0_ & ~0x00000004);
    sampleStrategy_ = 0;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.SampleConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.SampleConfig)
// Singleton all-fields-unset instance, created eagerly when the class is loaded.
private static final com.google.cloud.aiplatform.v1.SampleConfig DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.SampleConfig();
}
/** Returns the shared immutable default (all-unset) {@code SampleConfig} instance. */
public static com.google.cloud.aiplatform.v1.SampleConfig getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared parser used by all parseFrom/parseDelimitedFrom entry points. Any parse
// failure is surfaced as InvalidProtocolBufferException carrying the partially
// built message, as required by the Parser contract.
private static final com.google.protobuf.Parser<SampleConfig> PARSER =
    new com.google.protobuf.AbstractParser<SampleConfig>() {
      @java.lang.Override
      public SampleConfig parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed before the failure.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Normalize plain I/O errors to the protobuf exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the shared parser for {@code SampleConfig} messages (static accessor). */
public static com.google.protobuf.Parser<SampleConfig> parser() {
  return PARSER;
}
/** Returns the shared parser for {@code SampleConfig} messages (instance accessor). */
@java.lang.Override
public com.google.protobuf.Parser<SampleConfig> getParserForType() {
  return PARSER;
}
/** Returns the shared default instance (instance-method form of {@link #getDefaultInstance}). */
@java.lang.Override
public com.google.cloud.aiplatform.v1.SampleConfig getDefaultInstanceForType() {
  // Same object as the static accessor returns.
  return getDefaultInstance();
}
}
|
openjdk/jdk8 | 34,599 | jdk/src/share/classes/com/sun/tools/example/debug/tty/TTYResources_zh_CN.java | /*
* Copyright (c) 2001, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This source code is provided to illustrate the usage of a given feature
* or technique and has been deliberately simplified. Additional steps
* required for a production-quality application, such as security checks,
* input validation and proper error handling, might not be present in
* this sample code.
*/
package com.sun.tools.example.debug.tty;
/**
* <p> This class represents the <code>ResourceBundle</code>
* for the following package(s):
*
* <ol>
* <li> com.sun.tools.example.debug.tty
* </ol>
*
*/
public class TTYResources_zh_CN extends java.util.ListResourceBundle {
/**
* Returns the contents of this <code>ResourceBundle</code>.
*
* <p>
*
* @return the contents of this <code>ResourceBundle</code>.
*/
@Override
public Object[][] getContents() {
Object[][] temp = new Object[][] {
// NOTE: The value strings in this file containing "{0}" are
// processed by the java.text.MessageFormat class. Any
// single quotes appearing in these strings need to be
// doubled up.
//
// LOCALIZE THIS
{"** classes list **", "** \u7C7B\u5217\u8868 **\n{0}"},
{"** fields list **", "** \u5B57\u6BB5\u5217\u8868 **\n{0}"},
{"** methods list **", "** \u65B9\u6CD5\u5217\u8868 **\n{0}"},
{"*** Reading commands from", "*** \u6B63\u5728\u4ECE{0}\u8BFB\u53D6\u547D\u4EE4"},
{"All threads resumed.", "\u5DF2\u6062\u590D\u6240\u6709\u7EBF\u7A0B\u3002"},
{"All threads suspended.", "\u5DF2\u6302\u8D77\u6240\u6709\u7EBF\u7A0B\u3002"},
{"Argument is not defined for connector:", "\u6CA1\u6709\u4E3A\u8FDE\u63A5\u5668{1}\u5B9A\u4E49\u53C2\u6570{0}"},
{"Arguments match no method", "\u53C2\u6570\u4E0D\u4E0E\u4EFB\u4F55\u65B9\u6CD5\u5339\u914D"},
{"Array:", "\u6570\u7EC4: {0}"},
{"Array element is not a method", "\u6570\u7EC4\u5143\u7D20\u4E0D\u662F\u65B9\u6CD5"},
{"Array index must be a integer type", "\u6570\u7EC4\u7D22\u5F15\u5FC5\u987B\u4E3A\u6574\u6570\u7C7B\u578B"},
{"base directory:", "\u57FA\u76EE\u5F55: {0}"},
{"bootclasspath:", "\u5F15\u5BFC\u7C7B\u8DEF\u5F84: {0}"},
{"Breakpoint hit:", "\u65AD\u70B9\u547D\u4E2D: "},
{"breakpoint", "\u65AD\u70B9{0}"},
{"Breakpoints set:", "\u65AD\u70B9\u96C6:"},
{"Breakpoints can be located only in classes.", "\u65AD\u70B9\u53EA\u80FD\u4F4D\u4E8E\u7C7B\u4E2D\u3002{0}\u662F\u63A5\u53E3\u6216\u6570\u7EC4\u3002"},
{"Can only trace", "\u53EA\u80FD\u8DDF\u8E2A 'methods', 'method exit' \u6216 'method exits'"},
{"cannot redefine existing connection", "{0}\u65E0\u6CD5\u91CD\u65B0\u5B9A\u4E49\u73B0\u6709\u8FDE\u63A5"},
{"Cannot assign to a method invocation", "\u65E0\u6CD5\u5206\u914D\u5230\u65B9\u6CD5\u8C03\u7528"},
{"Cannot specify command line with connector:", "\u65E0\u6CD5\u6307\u5B9A\u5E26\u6709\u8FDE\u63A5\u5668\u7684\u547D\u4EE4\u884C: {0}"},
{"Cannot specify target vm arguments with connector:", "\u65E0\u6CD5\u6307\u5B9A\u5E26\u6709\u8FDE\u63A5\u5668\u7684\u76EE\u6807 VM \u53C2\u6570: {0}"},
{"Class containing field must be specified.", "\u5FC5\u987B\u6307\u5B9A\u5305\u542B\u5B57\u6BB5\u7684\u7C7B\u3002"},
{"Class:", "\u7C7B: {0}"},
{"Classic VM no longer supported.", "\u4E0D\u518D\u652F\u6301\u7ECF\u5178 VM\u3002"},
{"classpath:", "\u7C7B\u8DEF\u5F84: {0}"},
{"colon mark", ":"},
{"colon space", ": "},
{"Command is not supported on the target VM", "\u76EE\u6807 VM \u4E0D\u652F\u6301\u547D\u4EE4 ''{0}''"},
{"Command is not supported on a read-only VM connection", "\u53EA\u8BFB VM \u8FDE\u63A5\u4E0D\u652F\u6301\u547D\u4EE4 ''{0}''"},
{"Command not valid until the VM is started with the run command", "\u5728\u4F7F\u7528 ''run'' \u547D\u4EE4\u542F\u52A8 VM \u524D, \u547D\u4EE4 ''{0}'' \u662F\u65E0\u6548\u7684"},
{"Condition must be boolean", "\u6761\u4EF6\u5FC5\u987B\u662F\u5E03\u5C14\u578B"},
{"Connector and Transport name", " \u8FDE\u63A5\u5668: {0}, \u4F20\u8F93: {1}"},
{"Connector argument nodefault", " \u53C2\u6570: {0} (\u65E0\u9ED8\u8BA4\u503C)"},
{"Connector argument default", " \u53C2\u6570: {0}, \u9ED8\u8BA4\u503C: {1}"},
{"Connector description", " \u8BF4\u660E: {0}"},
{"Connector required argument nodefault", " \u6240\u9700\u7684\u53C2\u6570: {0} (\u65E0\u9ED8\u8BA4\u503C)"},
{"Connector required argument default", " \u6240\u9700\u7684\u53C2\u6570: {0}, \u9ED8\u8BA4\u503C: {1}"},
{"Connectors available", "\u53EF\u7528\u8FDE\u63A5\u5668\u4E3A:"},
{"Constant is not a method", "\u5E38\u91CF\u4E0D\u662F\u65B9\u6CD5"},
{"Could not open:", "\u65E0\u6CD5\u6253\u5F00: {0}"},
{"Current method is native", "\u5F53\u524D\u65B9\u6CD5\u4E3A\u672C\u673A\u65B9\u6CD5"},
{"Current thread died. Execution continuing...", "\u5F53\u524D\u7EBF\u7A0B{0}\u5DF2\u6210\u4E3A\u6B7B\u7EBF\u7A0B\u3002\u7EE7\u7EED\u6267\u884C..."},
{"Current thread isnt suspended.", "\u5F53\u524D\u7EBF\u7A0B\u672A\u6302\u8D77\u3002"},
{"Current thread not set.", "\u5F53\u524D\u7EBF\u7A0B\u672A\u8BBE\u7F6E\u3002"},
{"dbgtrace flag value must be an integer:", "dbgtrace \u6807\u8BB0\u503C\u5FC5\u987B\u4E3A\u6574\u6570: {0}"},
{"Deferring.", "\u6B63\u5728\u5EF6\u8FDF{0}\u3002\n\u5C06\u5728\u52A0\u8F7D\u7C7B\u540E\u8BBE\u7F6E\u3002"},
{"End of stack.", "\u5806\u6808\u7ED3\u675F\u3002"},
{"Error popping frame", "\u4F7F\u5E27\u51FA\u6808\u65F6\u51FA\u9519 - {0}"},
{"Error reading file", "\u8BFB\u53D6 ''{0}'' \u65F6\u51FA\u9519 - {1}"},
{"Error redefining class to file", "\u5C06{0}\u91CD\u65B0\u5B9A\u4E49\u4E3A{1}\u65F6\u51FA\u9519 - {2}"},
{"exceptionSpec all", "\u6240\u6709{0}"},
{"exceptionSpec caught", "\u6355\u83B7\u7684{0}"},
{"exceptionSpec uncaught", "\u672A\u6355\u83B7\u7684{0}"},
{"Exception in expression:", "\u8868\u8FBE\u5F0F\u4E2D\u51FA\u73B0\u5F02\u5E38\u9519\u8BEF: {0}"},
{"Exception occurred caught", "\u51FA\u73B0\u5F02\u5E38\u9519\u8BEF: {0} (\u5C06\u5728\u4EE5\u4E0B\u4F4D\u7F6E\u6355\u83B7: {1})"},
{"Exception occurred uncaught", "\u51FA\u73B0\u5F02\u5E38\u9519\u8BEF: {0} (\u672A\u6355\u83B7)"},
{"Exceptions caught:", "\u51FA\u73B0\u8FD9\u4E9B\u5F02\u5E38\u9519\u8BEF\u65F6\u4E2D\u65AD:"},
{"expr is null", "{0} = \u7A7A\u503C"},
{"expr is value", "{0} = {1}"},
{"expr is value <collected>", " {0} = {1} <\u5DF2\u6536\u96C6>"},
{"Expression cannot be void", "\u8868\u8FBE\u5F0F\u4E0D\u80FD\u4E3A\u7A7A"},
{"Expression must evaluate to an object", "\u8868\u8FBE\u5F0F\u7684\u8BA1\u7B97\u7ED3\u679C\u5FC5\u987B\u4E3A\u5BF9\u8C61"},
{"extends:", "\u6269\u5C55: {0}"},
{"Failed reading output", "\u65E0\u6CD5\u8BFB\u53D6\u5B50 Java \u89E3\u91CA\u5668\u7684\u8F93\u51FA\u3002"},
{"Fatal error", "\u81F4\u547D\u9519\u8BEF:"},
{"Field access encountered before after", "\u5B57\u6BB5 ({0}) \u4E3A{1}, \u5C06\u4E3A{2}: "},
{"Field access encountered", "\u9047\u5230\u5B57\u6BB5 ({0}) \u8BBF\u95EE: "},
{"Field to unwatch not specified", "\u672A\u6307\u5B9A\u8981\u53D6\u6D88\u76D1\u89C6\u7684\u5B57\u6BB5\u3002"},
{"Field to watch not specified", "\u672A\u6307\u5B9A\u8981\u76D1\u89C6\u7684\u5B57\u6BB5\u3002"},
{"GC Disabled for", "\u5DF2\u5BF9{0}\u7981\u7528 GC:"},
{"GC Enabled for", "\u5DF2\u5BF9{0}\u542F\u7528 GC:"},
{"grouping begin character", "{"},
{"grouping end character", "}"},
{"Illegal Argument Exception", "\u975E\u6CD5\u53C2\u6570\u5F02\u5E38\u9519\u8BEF"},
{"Illegal connector argument", "\u975E\u6CD5\u8FDE\u63A5\u5668\u53C2\u6570: {0}"},
{"implementor:", "\u5B9E\u73B0\u8005: {0}"},
{"implements:", "\u5B9E\u73B0: {0}"},
{"Initializing progname", "\u6B63\u5728\u521D\u59CB\u5316{0}..."},
{"Input stream closed.", "\u8F93\u5165\u6D41\u5DF2\u5173\u95ED\u3002"},
{"Interface:", "\u63A5\u53E3: {0}"},
{"Internal debugger error.", "\u5185\u90E8\u8C03\u8BD5\u5668\u9519\u8BEF\u3002"},
{"Internal error: null ThreadInfo created", "\u5185\u90E8\u9519\u8BEF: \u521B\u5EFA\u4E86\u7A7A\u503C ThreadInfo"},
{"Internal error; unable to set", "\u5185\u90E8\u9519\u8BEF; \u65E0\u6CD5\u8BBE\u7F6E{0}"},
{"Internal exception during operation:", "\u64CD\u4F5C\u671F\u95F4\u51FA\u73B0\u5185\u90E8\u5F02\u5E38\u9519\u8BEF:\n {0}"},
{"Internal exception:", "\u5185\u90E8\u5F02\u5E38\u9519\u8BEF:"},
{"Invalid argument type name", "\u53C2\u6570\u7C7B\u578B\u540D\u79F0\u65E0\u6548"},
{"Invalid assignment syntax", "\u8D4B\u503C\u8BED\u6CD5\u65E0\u6548"},
{"Invalid command syntax", "\u547D\u4EE4\u8BED\u6CD5\u65E0\u6548"},
{"Invalid connect type", "\u8FDE\u63A5\u7C7B\u578B\u65E0\u6548"},
{"Invalid consecutive invocations", "\u8FDE\u7EED\u8C03\u7528\u65E0\u6548"},
{"Invalid exception object", "\u5F02\u5E38\u9519\u8BEF\u5BF9\u8C61\u65E0\u6548"},
{"Invalid method specification:", "\u65B9\u6CD5\u89C4\u8303\u65E0\u6548: {0}"},
{"Invalid option on class command", "\u7C7B\u547D\u4EE4\u7684\u9009\u9879\u65E0\u6548"},
{"invalid option", "\u9009\u9879\u65E0\u6548: {0}"},
{"Invalid thread status.", "\u7EBF\u7A0B\u72B6\u6001\u65E0\u6548\u3002"},
{"Invalid transport name:", "\u4F20\u8F93\u540D\u79F0\u65E0\u6548: {0}"},
{"I/O exception occurred:", "\u51FA\u73B0 I/O \u5F02\u5E38\u9519\u8BEF: {0}"},
{"is an ambiguous method name in", "\"{0}\" \u5728 \"{1}\" \u4E2D\u662F\u4E0D\u660E\u786E\u7684\u65B9\u6CD5\u540D\u79F0"},
{"is an invalid line number for", "{0,number,integer} \u662F{1}\u7684\u65E0\u6548\u884C\u53F7"},
{"is not a valid class name", "\"{0}\" \u4E0D\u662F\u6709\u6548\u7684\u7C7B\u540D\u3002"},
{"is not a valid field name", "\"{0}\" \u4E0D\u662F\u6709\u6548\u7684\u5B57\u6BB5\u540D\u3002"},
{"is not a valid id or class name", "\"{0}\" \u4E0D\u662F\u6709\u6548\u7684 ID \u6216\u7C7B\u540D\u3002"},
{"is not a valid line number or method name for", "\"{0}\" \u4E0D\u662F\u7C7B \"{1}\" \u7684\u6709\u6548\u884C\u53F7\u6216\u65B9\u6CD5\u540D"},
{"is not a valid method name", "\"{0}\" \u4E0D\u662F\u6709\u6548\u7684\u65B9\u6CD5\u540D\u3002"},
{"is not a valid thread id", "\"{0}\" \u4E0D\u662F\u6709\u6548\u7684\u7EBF\u7A0B ID\u3002"},
{"is not a valid threadgroup name", "\"{0}\" \u4E0D\u662F\u6709\u6548\u7684\u7EBF\u7A0B\u7EC4\u540D\u79F0\u3002"},
{"jdb prompt with no current thread", "> "},
{"jdb prompt thread name and current stack frame", "{0}[{1,number,integer}] "},
{"killed", "{0}\u5DF2\u7EC8\u6B62"},
{"killing thread:", "\u6B63\u5728\u7EC8\u6B62\u7EBF\u7A0B: {0}"},
{"Line number information not available for", "\u6B64\u4F4D\u7F6E\u7684\u6E90\u884C\u53F7\u4E0D\u53EF\u7528\u3002"},
{"line number", ":{0,number,integer}"},
{"list field typename and name", "{0} {1}\n"},
{"list field typename and name inherited", "{0} {1} (\u7EE7\u627F\u81EA{2})\n"},
{"list field typename and name hidden", "{0} {1} (\u9690\u85CF)\n"},
{"Listening at address:", "\u76D1\u542C\u5730\u5740: {0}"},
{"Local variable information not available.", "\u672C\u5730\u53D8\u91CF\u4FE1\u606F\u4E0D\u53EF\u7528\u3002\u8BF7\u4F7F\u7528 -g \u7F16\u8BD1\u4EE5\u751F\u6210\u53D8\u91CF\u4FE1\u606F"},
{"Local variables:", "\u672C\u5730\u53D8\u91CF:"},
{"<location unavailable>", "<\u4F4D\u7F6E\u4E0D\u53EF\u7528>"},
{"location", "\"\u7EBF\u7A0B={0}\", {1}"},
{"locationString", "{0}.{1}(), \u884C={2,number,integer} bci={3,number,integer}"},
{"Main class and arguments must be specified", "\u5FC5\u987B\u6307\u5B9A\u4E3B\u7C7B\u548C\u53C2\u6570"},
{"Method arguments:", "\u65B9\u6CD5\u53C2\u6570:"},
{"Method entered:", "\u5DF2\u8FDB\u5165\u65B9\u6CD5: "},
{"Method exited:", "\u5DF2\u9000\u51FA\u65B9\u6CD5"},
{"Method exitedValue:", "\u5DF2\u9000\u51FA\u65B9\u6CD5: \u8FD4\u56DE\u503C = {0}, "},
{"Method is overloaded; specify arguments", "\u5DF2\u91CD\u8F7D\u65B9\u6CD5{0}; \u8BF7\u6307\u5B9A\u53C2\u6570"},
{"minus version", "\u8FD9\u662F{0}\u7248\u672C {1,number,integer}.{2,number,integer} (Java SE \u7248\u672C {3})"},
{"Monitor information for thread", "\u76D1\u89C6\u7EBF\u7A0B{0}\u7684\u4FE1\u606F:"},
{"Monitor information for expr", "\u76D1\u89C6{0} ({1}) \u7684\u4FE1\u606F:"},
{"More than one class named", "\u591A\u4E2A\u7C7B\u7684\u540D\u79F0\u4E3A: ''{0}''"},
{"native method", "\u672C\u673A\u65B9\u6CD5"},
{"nested:", "\u5D4C\u5957: {0}"},
{"No attach address specified.", "\u672A\u6307\u5B9A\u9644\u52A0\u5730\u5740\u3002"},
{"No breakpoints set.", "\u672A\u8BBE\u7F6E\u65AD\u70B9\u3002"},
{"No class named", "\u6CA1\u6709\u540D\u4E3A ''{0}'' \u7684\u7C7B"},
{"No class specified.", "\u672A\u6307\u5B9A\u7C7B\u3002"},
{"No classpath specified.", "\u672A\u6307\u5B9A\u7C7B\u8DEF\u5F84\u3002"},
{"No code at line", "{1}\u4E2D\u7684\u884C {0,number,integer} \u5904\u6CA1\u6709\u4EE3\u7801"},
{"No connect specification.", "\u6CA1\u6709\u8FDE\u63A5\u89C4\u8303\u3002"},
{"No connector named:", "\u6CA1\u6709\u540D\u4E3A{0}\u7684\u8FDE\u63A5\u5668"},
{"No current thread", "\u6CA1\u6709\u5F53\u524D\u7EBF\u7A0B"},
{"No default thread specified:", "\u672A\u6307\u5B9A\u9ED8\u8BA4\u7EBF\u7A0B: \u8BF7\u5148\u4F7F\u7528 \"thread\" \u547D\u4EE4\u3002"},
{"No exception object specified.", "\u672A\u6307\u5B9A\u5F02\u5E38\u9519\u8BEF\u5BF9\u8C61\u3002"},
{"No exceptions caught.", "\u672A\u6355\u83B7\u5230\u5F02\u5E38\u9519\u8BEF\u3002"},
{"No expression specified.", "\u672A\u6307\u5B9A\u8868\u8FBE\u5F0F\u3002"},
{"No field in", "{1}\u4E2D\u6CA1\u6709\u5B57\u6BB5{0}"},
{"No frames on the current call stack", "\u5F53\u524D\u8C03\u7528\u5806\u6808\u4E0A\u6CA1\u6709\u5E27"},
{"No linenumber information for", "{0}\u6CA1\u6709\u884C\u53F7\u4FE1\u606F\u3002\u8BF7\u5C1D\u8BD5\u5728\u542F\u7528\u8C03\u8BD5\u7684\u60C5\u51B5\u4E0B\u7F16\u8BD1\u3002"},
{"No local variables", "\u6CA1\u6709\u672C\u5730\u53D8\u91CF"},
{"No method in", "{1}\u4E2D\u6CA1\u6709\u65B9\u6CD5{0}"},
{"No method specified.", "\u672A\u6307\u5B9A\u65B9\u6CD5\u3002"},
{"No monitor numbered:", "\u6CA1\u6709\u7F16\u53F7\u4E3A {0} \u7684\u76D1\u89C6\u5668"},
{"No monitors owned", " \u4E0D\u62E5\u6709\u76D1\u89C6\u5668"},
{"No object specified.", "\u672A\u6307\u5B9A\u5BF9\u8C61\u3002"},
{"No objects specified.", "\u672A\u6307\u5B9A\u5BF9\u8C61\u3002"},
{"No save index specified.", "\u672A\u6307\u5B9A\u4FDD\u5B58\u7D22\u5F15\u3002"},
{"No saved values", "\u6CA1\u6709\u4FDD\u5B58\u7684\u503C"},
{"No source information available for:", "\u6CA1\u6709\u53EF\u7528\u4E8E{0}\u7684\u6E90\u4FE1\u606F"},
{"No sourcedebugextension specified", "\u672A\u6307\u5B9A SourceDebugExtension"},
{"No sourcepath specified.", "\u672A\u6307\u5B9A\u6E90\u8DEF\u5F84\u3002"},
{"No thread specified.", "\u672A\u6307\u5B9A\u7EBF\u7A0B\u3002"},
{"No VM connected", "\u672A\u8FDE\u63A5 VM"},
{"No waiters", " \u6CA1\u6709\u7B49\u5F85\u8FDB\u7A0B"},
{"not a class", "{0}\u4E0D\u662F\u7C7B"},
{"Not a monitor number:", "\u4E0D\u662F\u76D1\u89C6\u5668\u7F16\u53F7: ''{0}''"},
{"not found (try the full name)", "\u627E\u4E0D\u5230{0} (\u8BF7\u5C1D\u8BD5\u4F7F\u7528\u5168\u540D)"},
{"Not found:", "\u627E\u4E0D\u5230: {0}"},
{"not found", "\u627E\u4E0D\u5230{0}"},
{"Not owned", " \u4E0D\u62E5\u6709"},
{"Not waiting for a monitor", " \u672A\u7B49\u5F85\u76D1\u89C6\u5668"},
{"Nothing suspended.", "\u672A\u6302\u8D77\u4EFB\u4F55\u5BF9\u8C61\u3002"},
{"object description and hex id", "({0}){1}"},
{"Operation is not supported on the target VM", "\u76EE\u6807 VM \u4E0D\u652F\u6301\u8BE5\u64CD\u4F5C"},
{"operation not yet supported", "\u5C1A\u4E0D\u652F\u6301\u8BE5\u64CD\u4F5C"},
{"Owned by:", " \u62E5\u6709\u8005: {0}, \u6761\u76EE\u8BA1\u6570: {1,number,integer}"},
{"Owned monitor:", " \u62E5\u6709\u7684\u76D1\u89C6\u5668: {0}"},
{"Parse exception:", "\u89E3\u6790\u5F02\u5E38\u9519\u8BEF: {0}"},
{"printbreakpointcommandusage", "\u7528\u6CD5: {0} <class>:<line_number> \u6216\n {1} <class>.<method_name>[(argument_type,...)]"},
{"Removed:", "\u5DF2\u5220\u9664: {0}"},
{"Requested stack frame is no longer active:", "\u8BF7\u6C42\u7684\u5806\u6808\u5E27\u4E0D\u518D\u6709\u6548: {0,number,integer}"},
{"run <args> command is valid only with launched VMs", "'run <args>' \u547D\u4EE4\u4EC5\u5BF9\u542F\u52A8\u7684 VM \u6709\u6548"},
{"run", "\u8FD0\u884C{0}"},
{"saved", "{0}\u5DF2\u4FDD\u5B58"},
{"Set deferred", "\u8BBE\u7F6E\u5EF6\u8FDF\u7684{0}"},
{"Set", "\u8BBE\u7F6E{0}"},
{"Source file not found:", "\u627E\u4E0D\u5230\u6E90\u6587\u4EF6: {0}"},
{"source line number and line", "{0,number,integer} {1}"},
{"source line number current line and line", "{0,number,integer} => {1}"},
{"sourcedebugextension", "SourceDebugExtension -- {0}"},
{"Specify class and method", "\u6307\u5B9A\u7C7B\u548C\u65B9\u6CD5"},
{"Specify classes to redefine", "\u6307\u5B9A\u8981\u91CD\u65B0\u5B9A\u4E49\u7684\u7C7B"},
{"Specify file name for class", "\u6307\u5B9A\u7C7B{0}\u7684\u6587\u4EF6\u540D"},
{"stack frame dump with pc", " [{0,number,integer}] {1}.{2} ({3}), pc = {4}"},
{"stack frame dump", " [{0,number,integer}] {1}.{2} ({3})"},
{"Step completed:", "\u5DF2\u5B8C\u6210\u7684\u6B65\u9AA4: "},
{"Stopping due to deferred breakpoint errors.", "\u7531\u4E8E\u5EF6\u8FDF\u65AD\u70B9\u9519\u8BEF\u800C\u505C\u6B62\u3002\n"},
{"subclass:", "\u5B50\u7C7B: {0}"},
{"subinterface:", "\u5B50\u63A5\u53E3: {0}"},
{"tab", "\t{0}"},
{"Target VM failed to initialize.", "\u65E0\u6CD5\u521D\u59CB\u5316\u76EE\u6807 VM\u3002"},
{"The application exited", "\u5E94\u7528\u7A0B\u5E8F\u5DF2\u9000\u51FA"},
{"The application has been disconnected", "\u5E94\u7528\u7A0B\u5E8F\u5DF2\u65AD\u5F00\u8FDE\u63A5"},
{"The gc command is no longer necessary.", "\u4E0D\u518D\u9700\u8981 'gc' \u547D\u4EE4\u3002\n\u6240\u6709\u5BF9\u8C61\u5DF2\u7167\u5E38\u8FDB\u884C\u5783\u573E\u6536\u96C6\u3002\u8BF7\u4F7F\u7528 'enablegc' \u548C 'disablegc'\n\u547D\u4EE4\u6765\u63A7\u5236\u5404\u4E2A\u5BF9\u8C61\u7684\u5783\u573E\u6536\u96C6\u3002"},
{"The load command is no longer supported.", "\u4E0D\u518D\u652F\u6301 'load' \u547D\u4EE4\u3002"},
{"The memory command is no longer supported.", "\u4E0D\u518D\u652F\u6301 'memory' \u547D\u4EE4\u3002"},
{"The VM does not use paths", "VM \u4E0D\u4F7F\u7528\u8DEF\u5F84"},
{"Thread is not running (no stack).", "\u7EBF\u7A0B\u672A\u8FD0\u884C (\u6CA1\u6709\u5806\u6808)\u3002"},
{"Thread number not specified.", "\u672A\u6307\u5B9A\u7EBF\u7A0B\u7F16\u53F7\u3002"},
{"Thread:", "{0}:"},
{"Thread Group:", "\u7EC4{0}:"},
{"Thread description name unknownStatus BP", " {0} {1}\u672A\u77E5 (\u5728\u65AD\u70B9\u5904)"},
{"Thread description name unknownStatus", " {0} {1}\u672A\u77E5"},
{"Thread description name zombieStatus BP", " {0} {1}\u5904\u4E8E\u50F5\u6B7B\u72B6\u6001 (\u5728\u65AD\u70B9\u5904)"},
{"Thread description name zombieStatus", " {0} {1}\u5904\u4E8E\u50F5\u6B7B\u72B6\u6001"},
{"Thread description name runningStatus BP", " {0} {1}\u6B63\u5728\u8FD0\u884C (\u5728\u65AD\u70B9\u5904)"},
{"Thread description name runningStatus", " {0} {1}\u6B63\u5728\u8FD0\u884C"},
{"Thread description name sleepingStatus BP", " {0} {1}\u6B63\u5728\u4F11\u7720 (\u5728\u65AD\u70B9\u5904)"},
{"Thread description name sleepingStatus", " {0} {1}\u6B63\u5728\u4F11\u7720"},
{"Thread description name waitingStatus BP", " {0} {1}\u6B63\u5728\u7B49\u5F85\u76D1\u89C6\u5668 (\u5728\u65AD\u70B9\u5904)"},
{"Thread description name waitingStatus", " {0} {1}\u6B63\u5728\u7B49\u5F85\u76D1\u89C6\u5668"},
{"Thread description name condWaitstatus BP", " {0} {1}\u6B63\u5728\u6267\u884C\u6761\u4EF6\u7B49\u5F85 (\u5728\u65AD\u70B9\u5904)"},
{"Thread description name condWaitstatus", " {0} {1}\u6B63\u5728\u6267\u884C\u6761\u4EF6\u7B49\u5F85"},
{"Thread has been resumed", "\u5DF2\u6062\u590D\u7EBF\u7A0B"},
{"Thread not suspended", "\u672A\u6302\u8D77\u7EBF\u7A0B"},
{"thread group number description name", "{0,number,integer}\u3002{1} {2}"},
{"Threadgroup name not specified.", "\u672A\u6307\u5B9A\u7EBF\u7A0B\u7EC4\u540D\u3002"},
{"Threads must be suspended", "\u5FC5\u987B\u6302\u8D77\u7EBF\u7A0B"},
{"trace method exit in effect for", "\u6B63\u5728\u5BF9{0}\u5B9E\u884C trace method exit"},
{"trace method exits in effect", "\u6B63\u5728\u5B9E\u884C trace method exits"},
{"trace methods in effect", "\u6B63\u5728\u5B9E\u884C trace methods"},
{"trace go method exit in effect for", "\u6B63\u5728\u5BF9{0}\u5B9E\u884C trace go method exit"},
{"trace go method exits in effect", "\u6B63\u5728\u5B9E\u884C trace go method exits"},
{"trace go methods in effect", "\u6B63\u5728\u5B9E\u884C trace go methods"},
{"trace not in effect", "\u672A\u5B9E\u884C trace"},
{"Unable to attach to target VM.", "\u65E0\u6CD5\u9644\u52A0\u5230\u76EE\u6807 VM\u3002"},
{"Unable to display process output:", "\u65E0\u6CD5\u663E\u793A\u8FDB\u7A0B\u8F93\u51FA: {0}"},
{"Unable to launch target VM.", "\u65E0\u6CD5\u542F\u52A8\u76EE\u6807 VM\u3002"},
{"Unable to set deferred", "\u65E0\u6CD5\u8BBE\u7F6E\u5EF6\u8FDF\u7684{0}: {1}"},
{"Unable to set main class and arguments", "\u65E0\u6CD5\u8BBE\u7F6E\u4E3B\u7C7B\u548C\u53C2\u6570"},
{"Unable to set", "\u65E0\u6CD5\u8BBE\u7F6E{0}: {1}"},
{"Unexpected event type", "\u610F\u5916\u7684\u4E8B\u4EF6\u7C7B\u578B: {0}"},
{"unknown", "\u672A\u77E5"},
{"Unmonitoring", "\u53D6\u6D88\u76D1\u89C6{0} "},
{"Unrecognized command. Try help...", "\u65E0\u6CD5\u8BC6\u522B\u7684\u547D\u4EE4: ''{0}''\u3002\u8BF7\u5C1D\u8BD5\u83B7\u5F97\u5E2E\u52A9..."},
{"Usage: catch exception", "\u7528\u6CD5: catch [uncaught|caught|all] <class id>|<class pattern>"},
{"Usage: ignore exception", "\u7528\u6CD5: ignore [uncaught|caught|all] <class id>|<class pattern>"},
{"Usage: down [n frames]", "\u7528\u6CD5: down [n frames]"},
{"Usage: kill <thread id> <throwable>", "\u7528\u6CD5: kill <thread id> <throwable>"},
{"Usage: read <command-filename>", "\u7528\u6CD5: read <command-filename>"},
{"Usage: unmonitor <monitor#>", "\u7528\u6CD5: unmonitor <monitor#>"},
{"Usage: up [n frames]", "\u7528\u6CD5: up [n frames]"},
{"Use java minus X to see", "\u4F7F\u7528 'java -X' \u53EF\u4EE5\u67E5\u770B\u53EF\u7528\u7684\u975E\u6807\u51C6\u9009\u9879"},
{"Use stop at to set a breakpoint at a line number", "\u4F7F\u7528 'stop at' \u53EF\u4EE5\u5728\u884C\u53F7\u5904\u8BBE\u7F6E\u65AD\u70B9"},
{"VM already running. use cont to continue after events.", "VM \u5DF2\u5728\u8FD0\u884C\u3002\u8BF7\u4F7F\u7528 'cont' \u4EE5\u5728\u4E8B\u4EF6\u7ED3\u675F\u540E\u7EE7\u7EED\u3002"},
{"VM Started:", "VM \u5DF2\u542F\u52A8: "},
{"vmstartexception", "VM \u542F\u52A8\u5F02\u5E38\u9519\u8BEF: {0}"},
{"Waiting for monitor:", " \u6B63\u5728\u7B49\u5F85\u76D1\u89C6\u5668: {0}"},
{"Waiting thread:", " \u6B63\u5728\u7B49\u5F85\u7EBF\u7A0B: {0}"},
{"watch accesses of", "\u76D1\u89C6{0}.{1}\u7684\u8BBF\u95EE"},
{"watch modification of", "\u76D1\u89C6{0}.{1}\u7684\u4FEE\u6539"},
{"zz help text",
"** \u547D\u4EE4\u5217\u8868 **\nconnectors -- \u5217\u51FA\u6B64 VM \u4E2D\u53EF\u7528\u7684\u8FDE\u63A5\u5668\u548C\u4F20\u8F93\n\nrun [class [args]] -- \u5F00\u59CB\u6267\u884C\u5E94\u7528\u7A0B\u5E8F\u7684\u4E3B\u7C7B\n\nthreads [threadgroup] -- \u5217\u51FA\u7EBF\u7A0B\nthread <thread id> -- \u8BBE\u7F6E\u9ED8\u8BA4\u7EBF\u7A0B\nsuspend [thread id(s)] -- \u6302\u8D77\u7EBF\u7A0B (\u9ED8\u8BA4\u503C: all)\nresume [thread id(s)] -- \u6062\u590D\u7EBF\u7A0B (\u9ED8\u8BA4\u503C: all)\nwhere [<thread id> | all] -- \u8F6C\u50A8\u7EBF\u7A0B\u7684\u5806\u6808\nwherei [<thread id> | all]-- \u8F6C\u50A8\u7EBF\u7A0B\u7684\u5806\u6808, \u4EE5\u53CA pc \u4FE1\u606F\nup [n frames] -- \u4E0A\u79FB\u7EBF\u7A0B\u7684\u5806\u6808\ndown [n frames] -- \u4E0B\u79FB\u7EBF\u7A0B\u7684\u5806\u6808\nkill <thread id> <expr> -- \u7EC8\u6B62\u5177\u6709\u7ED9\u5B9A\u7684\u5F02\u5E38\u9519\u8BEF\u5BF9\u8C61\u7684\u7EBF\u7A0B\ninterrupt <thread id> -- \u4E2D\u65AD\u7EBF\u7A0B\n\nprint <expr> -- \u8F93\u51FA\u8868\u8FBE\u5F0F\u7684\u503C\ndump <expr> -- \u8F93\u51FA\u6240\u6709\u5BF9\u8C61\u4FE1\u606F\neval <expr> -- \u5BF9\u8868\u8FBE\u5F0F\u6C42\u503C (\u4E0E print \u76F8\u540C)\nset <lvalue> = <expr> -- \u5411\u5B57\u6BB5/\u53D8\u91CF/\u6570\u7EC4\u5143\u7D20\u5206\u914D\u65B0\u503C\nlocals -- \u8F93\u51FA\u5F53\u524D\u5806\u6808\u5E27\u4E2D\u7684\u6240\u6709\u672C\u5730\u53D8\u91CF\n\nclasses -- \u5217\u51FA\u5F53\u524D\u5DF2\u77E5\u7684\u7C7B\nclass <class id> -- \u663E\u793A\u5DF2\u547D\u540D\u7C7B\u7684\u8BE6\u7EC6\u8D44\u6599\nmethods <class id> -- \u5217\u51FA\u7C7B\u7684\u65B9\u6CD5\nfields <class id> -- \u5217\u51FA\u7C7B\u7684\u5B57\u6BB5\n\nthreadgroups -- \u5217\u51FA\u7EBF\u7A0B\u7EC4\nthreadgroup <name> -- \u8BBE\u7F6E\u5F53\u524D\u7EBF\u7A0B\u7EC4\n\nstop in <class id>.<method>[(argument_type,...)]\n -- \u5728\u65B9\u6CD5\u4E2D\u8BBE\u7F6E\u65AD\u70B9\nstop at <class id>:<line> -- \u5728\u884C\u4E2D\u8BBE\u7F6E\u65AD\u70B9\nclear <class id>.<method>[(argument_type,...)]\n 
-- \u6E05\u9664\u65B9\u6CD5\u4E2D\u7684\u65AD\u70B9\nclear <class id>:<line> -- \u6E05\u9664\u884C\u4E2D\u7684\u65AD\u70B9\nclear -- \u5217\u51FA\u65AD\u70B9\ncatch [uncaught|caught|all] <class id>|<class pattern>\n -- \u51FA\u73B0\u6307\u5B9A\u7684\u5F02\u5E38\u9519\u8BEF\u65F6\u4E2D\u65AD\nignore [uncaught|caught|all] <class id>|<class pattern>\n -- \u5BF9\u4E8E\u6307\u5B9A\u7684\u5F02\u5E38\u9519\u8BEF, \u53D6\u6D88 'catch'\nwatch [access|all] <class id>.<field name>\n -- \u76D1\u89C6\u5BF9\u5B57\u6BB5\u7684\u8BBF\u95EE/\u4FEE\u6539\nunwatch [access|all] <class id>.<field name>\n -- \u505C\u6B62\u76D1\u89C6\u5BF9\u5B57\u6BB5\u7684\u8BBF\u95EE/\u4FEE\u6539\ntrace [go] methods [thread]\n -- \u8DDF\u8E2A\u65B9\u6CD5\u8FDB\u5165\u548C\u9000\u51FA\u3002\n -- \u9664\u975E\u6307\u5B9A 'go', \u5426\u5219\u6302\u8D77\u6240\u6709\u7EBF\u7A0B\ntrace [go] method exit | exits [thread]\n -- \u8DDF\u8E2A\u5F53\u524D\u65B9\u6CD5\u7684\u9000\u51FA, \u6216\u8005\u6240\u6709\u65B9\u6CD5\u7684\u9000\u51FA\n -- \u9664\u975E\u6307\u5B9A 'go', \u5426\u5219\u6302\u8D77\u6240\u6709\u7EBF\u7A0B\nuntrace [methods] -- \u505C\u6B62\u8DDF\u8E2A\u65B9\u6CD5\u8FDB\u5165\u548C/\u6216\u9000\u51FA\nstep -- \u6267\u884C\u5F53\u524D\u884C\nstep up -- \u4E00\u76F4\u6267\u884C, \u76F4\u5230\u5F53\u524D\u65B9\u6CD5\u8FD4\u56DE\u5230\u5176\u8C03\u7528\u65B9\nstepi -- \u6267\u884C\u5F53\u524D\u6307\u4EE4\n\u4E0B\u4E00\u6B65 -- \u6B65\u8FDB\u4E00\u884C (\u6B65\u8FC7\u8C03\u7528)\ncont -- \u4ECE\u65AD\u70B9\u5904\u7EE7\u7EED\u6267\u884C\n\nlist [line number|method] -- \u8F93\u51FA\u6E90\u4EE3\u7801\nuse (\u6216 sourcepath) [source file path]\n -- \u663E\u793A\u6216\u66F4\u6539\u6E90\u8DEF\u5F84\nexclude [<class pattern>, ... 
| \"none\"]\n -- \u5BF9\u4E8E\u6307\u5B9A\u7684\u7C7B, \u4E0D\u62A5\u544A\u6B65\u9AA4\u6216\u65B9\u6CD5\u4E8B\u4EF6\nclasspath -- \u4ECE\u76EE\u6807 VM \u8F93\u51FA\u7C7B\u8DEF\u5F84\u4FE1\u606F\n\nmonitor <command> -- \u6BCF\u6B21\u7A0B\u5E8F\u505C\u6B62\u65F6\u6267\u884C\u547D\u4EE4\nmonitor -- \u5217\u51FA\u76D1\u89C6\u5668\nunmonitor <monitor#> -- \u5220\u9664\u76D1\u89C6\u5668\nread <filename> -- \u8BFB\u53D6\u5E76\u6267\u884C\u547D\u4EE4\u6587\u4EF6\n\nlock <expr> -- \u8F93\u51FA\u5BF9\u8C61\u7684\u9501\u4FE1\u606F\nthreadlocks [thread id] -- \u8F93\u51FA\u7EBF\u7A0B\u7684\u9501\u4FE1\u606F\n\npop -- \u901A\u8FC7\u5F53\u524D\u5E27\u51FA\u6808, \u4E14\u5305\u542B\u5F53\u524D\u5E27\nreenter -- \u4E0E pop \u76F8\u540C, \u4F46\u91CD\u65B0\u8FDB\u5165\u5F53\u524D\u5E27\nredefine <class id> <class file name>\n -- \u91CD\u65B0\u5B9A\u4E49\u7C7B\u7684\u4EE3\u7801\n\ndisablegc <expr> -- \u7981\u6B62\u5BF9\u8C61\u7684\u5783\u573E\u6536\u96C6\nenablegc <expr> -- \u5141\u8BB8\u5BF9\u8C61\u7684\u5783\u573E\u6536\u96C6\n\n!! -- \u91CD\u590D\u6267\u884C\u6700\u540E\u4E00\u4E2A\u547D\u4EE4\n<n> <command> -- \u5C06\u547D\u4EE4\u91CD\u590D\u6267\u884C n \u6B21\n# <command> -- \u653E\u5F03 (\u65E0\u64CD\u4F5C)\nhelp (\u6216 ?) -- \u5217\u51FA\u547D\u4EE4\nversion -- \u8F93\u51FA\u7248\u672C\u4FE1\u606F\nexit (\u6216 quit) -- \u9000\u51FA\u8C03\u8BD5\u5668\n\n<class id>: \u5E26\u6709\u7A0B\u5E8F\u5305\u9650\u5B9A\u7B26\u7684\u5B8C\u6574\u7C7B\u540D\n<class pattern>: \u5E26\u6709\u524D\u5BFC\u6216\u5C3E\u968F\u901A\u914D\u7B26 ('*') \u7684\u7C7B\u540D\n<thread id>: 'threads' \u547D\u4EE4\u4E2D\u62A5\u544A\u7684\u7EBF\u7A0B\u7F16\u53F7\n<expr>: Java(TM) \u7F16\u7A0B\u8BED\u8A00\u8868\u8FBE\u5F0F\u3002\n\u652F\u6301\u5927\u591A\u6570\u5E38\u89C1\u8BED\u6CD5\u3002\n\n\u53EF\u4EE5\u5C06\u542F\u52A8\u547D\u4EE4\u7F6E\u4E8E \"jdb.ini\" \u6216 \".jdbrc\" \u4E2D\n\u4F4D\u4E8E user.home \u6216 user.dir \u4E2D"},
{"zz usage text",
"\u7528\u6CD5: {0} <options> <class> <arguments>\n\n\u5176\u4E2D, \u9009\u9879\u5305\u62EC:\n -help \u8F93\u51FA\u6B64\u6D88\u606F\u5E76\u9000\u51FA\n -sourcepath <\u7531 \"{1}\" \u5206\u9694\u7684\u76EE\u5F55>\n \u8981\u5728\u5176\u4E2D\u67E5\u627E\u6E90\u6587\u4EF6\u7684\u76EE\u5F55\n -attach <address>\n \u4F7F\u7528\u6807\u51C6\u8FDE\u63A5\u5668\u9644\u52A0\u5230\u6307\u5B9A\u5730\u5740\u5904\u6B63\u5728\u8FD0\u884C\u7684 VM\n -listen <address>\n \u7B49\u5F85\u6B63\u5728\u8FD0\u884C\u7684 VM \u4F7F\u7528\u6807\u51C6\u8FDE\u63A5\u5668\u5728\u6307\u5B9A\u5730\u5740\u5904\u8FDE\u63A5\n -listenany\n \u7B49\u5F85\u6B63\u5728\u8FD0\u884C\u7684 VM \u4F7F\u7528\u6807\u51C6\u8FDE\u63A5\u5668\u5728\u4EFB\u4F55\u53EF\u7528\u5730\u5740\u5904\u8FDE\u63A5\n -launch\n \u7ACB\u5373\u542F\u52A8 VM \u800C\u4E0D\u662F\u7B49\u5F85 ''run'' \u547D\u4EE4\n -listconnectors \u5217\u51FA\u6B64 VM \u4E2D\u7684\u53EF\u7528\u8FDE\u63A5\u5668\n -connect <connector-name>:<name1>=<value1>,...\n \u4F7F\u7528\u6240\u5217\u53C2\u6570\u503C\u901A\u8FC7\u6307\u5B9A\u7684\u8FDE\u63A5\u5668\u8FDE\u63A5\u5230\u76EE\u6807 VM\n -dbgtrace [flags] \u8F93\u51FA\u4FE1\u606F\u4F9B\u8C03\u8BD5{0}\n -tclient \u5728 HotSpot(TM) \u5BA2\u6237\u673A\u7F16\u8BD1\u5668\u4E2D\u8FD0\u884C\u5E94\u7528\u7A0B\u5E8F\n -tserver \u5728 HotSpot(TM) \u670D\u52A1\u5668\u7F16\u8BD1\u5668\u4E2D\u8FD0\u884C\u5E94\u7528\u7A0B\u5E8F\n\n\u8F6C\u53D1\u5230\u88AB\u8C03\u8BD5\u8FDB\u7A0B\u7684\u9009\u9879:\n -v -verbose[:class|gc|jni]\n \u542F\u7528\u8BE6\u7EC6\u6A21\u5F0F\n -D<name>=<value> \u8BBE\u7F6E\u7CFB\u7EDF\u5C5E\u6027\n -classpath <\u7531 \"{1}\" \u5206\u9694\u7684\u76EE\u5F55>\n \u5217\u51FA\u8981\u5728\u5176\u4E2D\u67E5\u627E\u7C7B\u7684\u76EE\u5F55\n -X<option> \u975E\u6807\u51C6\u76EE\u6807 VM \u9009\u9879\n\n<class> \u662F\u8981\u5F00\u59CB\u8C03\u8BD5\u7684\u7C7B\u7684\u540D\u79F0\n<arguments> \u662F\u4F20\u9012\u5230 <class> \u7684 main() 
\u65B9\u6CD5\u7684\u53C2\u6570\n\n\u8981\u83B7\u5F97\u547D\u4EE4\u7684\u5E2E\u52A9, \u8BF7\u5728{0}\u63D0\u793A\u4E0B\u952E\u5165 ''help''"},
// END OF MATERIAL TO LOCALIZE
};
return temp;
}
}
|
googleapis/google-cloud-java | 37,483 | java-bigqueryreservation/proto-google-cloud-bigqueryreservation-v1/src/main/java/com/google/cloud/bigquery/reservation/v1/CreateReservationRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/reservation/v1/reservation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.reservation.v1;
/**
*
*
* <pre>
* The request for
* [ReservationService.CreateReservation][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation].
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.reservation.v1.CreateReservationRequest}
*/
public final class CreateReservationRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.reservation.v1.CreateReservationRequest)
CreateReservationRequestOrBuilder {
  // Version pin for Java serialization of this generated message class.
  private static final long serialVersionUID = 0L;
  // Use CreateReservationRequest.newBuilder() to construct.
  private CreateReservationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor: string fields start as "" so accessors never observe null.
  private CreateReservationRequest() {
    parent_ = "";
    reservationId_ = "";
  }
  // Invoked reflectively by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateReservationRequest();
  }
  // Returns the shared proto descriptor for this message type, defined in
  // google/cloud/bigquery/reservation/v1/reservation.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.bigquery.reservation.v1.ReservationProto
        .internal_static_google_cloud_bigquery_reservation_v1_CreateReservationRequest_descriptor;
  }
  // Wires the generated field accessors to this class and its Builder for
  // reflection-based access by the protobuf runtime.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.bigquery.reservation.v1.ReservationProto
        .internal_static_google_cloud_bigquery_reservation_v1_CreateReservationRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.bigquery.reservation.v1.CreateReservationRequest.class,
            com.google.cloud.bigquery.reservation.v1.CreateReservationRequest.Builder.class);
  }
  // Presence bits; bit 0x00000001 tracks whether `reservation` (field 3) is set.
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; lazily decoded/encoded and cached
  // by the accessors below (standard protobuf lazy string representation).
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. Project, location. E.g.,
   * `projects/myproject/locations/US`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access after parsing: decode the ByteString once and
      // cache the String so later calls are allocation-free.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Project, location. E.g.,
   * `projects/myproject/locations/US`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Mirror of getParent(): encode the String once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int RESERVATION_ID_FIELD_NUMBER = 2;

  // Holds either a String or a ByteString; lazily converted and cached by the
  // accessors below (same scheme as parent_).
  @SuppressWarnings("serial")
  private volatile java.lang.Object reservationId_ = "";
  /**
   *
   *
   * <pre>
   * The reservation ID. It must only contain lower case alphanumeric
   * characters or dashes. It must start with a letter and must not end
   * with a dash. Its maximum length is 64 characters.
   * </pre>
   *
   * <code>string reservation_id = 2;</code>
   *
   * @return The reservationId.
   */
  @java.lang.Override
  public java.lang.String getReservationId() {
    java.lang.Object ref = reservationId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      reservationId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The reservation ID. It must only contain lower case alphanumeric
   * characters or dashes. It must start with a letter and must not end
   * with a dash. Its maximum length is 64 characters.
   * </pre>
   *
   * <code>string reservation_id = 2;</code>
   *
   * @return The bytes for reservationId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getReservationIdBytes() {
    java.lang.Object ref = reservationId_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      reservationId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int RESERVATION_FIELD_NUMBER = 3;
  // Message-typed field; null until set. Presence is tracked via bitField0_
  // bit 0x00000001 rather than by null-checking alone.
  private com.google.cloud.bigquery.reservation.v1.Reservation reservation_;
  /**
   *
   *
   * <pre>
   * Definition of the new reservation to create.
   * </pre>
   *
   * <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
   *
   * @return Whether the reservation field is set.
   */
  @java.lang.Override
  public boolean hasReservation() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Definition of the new reservation to create.
   * </pre>
   *
   * <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
   *
   * @return The reservation.
   */
  @java.lang.Override
  public com.google.cloud.bigquery.reservation.v1.Reservation getReservation() {
    // Returns the default instance (never null) when the field is unset.
    return reservation_ == null
        ? com.google.cloud.bigquery.reservation.v1.Reservation.getDefaultInstance()
        : reservation_;
  }
  /**
   *
   *
   * <pre>
   * Definition of the new reservation to create.
   * </pre>
   *
   * <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
   */
  @java.lang.Override
  public com.google.cloud.bigquery.reservation.v1.ReservationOrBuilder getReservationOrBuilder() {
    return reservation_ == null
        ? com.google.cloud.bigquery.reservation.v1.Reservation.getDefaultInstance()
        : reservation_;
  }
  // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  // This message has no required proto fields, so it is always initialized;
  // the result is cached on first call.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order; empty strings and the unset
  // reservation message are skipped per proto3 default-value elision.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(reservationId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, reservationId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getReservation());
    }
    // Preserve any fields this binary did not know about when parsing.
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the serialized byte size; the
  // field conditions must mirror writeTo() exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(reservationId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, reservationId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getReservation());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all fields, including presence of the reservation
  // message and any unknown fields carried through parsing.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.bigquery.reservation.v1.CreateReservationRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.bigquery.reservation.v1.CreateReservationRequest other =
        (com.google.cloud.bigquery.reservation.v1.CreateReservationRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getReservationId().equals(other.getReservationId())) return false;
    if (hasReservation() != other.hasReservation()) return false;
    if (hasReservation()) {
      if (!getReservation().equals(other.getReservation())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(); folds in field numbers so identical values
  // in different fields hash differently. Memoized after first computation.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
    hash = (53 * hash) + getReservationId().hashCode();
    if (hasReservation()) {
      hash = (37 * hash) + RESERVATION_FIELD_NUMBER;
      hash = (53 * hash) + getReservation().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistryLite. All delegate to PARSER / the
  // GeneratedMessageV3 IO helpers.
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message body,
  // allowing several messages on one stream.
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories. newBuilder() starts empty; newBuilder(prototype) starts
  // pre-populated with the prototype's fields.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.bigquery.reservation.v1.CreateReservationRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // For the default instance, skip the mergeFrom since there is nothing to copy.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Runtime hook: builds a Builder parented into a containing message tree.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The request for
* [ReservationService.CreateReservation][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation].
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.reservation.v1.CreateReservationRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.reservation.v1.CreateReservationRequest)
com.google.cloud.bigquery.reservation.v1.CreateReservationRequestOrBuilder {
    // Same descriptor/accessor-table plumbing as the enclosing message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.bigquery.reservation.v1.ReservationProto
          .internal_static_google_cloud_bigquery_reservation_v1_CreateReservationRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.bigquery.reservation.v1.ReservationProto
          .internal_static_google_cloud_bigquery_reservation_v1_CreateReservationRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.bigquery.reservation.v1.CreateReservationRequest.class,
              com.google.cloud.bigquery.reservation.v1.CreateReservationRequest.Builder.class);
    }
    // Construct using
    // com.google.cloud.bigquery.reservation.v1.CreateReservationRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the nested-message field builder when the runtime is
    // configured to always use field builders (descriptor-based messages).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getReservationFieldBuilder();
      }
    }
    // Resets every field to its default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      reservationId_ = "";
      reservation_ = null;
      if (reservationBuilder_ != null) {
        // Release the nested builder so a stale sub-builder cannot resurface.
        reservationBuilder_.dispose();
        reservationBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.bigquery.reservation.v1.ReservationProto
          .internal_static_google_cloud_bigquery_reservation_v1_CreateReservationRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.bigquery.reservation.v1.CreateReservationRequest
        getDefaultInstanceForType() {
      return com.google.cloud.bigquery.reservation.v1.CreateReservationRequest.getDefaultInstance();
    }
    // Builds and verifies initialization (always true for this message, which
    // has no required fields).
    @java.lang.Override
    public com.google.cloud.bigquery.reservation.v1.CreateReservationRequest build() {
      com.google.cloud.bigquery.reservation.v1.CreateReservationRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check; copies fields only when any
    // builder bit is set.
    @java.lang.Override
    public com.google.cloud.bigquery.reservation.v1.CreateReservationRequest buildPartial() {
      com.google.cloud.bigquery.reservation.v1.CreateReservationRequest result =
          new com.google.cloud.bigquery.reservation.v1.CreateReservationRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers set fields from the builder into the message. Note the bit
    // remapping: builder bit 0x04 (reservation) becomes message bit 0x01.
    private void buildPartial0(
        com.google.cloud.bigquery.reservation.v1.CreateReservationRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.reservationId_ = reservationId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.reservation_ =
            reservationBuilder_ == null ? reservation_ : reservationBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflective/generic builder operations: all delegate to the superclass.
    // Regenerated boilerplate — kept explicit so the generated API surface is
    // stable across protoc versions.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.reservation.v1.CreateReservationRequest) {
return mergeFrom((com.google.cloud.bigquery.reservation.v1.CreateReservationRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.bigquery.reservation.v1.CreateReservationRequest other) {
if (other
== com.google.cloud.bigquery.reservation.v1.CreateReservationRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getReservationId().isEmpty()) {
reservationId_ = other.reservationId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasReservation()) {
mergeReservation(other.getReservation());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// CreateReservationRequest has no proto2 `required` fields, so any state of
// the builder is considered initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format parse loop. The tag values encode (field_number << 3) | wire_type:
// 10 -> parent (field 1, length-delimited), 18 -> reservation_id (field 2),
// 26 -> reservation (field 3, embedded message). Tag 0 marks end of input.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
reservationId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getReservationFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even on error, so partially-read state is visible.
onChanged();
} // finally
return this;
}
// Presence bitmask: bit 0 = parent, bit 1 = reservation_id, bit 2 = reservation.
private int bitField0_;
// Stored as Object so it can hold either a String or a lazily-decoded ByteString.
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Project, location. E.g.,
* `projects/myproject/locations/US`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
// Decode the cached ByteString once and memoize the String form.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Project, location. E.g.,
* `projects/myproject/locations/US`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
// Encode the cached String once and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Project, location. E.g.,
* `projects/myproject/locations/US`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Project, location. E.g.,
* `projects/myproject/locations/US`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Project, location. E.g.,
* `projects/myproject/locations/US`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject bytes that are not valid UTF-8 before storing them.
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Stored as Object so it can hold either a String or a lazily-decoded ByteString.
private java.lang.Object reservationId_ = "";
/**
*
*
* <pre>
* The reservation ID. It must only contain lower case alphanumeric
* characters or dashes. It must start with a letter and must not end
* with a dash. Its maximum length is 64 characters.
* </pre>
*
* <code>string reservation_id = 2;</code>
*
* @return The reservationId.
*/
public java.lang.String getReservationId() {
java.lang.Object ref = reservationId_;
if (!(ref instanceof java.lang.String)) {
// Decode the cached ByteString once and memoize the String form.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
reservationId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The reservation ID. It must only contain lower case alphanumeric
* characters or dashes. It must start with a letter and must not end
* with a dash. Its maximum length is 64 characters.
* </pre>
*
* <code>string reservation_id = 2;</code>
*
* @return The bytes for reservationId.
*/
public com.google.protobuf.ByteString getReservationIdBytes() {
java.lang.Object ref = reservationId_;
if (ref instanceof String) {
// Encode the cached String once and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
reservationId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The reservation ID. It must only contain lower case alphanumeric
* characters or dashes. It must start with a letter and must not end
* with a dash. Its maximum length is 64 characters.
* </pre>
*
* <code>string reservation_id = 2;</code>
*
* @param value The reservationId to set.
* @return This builder for chaining.
*/
public Builder setReservationId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
reservationId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The reservation ID. It must only contain lower case alphanumeric
* characters or dashes. It must start with a letter and must not end
* with a dash. Its maximum length is 64 characters.
* </pre>
*
* <code>string reservation_id = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearReservationId() {
reservationId_ = getDefaultInstance().getReservationId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The reservation ID. It must only contain lower case alphanumeric
* characters or dashes. It must start with a letter and must not end
* with a dash. Its maximum length is 64 characters.
* </pre>
*
* <code>string reservation_id = 2;</code>
*
* @param value The bytes for reservationId to set.
* @return This builder for chaining.
*/
public Builder setReservationIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject bytes that are not valid UTF-8 before storing them.
checkByteStringIsUtf8(value);
reservationId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// Singular message field `reservation`. Either `reservation_` (plain value) or
// `reservationBuilder_` (nested-builder wrapper) is authoritative at any time:
// once getReservationFieldBuilder() is called, reservationBuilder_ takes over
// and reservation_ is nulled out.
private com.google.cloud.bigquery.reservation.v1.Reservation reservation_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.reservation.v1.Reservation,
com.google.cloud.bigquery.reservation.v1.Reservation.Builder,
com.google.cloud.bigquery.reservation.v1.ReservationOrBuilder>
reservationBuilder_;
/**
*
*
* <pre>
* Definition of the new reservation to create.
* </pre>
*
* <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
*
* @return Whether the reservation field is set.
*/
public boolean hasReservation() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Definition of the new reservation to create.
* </pre>
*
* <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
*
* @return The reservation.
*/
public com.google.cloud.bigquery.reservation.v1.Reservation getReservation() {
if (reservationBuilder_ == null) {
// Never return null: substitute the default instance for an unset field.
return reservation_ == null
? com.google.cloud.bigquery.reservation.v1.Reservation.getDefaultInstance()
: reservation_;
} else {
return reservationBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Definition of the new reservation to create.
* </pre>
*
* <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
*/
public Builder setReservation(com.google.cloud.bigquery.reservation.v1.Reservation value) {
if (reservationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
reservation_ = value;
} else {
reservationBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Definition of the new reservation to create.
* </pre>
*
* <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
*/
public Builder setReservation(
com.google.cloud.bigquery.reservation.v1.Reservation.Builder builderForValue) {
if (reservationBuilder_ == null) {
reservation_ = builderForValue.build();
} else {
reservationBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Definition of the new reservation to create.
* </pre>
*
* <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
*/
public Builder mergeReservation(com.google.cloud.bigquery.reservation.v1.Reservation value) {
if (reservationBuilder_ == null) {
// Merge field-by-field only when a non-default value is already present;
// otherwise a plain replacement is equivalent and cheaper. The reference
// comparison (!=) against the default instance is intentional: defaults
// are singletons in protobuf.
if (((bitField0_ & 0x00000004) != 0)
&& reservation_ != null
&& reservation_
!= com.google.cloud.bigquery.reservation.v1.Reservation.getDefaultInstance()) {
getReservationBuilder().mergeFrom(value);
} else {
reservation_ = value;
}
} else {
reservationBuilder_.mergeFrom(value);
}
if (reservation_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Definition of the new reservation to create.
* </pre>
*
* <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
*/
public Builder clearReservation() {
bitField0_ = (bitField0_ & ~0x00000004);
reservation_ = null;
if (reservationBuilder_ != null) {
reservationBuilder_.dispose();
reservationBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Definition of the new reservation to create.
* </pre>
*
* <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
*/
public com.google.cloud.bigquery.reservation.v1.Reservation.Builder getReservationBuilder() {
// Marks the field as set because handing out a mutable builder implies intent to populate it.
bitField0_ |= 0x00000004;
onChanged();
return getReservationFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Definition of the new reservation to create.
* </pre>
*
* <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
*/
public com.google.cloud.bigquery.reservation.v1.ReservationOrBuilder getReservationOrBuilder() {
if (reservationBuilder_ != null) {
return reservationBuilder_.getMessageOrBuilder();
} else {
return reservation_ == null
? com.google.cloud.bigquery.reservation.v1.Reservation.getDefaultInstance()
: reservation_;
}
}
/**
*
*
* <pre>
* Definition of the new reservation to create.
* </pre>
*
* <code>.google.cloud.bigquery.reservation.v1.Reservation reservation = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.reservation.v1.Reservation,
com.google.cloud.bigquery.reservation.v1.Reservation.Builder,
com.google.cloud.bigquery.reservation.v1.ReservationOrBuilder>
getReservationFieldBuilder() {
if (reservationBuilder_ == null) {
// Lazily create the nested builder, seeding it with the current value and
// wiring change notifications back to this (parent) builder.
reservationBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.reservation.v1.Reservation,
com.google.cloud.bigquery.reservation.v1.Reservation.Builder,
com.google.cloud.bigquery.reservation.v1.ReservationOrBuilder>(
getReservation(), getParentForChildren(), isClean());
// From now on the nested builder owns the value.
reservation_ = null;
}
return reservationBuilder_;
}
// Unknown-field handling is delegated entirely to the superclass; these
// overrides only narrow the return type to Builder for fluent chaining.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.reservation.v1.CreateReservationRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.reservation.v1.CreateReservationRequest)
// Singleton default instance: shared immutable value returned for unset
// message fields and used as the prototype for newBuilder().
private static final com.google.cloud.bigquery.reservation.v1.CreateReservationRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.bigquery.reservation.v1.CreateReservationRequest();
}
public static com.google.cloud.bigquery.reservation.v1.CreateReservationRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless parser singleton. Parse failures attach the partially-built
// message via setUnfinishedMessage so callers can inspect what was read.
private static final com.google.protobuf.Parser<CreateReservationRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateReservationRequest>() {
@java.lang.Override
public CreateReservationRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O errors so the parser contract (IPBE only) holds.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateReservationRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateReservationRequest> getParserForType() {
return PARSER;
}
// Instance-level accessor for the shared default instance (Message contract).
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.CreateReservationRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/domains/v1alpha2/domains.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.domains.v1alpha2;
/**
*
*
* <pre>
* Response for the `ListRegistrations` method.
* </pre>
*
* Protobuf type {@code google.cloud.domains.v1alpha2.ListRegistrationsResponse}
*/
public final class ListRegistrationsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.domains.v1alpha2.ListRegistrationsResponse)
ListRegistrationsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListRegistrationsResponse.newBuilder() to construct.
private ListRegistrationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used only for the default instance; initializes fields
// to their proto3 defaults (empty list / empty string).
private ListRegistrationsResponse() {
registrations_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListRegistrationsResponse();
}
// Descriptor plumbing linking this class to the message definition in
// google/cloud/domains/v1alpha2/domains.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.domains.v1alpha2.DomainsProto
.internal_static_google_cloud_domains_v1alpha2_ListRegistrationsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.domains.v1alpha2.DomainsProto
.internal_static_google_cloud_domains_v1alpha2_ListRegistrationsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.domains.v1alpha2.ListRegistrationsResponse.class,
com.google.cloud.domains.v1alpha2.ListRegistrationsResponse.Builder.class);
}
public static final int REGISTRATIONS_FIELD_NUMBER = 1;
// Immutable once the message is built; read-only accessors below expose it directly.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.domains.v1alpha2.Registration> registrations_;
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.domains.v1alpha2.Registration> getRegistrationsList() {
return registrations_;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.domains.v1alpha2.RegistrationOrBuilder>
getRegistrationsOrBuilderList() {
return registrations_;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
*/
@java.lang.Override
public int getRegistrationsCount() {
return registrations_.size();
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
*/
@java.lang.Override
public com.google.cloud.domains.v1alpha2.Registration getRegistrations(int index) {
return registrations_.get(index);
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
*/
@java.lang.Override
public com.google.cloud.domains.v1alpha2.RegistrationOrBuilder getRegistrationsOrBuilder(
int index) {
return registrations_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
// Object-typed so it can hold either a String or a lazily-decoded ByteString;
// volatile because the cached conversion may be written from multiple threads.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* When present, there are more results to retrieve. Set `page_token` to this
* value on a subsequent call to get the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode the ByteString once and memoize the String form.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* When present, there are more results to retrieve. Set `page_token` to this
* value on a subsequent call to get the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
// Encode the String once and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
// This message has no required fields, so the answer is always true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order; proto3 string defaults
// (empty) are skipped on the wire.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < registrations_.size(); i++) {
output.writeMessage(1, registrations_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
// Computes and memoizes the serialized byte size; must mirror writeTo exactly.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < registrations_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, registrations_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality over all fields including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.domains.v1alpha2.ListRegistrationsResponse)) {
return super.equals(obj);
}
com.google.cloud.domains.v1alpha2.ListRegistrationsResponse other =
(com.google.cloud.domains.v1alpha2.ListRegistrationsResponse) obj;
if (!getRegistrationsList().equals(other.getRegistrationsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash over descriptor + set fields, consistent with equals above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getRegistrationsCount() > 0) {
hash = (37 * hash) + REGISTRATIONS_FIELD_NUMBER;
hash = (53 * hash) + getRegistrationsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard static parseFrom / parseDelimitedFrom overloads for every input
// shape (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream),
// each with and without an extension registry. All delegate to PARSER or the
// GeneratedMessageV3 I/O helpers.
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods. toBuilder() on the default instance returns a
// fresh empty Builder to avoid a needless self-merge.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.domains.v1alpha2.ListRegistrationsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response for the `ListRegistrations` method.
* </pre>
*
* Protobuf type {@code google.cloud.domains.v1alpha2.ListRegistrationsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.domains.v1alpha2.ListRegistrationsResponse)
com.google.cloud.domains.v1alpha2.ListRegistrationsResponseOrBuilder {
// Descriptor plumbing mirroring the enclosing message class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.domains.v1alpha2.DomainsProto
.internal_static_google_cloud_domains_v1alpha2_ListRegistrationsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.domains.v1alpha2.DomainsProto
.internal_static_google_cloud_domains_v1alpha2_ListRegistrationsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.domains.v1alpha2.ListRegistrationsResponse.class,
com.google.cloud.domains.v1alpha2.ListRegistrationsResponse.Builder.class);
}
// Construct using com.google.cloud.domains.v1alpha2.ListRegistrationsResponse.newBuilder()
private Builder() {}
// Parent-aware constructor used when this builder backs a nested field of
// another builder.
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets every field to its proto3 default and clears all presence bits.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (registrationsBuilder_ == null) {
registrations_ = java.util.Collections.emptyList();
} else {
// The nested builder owns the list; null the local reference and clear it.
registrations_ = null;
registrationsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.domains.v1alpha2.DomainsProto
.internal_static_google_cloud_domains_v1alpha2_ListRegistrationsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.domains.v1alpha2.ListRegistrationsResponse getDefaultInstanceForType() {
return com.google.cloud.domains.v1alpha2.ListRegistrationsResponse.getDefaultInstance();
}
// build() enforces initialization; buildPartial() does not. The repeated
// field is finalized separately from the scalar fields because it must be
// frozen (made unmodifiable) exactly once when no nested builder is in use.
@java.lang.Override
public com.google.cloud.domains.v1alpha2.ListRegistrationsResponse build() {
com.google.cloud.domains.v1alpha2.ListRegistrationsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.domains.v1alpha2.ListRegistrationsResponse buildPartial() {
com.google.cloud.domains.v1alpha2.ListRegistrationsResponse result =
new com.google.cloud.domains.v1alpha2.ListRegistrationsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.domains.v1alpha2.ListRegistrationsResponse result) {
if (registrationsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
// Freeze the mutable list and drop the "mutable" bit so later builds
// share the same immutable snapshot.
registrations_ = java.util.Collections.unmodifiableList(registrations_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.registrations_ = registrations_;
} else {
result.registrations_ = registrationsBuilder_.build();
}
}
// Copies scalar fields whose presence bit is set (bit 1 = next_page_token).
private void buildPartial0(com.google.cloud.domains.v1alpha2.ListRegistrationsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
// Boilerplate overrides delegating to GeneratedMessageV3.Builder; they only
// narrow the return type to Builder for fluent chaining.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Generic merge entry point: dispatches to the typed overload when possible.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.domains.v1alpha2.ListRegistrationsResponse) {
return mergeFrom((com.google.cloud.domains.v1alpha2.ListRegistrationsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge. For the repeated field there are two regimes: when no nested
// builder exists, lists are adopted or concatenated directly; when one
// exists, messages are appended through it (with an adoption fast path when
// the builder is empty).
public Builder mergeFrom(com.google.cloud.domains.v1alpha2.ListRegistrationsResponse other) {
if (other == com.google.cloud.domains.v1alpha2.ListRegistrationsResponse.getDefaultInstance())
return this;
if (registrationsBuilder_ == null) {
if (!other.registrations_.isEmpty()) {
if (registrations_.isEmpty()) {
// Adopt other's (immutable) list directly; clear the mutable bit.
registrations_ = other.registrations_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureRegistrationsIsMutable();
registrations_.addAll(other.registrations_);
}
onChanged();
}
} else {
if (!other.registrations_.isEmpty()) {
if (registrationsBuilder_.isEmpty()) {
// Fast path: discard the empty nested builder and adopt other's list,
// recreating the builder afterwards only if field builders are forced on.
registrationsBuilder_.dispose();
registrationsBuilder_ = null;
registrations_ = other.registrations_;
bitField0_ = (bitField0_ & ~0x00000001);
registrationsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getRegistrationsFieldBuilder()
: null;
} else {
registrationsBuilder_.addAllMessages(other.registrations_);
}
}
}
// proto3: an empty string means "unset", so only non-empty tokens are copied.
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// No required fields in this message, so any builder state is initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format parse loop. Tag 10 -> registrations (field 1, embedded
// message, repeated), tag 18 -> next_page_token (field 2, string), tag 0 ->
// end of input; anything else goes to the unknown-field set.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.domains.v1alpha2.Registration m =
input.readMessage(
com.google.cloud.domains.v1alpha2.Registration.parser(), extensionRegistry);
if (registrationsBuilder_ == null) {
ensureRegistrationsIsMutable();
registrations_.add(m);
} else {
registrationsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even on error, so partially-read state is visible.
onChanged();
} // finally
return this;
}
// Presence/mutability bitmask: bit 0 = registrations list is privately owned
// and mutable, bit 1 = next_page_token is set.
private int bitField0_;
private java.util.List<com.google.cloud.domains.v1alpha2.Registration> registrations_ =
java.util.Collections.emptyList();
// Copy-on-write: replaces a shared/immutable list with a private ArrayList
// before the first mutation, and records ownership in bit 0.
private void ensureRegistrationsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
registrations_ =
new java.util.ArrayList<com.google.cloud.domains.v1alpha2.Registration>(registrations_);
bitField0_ |= 0x00000001;
}
}
// Nested repeated-field builder; when non-null it owns the list instead of
// registrations_ above.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.domains.v1alpha2.Registration,
com.google.cloud.domains.v1alpha2.Registration.Builder,
com.google.cloud.domains.v1alpha2.RegistrationOrBuilder>
registrationsBuilder_;
/**
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public java.util.List<com.google.cloud.domains.v1alpha2.Registration> getRegistrationsList() {
  // Delegate to the field builder once it exists; otherwise expose a
  // read-only view over the locally held list.
  return registrationsBuilder_ != null
      ? registrationsBuilder_.getMessageList()
      : java.util.Collections.unmodifiableList(registrations_);
}
/**
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public int getRegistrationsCount() {
  // Whichever store is active (builder or plain list) reports the size.
  return registrationsBuilder_ != null
      ? registrationsBuilder_.getCount()
      : registrations_.size();
}
/**
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public com.google.cloud.domains.v1alpha2.Registration getRegistrations(int index) {
  // Read from the field builder when present, else from the plain list.
  return registrationsBuilder_ != null
      ? registrationsBuilder_.getMessage(index)
      : registrations_.get(index);
}
/**
 * Replaces the element at {@code index}.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public Builder setRegistrations(
    int index, com.google.cloud.domains.v1alpha2.Registration value) {
  if (registrationsBuilder_ != null) {
    // Builder mode: the field builder performs the replacement.
    registrationsBuilder_.setMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureRegistrationsIsMutable();
  registrations_.set(index, value);
  onChanged();
  return this;
}
/**
 * Replaces the element at {@code index} with the built form of
 * {@code builderForValue}.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public Builder setRegistrations(
    int index, com.google.cloud.domains.v1alpha2.Registration.Builder builderForValue) {
  com.google.cloud.domains.v1alpha2.Registration built = builderForValue.build();
  if (registrationsBuilder_ != null) {
    registrationsBuilder_.setMessage(index, built);
    return this;
  }
  ensureRegistrationsIsMutable();
  registrations_.set(index, built);
  onChanged();
  return this;
}
/**
 * Appends one element.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public Builder addRegistrations(com.google.cloud.domains.v1alpha2.Registration value) {
  if (registrationsBuilder_ != null) {
    registrationsBuilder_.addMessage(value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureRegistrationsIsMutable();
  registrations_.add(value);
  onChanged();
  return this;
}
/**
 * Inserts one element at {@code index}.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public Builder addRegistrations(
    int index, com.google.cloud.domains.v1alpha2.Registration value) {
  if (registrationsBuilder_ != null) {
    registrationsBuilder_.addMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureRegistrationsIsMutable();
  registrations_.add(index, value);
  onChanged();
  return this;
}
/**
 * Appends the built form of {@code builderForValue}.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public Builder addRegistrations(
    com.google.cloud.domains.v1alpha2.Registration.Builder builderForValue) {
  com.google.cloud.domains.v1alpha2.Registration built = builderForValue.build();
  if (registrationsBuilder_ != null) {
    registrationsBuilder_.addMessage(built);
    return this;
  }
  ensureRegistrationsIsMutable();
  registrations_.add(built);
  onChanged();
  return this;
}
/**
 * Inserts the built form of {@code builderForValue} at {@code index}.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public Builder addRegistrations(
    int index, com.google.cloud.domains.v1alpha2.Registration.Builder builderForValue) {
  com.google.cloud.domains.v1alpha2.Registration built = builderForValue.build();
  if (registrationsBuilder_ != null) {
    registrationsBuilder_.addMessage(index, built);
    return this;
  }
  ensureRegistrationsIsMutable();
  registrations_.add(index, built);
  onChanged();
  return this;
}
/**
 * Appends every element of {@code values} in iteration order.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public Builder addAllRegistrations(
    java.lang.Iterable<? extends com.google.cloud.domains.v1alpha2.Registration> values) {
  if (registrationsBuilder_ != null) {
    registrationsBuilder_.addAllMessages(values);
    return this;
  }
  ensureRegistrationsIsMutable();
  com.google.protobuf.AbstractMessageLite.Builder.addAll(values, registrations_);
  onChanged();
  return this;
}
/**
 * Removes every element of the repeated field.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public Builder clearRegistrations() {
  if (registrationsBuilder_ != null) {
    registrationsBuilder_.clear();
    return this;
  }
  // Drop the private copy and clear the "mutable" bit so future mutations
  // re-copy from the shared empty list.
  registrations_ = java.util.Collections.emptyList();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}
/**
 * Removes the element at {@code index}.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public Builder removeRegistrations(int index) {
  if (registrationsBuilder_ != null) {
    registrationsBuilder_.remove(index);
    return this;
  }
  ensureRegistrationsIsMutable();
  registrations_.remove(index);
  onChanged();
  return this;
}
/**
 * Returns a mutable builder for the element at the given index; forces
 * creation of the field builder (switching this Builder into builder mode).
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public com.google.cloud.domains.v1alpha2.Registration.Builder getRegistrationsBuilder(
int index) {
return getRegistrationsFieldBuilder().getBuilder(index);
}
/**
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public com.google.cloud.domains.v1alpha2.RegistrationOrBuilder getRegistrationsOrBuilder(
    int index) {
  // Does not force field-builder creation; reads whichever store is active.
  return registrationsBuilder_ != null
      ? registrationsBuilder_.getMessageOrBuilder(index)
      : registrations_.get(index);
}
/**
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public java.util.List<? extends com.google.cloud.domains.v1alpha2.RegistrationOrBuilder>
    getRegistrationsOrBuilderList() {
  // Does not force field-builder creation; exposes a read-only view either way.
  return registrationsBuilder_ == null
      ? java.util.Collections.unmodifiableList(registrations_)
      : registrationsBuilder_.getMessageOrBuilderList();
}
/**
 * Appends a new, default-initialized element and returns its builder; forces
 * creation of the field builder.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public com.google.cloud.domains.v1alpha2.Registration.Builder addRegistrationsBuilder() {
return getRegistrationsFieldBuilder()
.addBuilder(com.google.cloud.domains.v1alpha2.Registration.getDefaultInstance());
}
/**
 * Inserts a new, default-initialized element at {@code index} and returns its
 * builder; forces creation of the field builder.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public com.google.cloud.domains.v1alpha2.Registration.Builder addRegistrationsBuilder(
int index) {
return getRegistrationsFieldBuilder()
.addBuilder(index, com.google.cloud.domains.v1alpha2.Registration.getDefaultInstance());
}
/**
 * Returns builders for every element; forces creation of the field builder.
 *
 * <pre>
 * A list of `Registration`s.
 * </pre>
 *
 * <code>repeated .google.cloud.domains.v1alpha2.Registration registrations = 1;</code>
 */
public java.util.List<com.google.cloud.domains.v1alpha2.Registration.Builder>
getRegistrationsBuilderList() {
return getRegistrationsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 on first use, seeding it with the
// current list contents, then nulls out registrations_ so the builder becomes
// the single source of truth for this repeated field.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.domains.v1alpha2.Registration,
com.google.cloud.domains.v1alpha2.Registration.Builder,
com.google.cloud.domains.v1alpha2.RegistrationOrBuilder>
getRegistrationsFieldBuilder() {
if (registrationsBuilder_ == null) {
registrationsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.domains.v1alpha2.Registration,
com.google.cloud.domains.v1alpha2.Registration.Builder,
com.google.cloud.domains.v1alpha2.RegistrationOrBuilder>(
registrations_,
// "Mutable" bit decides whether the builder may take ownership of the list.
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
registrations_ = null;
}
return registrationsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
 * <pre>
 * When present, there are more results to retrieve. Set `page_token` to this
 * value on a subsequent call to get the next page of results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // First access after parsing: decode the UTF-8 bytes and cache the String.
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  nextPageToken_ = decoded;
  return decoded;
}
/**
 * <pre>
 * When present, there are more results to retrieve. Set `page_token` to this
 * value on a subsequent call to get the next page of results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Encode the cached String as UTF-8 bytes and memoize the result.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  nextPageToken_ = encoded;
  return encoded;
}
/**
 * <pre>
 * When present, there are more results to retrieve. Set `page_token` to this
 * value on a subsequent call to get the next page of results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  java.util.Objects.requireNonNull(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * <pre>
 * When present, there are more results to retrieve. Set `page_token` to this
 * value on a subsequent call to get the next page of results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
// Restore the default ("" in the default instance) and drop the has-bit.
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
 * <pre>
 * When present, there are more results to retrieve. Set `page_token` to this
 * value on a subsequent call to get the next page of results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set; must be valid UTF-8, non-null.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  // Rejects byte sequences that are not well-formed UTF-8.
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Plain pass-throughs to the generated superclass; kept final so subclasses of
// this Builder cannot alter unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.domains.v1alpha2.ListRegistrationsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.domains.v1alpha2.ListRegistrationsResponse)
// Singleton default instance, created eagerly at class-initialization time.
private static final com.google.cloud.domains.v1alpha2.ListRegistrationsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.domains.v1alpha2.ListRegistrationsResponse();
}
public static com.google.cloud.domains.v1alpha2.ListRegistrationsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser: delegates to Builder.mergeFrom and returns a partial message,
// attaching whatever was parsed so far to any thrown parse exception.
private static final com.google.protobuf.Parser<ListRegistrationsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListRegistrationsResponse>() {
@java.lang.Override
public ListRegistrationsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a single exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListRegistrationsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListRegistrationsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.domains.v1alpha2.ListRegistrationsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,476 | java-dataform/proto-google-cloud-dataform-v1beta1/src/main/java/com/google/cloud/dataform/v1beta1/QueryDirectoryContentsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataform/v1beta1/dataform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataform.v1beta1;
/**
*
*
* <pre>
* `QueryDirectoryContents` request message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest}
*/
public final class QueryDirectoryContentsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest)
QueryDirectoryContentsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use QueryDirectoryContentsRequest.newBuilder() to construct.
private QueryDirectoryContentsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor initializes string fields to their proto3 defaults ("").
private QueryDirectoryContentsRequest() {
workspace_ = "";
path_ = "";
pageToken_ = "";
}
// Reflective-instantiation hook used by the protobuf runtime.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new QueryDirectoryContentsRequest();
}
// Descriptor plumbing: links this generated class to the message descriptor
// and field-accessor table produced from dataform.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_QueryDirectoryContentsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_QueryDirectoryContentsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest.class,
com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest.Builder.class);
}
public static final int WORKSPACE_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object workspace_ = "";
/**
 * <pre>
 * Required. The workspace's name.
 * </pre>
 *
 * <code>
 * string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The workspace.
 */
@java.lang.Override
public java.lang.String getWorkspace() {
  java.lang.Object ref = workspace_;
  if (!(ref instanceof java.lang.String)) {
    // Decode lazily from the parsed ByteString and cache the String form.
    java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
    workspace_ = decoded;
    return decoded;
  }
  return (java.lang.String) ref;
}
/**
 * <pre>
 * Required. The workspace's name.
 * </pre>
 *
 * <code>
 * string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for workspace.
 */
@java.lang.Override
public com.google.protobuf.ByteString getWorkspaceBytes() {
  java.lang.Object ref = workspace_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Encode the cached String as UTF-8 and memoize the ByteString form.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  workspace_ = encoded;
  return encoded;
}
public static final int PATH_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object path_ = "";
/**
 * <pre>
 * Optional. The directory's full path including directory name, relative to
 * the workspace root. If left unset, the workspace root is used.
 * </pre>
 *
 * <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The path.
 */
@java.lang.Override
public java.lang.String getPath() {
  java.lang.Object ref = path_;
  if (!(ref instanceof java.lang.String)) {
    // Decode lazily from the parsed ByteString and cache the String form.
    java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
    path_ = decoded;
    return decoded;
  }
  return (java.lang.String) ref;
}
/**
 * <pre>
 * Optional. The directory's full path including directory name, relative to
 * the workspace root. If left unset, the workspace root is used.
 * </pre>
 *
 * <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for path.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPathBytes() {
  java.lang.Object ref = path_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Encode the cached String as UTF-8 and memoize the ByteString form.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  path_ = encoded;
  return encoded;
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
 * <pre>
 * Optional. Maximum number of paths to return. The server may return fewer
 * items than requested. If unspecified, the server will pick an appropriate
 * default.
 * </pre>
 *
 * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
 * <pre>
 * Optional. Page token received from a previous `QueryDirectoryContents`
 * call. Provide this to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to
 * `QueryDirectoryContents`, with the exception of `page_size`, must match the
 * call that provided the page token.
 * </pre>
 *
 * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The pageToken.
 */
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof java.lang.String)) {
    // Decode lazily from the parsed ByteString and cache the String form.
    java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
    pageToken_ = decoded;
    return decoded;
  }
  return (java.lang.String) ref;
}
/**
 * <pre>
 * Optional. Page token received from a previous `QueryDirectoryContents`
 * call. Provide this to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to
 * `QueryDirectoryContents`, with the exception of `page_size`, must match the
 * call that provided the page token.
 * </pre>
 *
 * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Encode the cached String as UTF-8 and memoize the ByteString form.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  pageToken_ = encoded;
  return encoded;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// Proto3 message with no required fields: always initialized; cache it.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Proto3 serialization: fields equal to their default value are skipped;
// fields are written in ascending field-number order, then unknown fields.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workspace_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, workspace_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, path_);
}
if (pageSize_ != 0) {
output.writeInt32(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Memoized: -1 means "not yet computed". Must mirror writeTo exactly.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workspace_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, workspace_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, path_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Field-by-field equality, including unknown fields. Non-message operands fall
 * back to the superclass comparison.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest other =
      (com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest) obj;
  return getWorkspace().equals(other.getWorkspace())
      && getPath().equals(other.getPath())
      && getPageSize() == other.getPageSize()
      && getPageToken().equals(other.getPageToken())
      && getUnknownFields().equals(other.getUnknownFields());
}
@java.lang.Override
public int hashCode() {
// Memoized; 0 is the "not yet computed" sentinel. The 19/37/53/29 mixing
// scheme is the standard protobuf-generated hash, consistent with equals().
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + WORKSPACE_FIELD_NUMBER;
hash = (53 * hash) + getWorkspace().hashCode();
hash = (37 * hash) + PATH_FIELD_NUMBER;
hash = (53 * hash) + getPath().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. The byte-oriented overloads delegate
// to PARSER directly; the stream-oriented ones go through the
// GeneratedMessageV3 helpers, which translate IOExceptions appropriately.
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factories. Builders are derived from the default instance (or a
// prototype) so toBuilder() on the default instance avoids a redundant merge.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* `QueryDirectoryContents` request message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest)
com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequestOrBuilder {
// Descriptor plumbing for the Builder; mirrors the enclosing message class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_QueryDirectoryContentsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_QueryDirectoryContentsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest.class,
com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest.Builder.class);
}
// Construct using com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets every field to its proto3 default and clears all has-bits.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
workspace_ = "";
path_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_QueryDirectoryContentsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest
getDefaultInstanceForType() {
return com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest.getDefaultInstance();
}
// build() enforces isInitialized(); buildPartial() (below) does not.
@java.lang.Override
public com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest build() {
com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest buildPartial() {
com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest result =
new com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest(this);
// Only copy fields whose has-bits are set; untouched fields keep defaults.
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies each explicitly-set field from the builder into the result message,
// guided by the per-field bits in bitField0_ (0x1 workspace, 0x2 path,
// 0x4 pageSize, 0x8 pageToken).
private void buildPartial0(
com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.workspace_ = workspace_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.path_ = path_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.pageToken_ = pageToken_;
}
}
// Reflective field operations: plain pass-throughs to the generated
// superclass, overridden only to narrow the return type to this Builder.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
/**
 * Dispatches to the strongly-typed merge when {@code other} is a
 * QueryDirectoryContentsRequest; otherwise defers to the generic merge.
 */
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest) {
    return mergeFrom((com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest) other);
  }
  super.mergeFrom(other);
  return this;
}
// Typed merge: copies only the fields that are non-default in `other`,
// matching proto3 merge semantics (scalars overwrite, defaults are skipped).
public Builder mergeFrom(
com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest other) {
if (other
== com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest.getDefaultInstance())
return this;
if (!other.getWorkspace().isEmpty()) {
// Copy the raw field (String or ByteString) to avoid forcing a decode.
workspace_ = other.workspace_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getPath().isEmpty()) {
path_ = other.path_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// Proto3 message with no required fields: every builder state is valid.
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
// Streaming merge: reads tag/value pairs until end-of-message (tag 0) or an
// end-group tag, folding each recognized field into this builder.
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
// Field 1 (string): workspace, validated as UTF-8 on read.
workspace_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
// Field 2 (string): path.
path_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
// Field 3 (varint): page_size.
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34:
{
// Field 4 (string): page_token.
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even on failure, since fields may already be set.
onChanged();
} // finally
return this;
}
// Presence bits: 0x1 = workspace, 0x2 = path, 0x4 = page_size, 0x8 = page_token.
private int bitField0_;
// Holds either a String or a ByteString; lazily converted and cached on access
// (standard generated-code idiom for string fields).
private java.lang.Object workspace_ = "";
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The workspace.
*/
public java.lang.String getWorkspace() {
java.lang.Object ref = workspace_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 conversion.
workspace_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for workspace.
*/
public com.google.protobuf.ByteString getWorkspaceBytes() {
java.lang.Object ref = workspace_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString symmetrically to getWorkspace().
workspace_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The workspace to set.
* @return This builder for chaining.
*/
public Builder setWorkspace(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
workspace_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearWorkspace() {
workspace_ = getDefaultInstance().getWorkspace();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for workspace to set.
* @return This builder for chaining.
*/
public Builder setWorkspaceBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 string fields must hold valid UTF-8; reject malformed bytes up front.
checkByteStringIsUtf8(value);
workspace_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Same lazy String/ByteString caching idiom as workspace_ above.
private java.lang.Object path_ = "";
/**
*
*
* <pre>
* Optional. The directory's full path including directory name, relative to
* the workspace root. If left unset, the workspace root is used.
* </pre>
*
* <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The path.
*/
public java.lang.String getPath() {
java.lang.Object ref = path_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
path_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The directory's full path including directory name, relative to
* the workspace root. If left unset, the workspace root is used.
* </pre>
*
* <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for path.
*/
public com.google.protobuf.ByteString getPathBytes() {
java.lang.Object ref = path_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
path_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The directory's full path including directory name, relative to
* the workspace root. If left unset, the workspace root is used.
* </pre>
*
* <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The path to set.
* @return This builder for chaining.
*/
public Builder setPath(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
path_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The directory's full path including directory name, relative to
* the workspace root. If left unset, the workspace root is used.
* </pre>
*
* <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPath() {
path_ = getDefaultInstance().getPath();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The directory's full path including directory name, relative to
* the workspace root. If left unset, the workspace root is used.
* </pre>
*
* <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for path to set.
* @return This builder for chaining.
*/
public Builder setPathBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
path_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// page_size (field 3); presence tracked by bit 0x4 of bitField0_.
private int pageSize_;
/**
*
*
* <pre>
* Optional. Maximum number of paths to return. The server may return fewer
* items than requested. If unspecified, the server will pick an appropriate
* default.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. Maximum number of paths to return. The server may return fewer
* items than requested. If unspecified, the server will pick an appropriate
* default.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Maximum number of paths to return. The server may return fewer
* items than requested. If unspecified, the server will pick an appropriate
* default.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
// page_token (field 4); same lazy String/ByteString caching idiom as workspace_.
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. Page token received from a previous `QueryDirectoryContents`
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* `QueryDirectoryContents`, with the exception of `page_size`, must match the
* call that provided the page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Page token received from a previous `QueryDirectoryContents`
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* `QueryDirectoryContents`, with the exception of `page_size`, must match the
* call that provided the page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Page token received from a previous `QueryDirectoryContents`
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* `QueryDirectoryContents`, with the exception of `page_size`, must match the
* call that provided the page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Page token received from a previous `QueryDirectoryContents`
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* `QueryDirectoryContents`, with the exception of `page_size`, must match the
* call that provided the page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Page token received from a previous `QueryDirectoryContents`
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* `QueryDirectoryContents`, with the exception of `page_size`, must match the
* call that provided the page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
// Return-type-narrowing delegations for unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest)
// Singleton default instance (all fields at proto3 defaults), created eagerly
// in a static initializer and shared by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest();
}
public static com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser implementation: parses via a fresh Builder and, on failure, attaches the
// partially-built message to the thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<QueryDirectoryContentsRequest> PARSER =
new com.google.protobuf.AbstractParser<QueryDirectoryContentsRequest>() {
@java.lang.Override
public QueryDirectoryContentsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<QueryDirectoryContentsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<QueryDirectoryContentsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dataform.v1beta1.QueryDirectoryContentsRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// NOTE(review): the two lines originally here ("|" and
// "googleapis/google-cloud-java | 37,487 | java-artifact-registry/...") are not Java —
// they look like dataset/concatenation residue marking a file boundary. Everything
// below belongs to a different generated file: ListNpmPackagesResponse.java from
// proto-google-cloud-artifact-registry-v1. These two files should live in separate
// source files; confirm against the upstream repositories before splitting.
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/artifactregistry/v1/artifact.proto
// Protobuf Java Version: 3.25.8
package com.google.devtools.artifactregistry.v1;
/**
*
*
* <pre>
* The response from listing npm packages.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.ListNpmPackagesResponse}
*/
public final class ListNpmPackagesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1.ListNpmPackagesResponse)
ListNpmPackagesResponseOrBuilder {
// NOTE(review): generated protobuf code (protoc) — regenerate from
// google/devtools/artifactregistry/v1/artifact.proto rather than hand-editing.
private static final long serialVersionUID = 0L;
// Use ListNpmPackagesResponse.newBuilder() to construct.
private ListNpmPackagesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: initializes fields to proto3 defaults.
private ListNpmPackagesResponse() {
npmPackages_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListNpmPackagesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.devtools.artifactregistry.v1.ArtifactProto
.internal_static_google_devtools_artifactregistry_v1_ListNpmPackagesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.devtools.artifactregistry.v1.ArtifactProto
.internal_static_google_devtools_artifactregistry_v1_ListNpmPackagesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse.class,
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse.Builder.class);
}
public static final int NPM_PACKAGES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.devtools.artifactregistry.v1.NpmPackage> npmPackages_;
/**
*
*
* <pre>
* The npm packages returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.devtools.artifactregistry.v1.NpmPackage> getNpmPackagesList() {
// Message instances are immutable, so the internal list is returned directly.
return npmPackages_;
}
/**
*
*
* <pre>
* The npm packages returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.devtools.artifactregistry.v1.NpmPackageOrBuilder>
getNpmPackagesOrBuilderList() {
return npmPackages_;
}
/**
*
*
* <pre>
* The npm packages returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
*/
@java.lang.Override
public int getNpmPackagesCount() {
return npmPackages_.size();
}
/**
*
*
* <pre>
* The npm packages returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
*/
@java.lang.Override
public com.google.devtools.artifactregistry.v1.NpmPackage getNpmPackages(int index) {
return npmPackages_.get(index);
}
/**
*
*
* <pre>
* The npm packages returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
*/
@java.lang.Override
public com.google.devtools.artifactregistry.v1.NpmPackageOrBuilder getNpmPackagesOrBuilder(
int index) {
return npmPackages_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; lazily converted and cached on access.
// volatile so the cached conversion publishes safely across threads.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The token to retrieve the next page of artifacts, or empty if there are no
* more artifacts to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The token to retrieve the next page of artifacts, or empty if there are no
* more artifacts to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields, so the message is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes field 1 (repeated message) then field 2 (string, only if non-empty),
// then any preserved unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < npmPackages_.size(); i++) {
output.writeMessage(1, npmPackages_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
// Computes and memoizes the serialized size; must mirror writeTo() exactly.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < npmPackages_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, npmPackages_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over all fields, including preserved unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse)) {
return super.equals(obj);
}
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse other =
(com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse) obj;
if (!getNpmPackagesList().equals(other.getNpmPackagesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash is memoized (messages are immutable) and mixes the descriptor plus each
// set field, consistent with equals() above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getNpmPackagesCount() > 0) {
hash = (37 * hash) + NPM_PACKAGES_FIELD_NUMBER;
hash = (53 * hash) + getNpmPackagesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
// without an ExtensionRegistryLite. All delegate to PARSER / GeneratedMessageV3 helpers.
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix first (for streamed messages).
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods. toBuilder() on the default instance returns a fresh
// empty Builder, skipping an unnecessary mergeFrom.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response from listing npm packages.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.ListNpmPackagesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1.ListNpmPackagesResponse)
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.devtools.artifactregistry.v1.ArtifactProto
.internal_static_google_devtools_artifactregistry_v1_ListNpmPackagesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.devtools.artifactregistry.v1.ArtifactProto
.internal_static_google_devtools_artifactregistry_v1_ListNpmPackagesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse.class,
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse.Builder.class);
}
// Construct using com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets all fields to proto3 defaults and clears presence bits. The repeated
// field is reset directly or via its nested-builder helper, whichever is active.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (npmPackagesBuilder_ == null) {
npmPackages_ = java.util.Collections.emptyList();
} else {
npmPackages_ = null;
npmPackagesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.devtools.artifactregistry.v1.ArtifactProto
.internal_static_google_devtools_artifactregistry_v1_ListNpmPackagesResponse_descriptor;
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse
getDefaultInstanceForType() {
return com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse.getDefaultInstance();
}
// build() enforces isInitialized(); buildPartial() does not.
@java.lang.Override
public com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse build() {
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse buildPartial() {
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse result =
new com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Freezes the repeated field: wraps the builder-owned list as unmodifiable (and
// clears its presence bit) so the built message never shares a mutable list.
private void buildPartialRepeatedFields(
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse result) {
if (npmPackagesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
npmPackages_ = java.util.Collections.unmodifiableList(npmPackages_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.npmPackages_ = npmPackages_;
} else {
result.npmPackages_ = npmPackagesBuilder_.build();
}
}
// Copies scalar fields guarded by presence bits (0x2 = next_page_token).
private void buildPartial0(
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
// Return-type-narrowing delegations to GeneratedMessageV3.Builder.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse) {
return mergeFrom((com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: appends `other`'s repeated entries (adopting its immutable list
// wholesale when ours is empty) and copies next_page_token if non-default.
public Builder mergeFrom(
com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse other) {
if (other
== com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse.getDefaultInstance())
return this;
if (npmPackagesBuilder_ == null) {
if (!other.npmPackages_.isEmpty()) {
if (npmPackages_.isEmpty()) {
npmPackages_ = other.npmPackages_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureNpmPackagesIsMutable();
npmPackages_.addAll(other.npmPackages_);
}
onChanged();
}
} else {
if (!other.npmPackages_.isEmpty()) {
if (npmPackagesBuilder_.isEmpty()) {
npmPackagesBuilder_.dispose();
npmPackagesBuilder_ = null;
npmPackages_ = other.npmPackages_;
bitField0_ = (bitField0_ & ~0x00000001);
npmPackagesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getNpmPackagesFieldBuilder()
: null;
} else {
npmPackagesBuilder_.addAllMessages(other.npmPackages_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// No required fields in this message.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge. Tag 10 = field 1 (npm_packages, length-delimited message),
// tag 18 = field 2 (next_page_token, string); unknown fields are preserved.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.devtools.artifactregistry.v1.NpmPackage m =
input.readMessage(
com.google.devtools.artifactregistry.v1.NpmPackage.parser(),
extensionRegistry);
if (npmPackagesBuilder_ == null) {
ensureNpmPackagesIsMutable();
npmPackages_.add(m);
} else {
npmPackagesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
    private int bitField0_;
    // Backing list for npm_packages; starts as the shared immutable empty list and is
    // copy-on-first-write (bit 0x00000001 of bitField0_ records mutability).
    private java.util.List<com.google.devtools.artifactregistry.v1.NpmPackage> npmPackages_ =
        java.util.Collections.emptyList();
    // Replaces the (possibly shared/immutable) list with a private mutable copy exactly once.
    private void ensureNpmPackagesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        npmPackages_ =
            new java.util.ArrayList<com.google.devtools.artifactregistry.v1.NpmPackage>(
                npmPackages_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created; once non-null it owns the repeated field and npmPackages_ is nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.devtools.artifactregistry.v1.NpmPackage,
            com.google.devtools.artifactregistry.v1.NpmPackage.Builder,
            com.google.devtools.artifactregistry.v1.NpmPackageOrBuilder>
        npmPackagesBuilder_;
/**
*
*
* <pre>
* The npm packages returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
*/
public java.util.List<com.google.devtools.artifactregistry.v1.NpmPackage> getNpmPackagesList() {
if (npmPackagesBuilder_ == null) {
return java.util.Collections.unmodifiableList(npmPackages_);
} else {
return npmPackagesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The npm packages returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
*/
public int getNpmPackagesCount() {
if (npmPackagesBuilder_ == null) {
return npmPackages_.size();
} else {
return npmPackagesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The npm packages returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
*/
public com.google.devtools.artifactregistry.v1.NpmPackage getNpmPackages(int index) {
if (npmPackagesBuilder_ == null) {
return npmPackages_.get(index);
} else {
return npmPackagesBuilder_.getMessage(index);
}
}
    // NOTE(review): the mutators below follow the standard generated pattern — when no
    // field builder exists they copy-on-write the local list (ensureNpmPackagesIsMutable)
    // and call onChanged(); otherwise they forward to the RepeatedFieldBuilderV3, which
    // handles change notification itself. All return {@code this} for chaining.
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public Builder setNpmPackages(
        int index, com.google.devtools.artifactregistry.v1.NpmPackage value) {
      if (npmPackagesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureNpmPackagesIsMutable();
        npmPackages_.set(index, value);
        onChanged();
      } else {
        npmPackagesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public Builder setNpmPackages(
        int index, com.google.devtools.artifactregistry.v1.NpmPackage.Builder builderForValue) {
      if (npmPackagesBuilder_ == null) {
        ensureNpmPackagesIsMutable();
        npmPackages_.set(index, builderForValue.build());
        onChanged();
      } else {
        npmPackagesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public Builder addNpmPackages(com.google.devtools.artifactregistry.v1.NpmPackage value) {
      if (npmPackagesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureNpmPackagesIsMutable();
        npmPackages_.add(value);
        onChanged();
      } else {
        npmPackagesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public Builder addNpmPackages(
        int index, com.google.devtools.artifactregistry.v1.NpmPackage value) {
      if (npmPackagesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureNpmPackagesIsMutable();
        npmPackages_.add(index, value);
        onChanged();
      } else {
        npmPackagesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public Builder addNpmPackages(
        com.google.devtools.artifactregistry.v1.NpmPackage.Builder builderForValue) {
      if (npmPackagesBuilder_ == null) {
        ensureNpmPackagesIsMutable();
        npmPackages_.add(builderForValue.build());
        onChanged();
      } else {
        npmPackagesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public Builder addNpmPackages(
        int index, com.google.devtools.artifactregistry.v1.NpmPackage.Builder builderForValue) {
      if (npmPackagesBuilder_ == null) {
        ensureNpmPackagesIsMutable();
        npmPackages_.add(index, builderForValue.build());
        onChanged();
      } else {
        npmPackagesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public Builder addAllNpmPackages(
        java.lang.Iterable<? extends com.google.devtools.artifactregistry.v1.NpmPackage> values) {
      if (npmPackagesBuilder_ == null) {
        ensureNpmPackagesIsMutable();
        // Bulk-add helper from the protobuf runtime; rejects null elements.
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, npmPackages_);
        onChanged();
      } else {
        npmPackagesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public Builder clearNpmPackages() {
      if (npmPackagesBuilder_ == null) {
        // Reset to the shared empty list and drop the mutability bit (0x00000001).
        npmPackages_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        npmPackagesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public Builder removeNpmPackages(int index) {
      if (npmPackagesBuilder_ == null) {
        ensureNpmPackagesIsMutable();
        npmPackages_.remove(index);
        onChanged();
      } else {
        npmPackagesBuilder_.remove(index);
      }
      return this;
    }
    // NOTE(review): builder-returning accessors below force creation of the field builder
    // (getNpmPackagesFieldBuilder()), after which the local npmPackages_ list is no longer
    // used — see the lazy-init method at the end of this group.
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.NpmPackage.Builder getNpmPackagesBuilder(
        int index) {
      return getNpmPackagesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.NpmPackageOrBuilder getNpmPackagesOrBuilder(
        int index) {
      if (npmPackagesBuilder_ == null) {
        return npmPackages_.get(index);
      } else {
        return npmPackagesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public java.util.List<? extends com.google.devtools.artifactregistry.v1.NpmPackageOrBuilder>
        getNpmPackagesOrBuilderList() {
      if (npmPackagesBuilder_ != null) {
        return npmPackagesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(npmPackages_);
      }
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.NpmPackage.Builder addNpmPackagesBuilder() {
      return getNpmPackagesFieldBuilder()
          .addBuilder(com.google.devtools.artifactregistry.v1.NpmPackage.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.NpmPackage.Builder addNpmPackagesBuilder(
        int index) {
      return getNpmPackagesFieldBuilder()
          .addBuilder(
              index, com.google.devtools.artifactregistry.v1.NpmPackage.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The npm packages returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.NpmPackage npm_packages = 1;</code>
     */
    public java.util.List<com.google.devtools.artifactregistry.v1.NpmPackage.Builder>
        getNpmPackagesBuilderList() {
      return getNpmPackagesFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3, seeding it from npmPackages_ and the
    // current mutability bit; ownership of the data transfers to the builder afterwards.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.devtools.artifactregistry.v1.NpmPackage,
            com.google.devtools.artifactregistry.v1.NpmPackage.Builder,
            com.google.devtools.artifactregistry.v1.NpmPackageOrBuilder>
        getNpmPackagesFieldBuilder() {
      if (npmPackagesBuilder_ == null) {
        npmPackagesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.devtools.artifactregistry.v1.NpmPackage,
                com.google.devtools.artifactregistry.v1.NpmPackage.Builder,
                com.google.devtools.artifactregistry.v1.NpmPackageOrBuilder>(
                npmPackages_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        npmPackages_ = null;
      }
      return npmPackagesBuilder_;
    }
    // Stored as either a String or a ByteString; each getter lazily converts and caches
    // the representation it needs (standard generated lazy string handling).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of artifacts, or empty if there are no
     * more artifacts to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s; // cache the decoded form
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of artifacts, or empty if there are no
     * more artifacts to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b; // cache the encoded form
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of artifacts, or empty if there are no
     * more artifacts to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002; // bit 2 marks next_page_token as set in this builder
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of artifacts, or empty if there are no
     * more artifacts to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      // Restore the default ("" for proto3 strings) and clear the set-bit.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of artifacts, or empty if there are no
     * more artifacts to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Final pass-throughs: unknown-field handling is entirely inherited.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1.ListNpmPackagesResponse)
}
// @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1.ListNpmPackagesResponse)
  // Singleton default instance, created eagerly at class-load time.
  private static final com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse();
  }
  public static com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser; parsePartialFrom attaches the partially-built message to any parse
  // failure so callers can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<ListNpmPackagesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListNpmPackagesResponse>() {
        @java.lang.Override
        public ListNpmPackagesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListNpmPackagesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListNpmPackagesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.ListNpmPackagesResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/site_search_engine.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
*
*
* <pre>
* A sitemap for the SiteSearchEngine.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.Sitemap}
*/
public final class Sitemap extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.Sitemap)
SitemapOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use Sitemap.newBuilder() to construct.
  private Sitemap(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes proto3 string defaults.
  private Sitemap() {
    name_ = "";
  }
  // Used reflectively by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new Sitemap();
  }
  // Descriptor plumbing generated from google/cloud/discoveryengine/v1/site_search_engine.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1.SiteSearchEngineProto
        .internal_static_google_cloud_discoveryengine_v1_Sitemap_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1.SiteSearchEngineProto
        .internal_static_google_cloud_discoveryengine_v1_Sitemap_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1.Sitemap.class,
            com.google.cloud.discoveryengine.v1.Sitemap.Builder.class);
  }
  private int bitField0_;
  // Oneof discriminator for the `feed` oneof: 0 = unset, 2 = uri.
  private int feedCase_ = 0;
  @SuppressWarnings("serial")
  private java.lang.Object feed_;
  // Enum mirror of the `feed` oneof cases; numbers match the proto field numbers.
  public enum FeedCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    URI(2),
    FEED_NOT_SET(0);
    private final int value;
    private FeedCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static FeedCase valueOf(int value) {
      return forNumber(value);
    }
    // Returns null (not an exception) for unrecognized numbers.
    public static FeedCase forNumber(int value) {
      switch (value) {
        case 2:
          return URI;
        case 0:
          return FEED_NOT_SET;
        default:
          return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };
  public FeedCase getFeedCase() {
    return FeedCase.forNumber(feedCase_);
  }
  public static final int URI_FIELD_NUMBER = 2;
  /**
   *
   *
   * <pre>
   * Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
   * </pre>
   *
   * <code>string uri = 2;</code>
   *
   * @return Whether the uri field is set.
   */
  public boolean hasUri() {
    return feedCase_ == 2;
  }
  /**
   *
   *
   * <pre>
   * Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
   * </pre>
   *
   * <code>string uri = 2;</code>
   *
   * @return The uri.
   */
  public java.lang.String getUri() {
    // Default to "" when this oneof case is not active; otherwise lazily
    // decode and cache the ByteString representation as a String.
    java.lang.Object ref = "";
    if (feedCase_ == 2) {
      ref = feed_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (feedCase_ == 2) {
        feed_ = s;
      }
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
   * </pre>
   *
   * <code>string uri = 2;</code>
   *
   * @return The bytes for uri.
   */
  public com.google.protobuf.ByteString getUriBytes() {
    java.lang.Object ref = "";
    if (feedCase_ == 2) {
      ref = feed_;
    }
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      if (feedCase_ == 2) {
        feed_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int NAME_FIELD_NUMBER = 1;
  // Lazy string: holds either a String or a ByteString, converted on demand.
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";
  /**
   *
   *
   * <pre>
   * Output only. The fully qualified resource name of the sitemap.
   * `projects/*&#47;locations/*&#47;collections/*&#47;dataStores/*&#47;siteSearchEngine/sitemaps/*`
   * The `sitemap_id` suffix is system-generated.
   * </pre>
   *
   * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s; // cache decoded form
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. The fully qualified resource name of the sitemap.
   * `projects/*&#47;locations/*&#47;collections/*&#47;dataStores/*&#47;siteSearchEngine/sitemaps/*`
   * The `sitemap_id` suffix is system-generated.
   * </pre>
   *
   * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b; // cache encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int CREATE_TIME_FIELD_NUMBER = 3;
  // Null when unset; presence is additionally tracked by bit 0x00000001 of bitField0_.
  private com.google.protobuf.Timestamp createTime_;
  /**
   *
   *
   * <pre>
   * Output only. The sitemap's creation time.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return Whether the createTime field is set.
   */
  @java.lang.Override
  public boolean hasCreateTime() {
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The createTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getCreateTime() {
return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
  // Memoized initialization flag: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1; // proto3: no required fields, always initialized
    return true;
  }
  // Serializes set fields in ascending field-number order: name (1), uri (2, oneof),
  // create_time (3), then any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (feedCase_ == 2) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, feed_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getCreateTime());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized byte size; must mirror writeTo() exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (feedCase_ == 2) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, feed_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getCreateTime());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality including the active oneof case and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.discoveryengine.v1.Sitemap)) {
      return super.equals(obj);
    }
    com.google.cloud.discoveryengine.v1.Sitemap other =
        (com.google.cloud.discoveryengine.v1.Sitemap) obj;
    if (!getName().equals(other.getName())) return false;
    if (hasCreateTime() != other.hasCreateTime()) return false;
    if (hasCreateTime()) {
      if (!getCreateTime().equals(other.getCreateTime())) return false;
    }
    // Oneof: cases must match, then the active member is compared.
    if (!getFeedCase().equals(other.getFeedCase())) return false;
    switch (feedCase_) {
      case 2:
        if (!getUri().equals(other.getUri())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(); folds in field numbers so distinct
  // fields with equal values hash differently.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    if (hasCreateTime()) {
      hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getCreateTime().hashCode();
    }
    switch (feedCase_) {
      case 2:
        hash = (37 * hash) + URI_FIELD_NUMBER;
        hash = (53 * hash) + getUri().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an ExtensionRegistryLite. All delegate to the shared PARSER.
  public static com.google.cloud.discoveryengine.v1.Sitemap parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.discoveryengine.v1.Sitemap parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.discoveryengine.v1.Sitemap parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.discoveryengine.v1.Sitemap parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.discoveryengine.v1.Sitemap parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.discoveryengine.v1.Sitemap parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.discoveryengine.v1.Sitemap parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.discoveryengine.v1.Sitemap parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message body.
  public static com.google.cloud.discoveryengine.v1.Sitemap parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.discoveryengine.v1.Sitemap parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.discoveryengine.v1.Sitemap parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.discoveryengine.v1.Sitemap parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.discoveryengine.v1.Sitemap prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; anything else is copied in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* A sitemap for the SiteSearchEngine.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.Sitemap}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.Sitemap)
com.google.cloud.discoveryengine.v1.SitemapOrBuilder {
    // Builder-side descriptor plumbing; mirrors the message-level accessors.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.discoveryengine.v1.SiteSearchEngineProto
          .internal_static_google_cloud_discoveryengine_v1_Sitemap_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.discoveryengine.v1.SiteSearchEngineProto
          .internal_static_google_cloud_discoveryengine_v1_Sitemap_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.discoveryengine.v1.Sitemap.class,
              com.google.cloud.discoveryengine.v1.Sitemap.Builder.class);
    }
    // Construct using com.google.cloud.discoveryengine.v1.Sitemap.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested field builders when the runtime requests it
    // (alwaysUseFieldBuilders), so change notifications propagate to the parent.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getCreateTimeFieldBuilder();
      }
    }
    // Resets every field to its default, disposes the nested createTime builder,
    // and clears the feed oneof.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      createTime_ = null;
      if (createTimeBuilder_ != null) {
        createTimeBuilder_.dispose();
        createTimeBuilder_ = null;
      }
      feedCase_ = 0;
      feed_ = null;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.discoveryengine.v1.SiteSearchEngineProto
          .internal_static_google_cloud_discoveryengine_v1_Sitemap_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.discoveryengine.v1.Sitemap getDefaultInstanceForType() {
      return com.google.cloud.discoveryengine.v1.Sitemap.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.discoveryengine.v1.Sitemap build() {
      com.google.cloud.discoveryengine.v1.Sitemap result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.discoveryengine.v1.Sitemap buildPartial() {
      com.google.cloud.discoveryengine.v1.Sitemap result =
          new com.google.cloud.discoveryengine.v1.Sitemap(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }
    // Copies bit-tracked fields into the message. Builder bits differ from message
    // bits: builder 0x00000002 = name, builder 0x00000004 -> message 0x00000001 = create_time.
    private void buildPartial0(com.google.cloud.discoveryengine.v1.Sitemap result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.name_ = name_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Transfers the feed oneof case and value verbatim.
    private void buildPartialOneofs(com.google.cloud.discoveryengine.v1.Sitemap result) {
      result.feedCase_ = feedCase_;
      result.feed_ = this.feed_;
    }
    // Reflective field-access overrides: pure pass-throughs to the superclass,
    // regenerated here so the return type is this concrete Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for the concrete type; falls back to reflective merge otherwise.
      if (other instanceof com.google.cloud.discoveryengine.v1.Sitemap) {
        return mergeFrom((com.google.cloud.discoveryengine.v1.Sitemap) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merge semantics: non-empty scalars in `other` overwrite, set messages are
    // recursively merged, and a set oneof case in `other` replaces the local one.
    public Builder mergeFrom(com.google.cloud.discoveryengine.v1.Sitemap other) {
      if (other == com.google.cloud.discoveryengine.v1.Sitemap.getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasCreateTime()) {
        mergeCreateTime(other.getCreateTime());
      }
      switch (other.getFeedCase()) {
        case URI:
          {
            feedCase_ = 2;
            feed_ = other.feed_;
            onChanged();
            break;
          }
        case FEED_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // Proto3 message with no required fields: always initialized.
      return true;
    }
    // Wire-format parse loop. Tags map to the fields documented above: 10 = name
    // (field 1), 18 = uri (field 2, activates the feed oneof), 26 = create_time (field 3).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 10
            case 18:
              {
                java.lang.String s = input.readStringRequireUtf8();
                feedCase_ = 2;
                feed_ = s;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
// State for the "feed" oneof: feedCase_ identifies the set member
// (0 = not set, 2 = uri) and feed_ holds that member's value.
private int feedCase_ = 0;
private java.lang.Object feed_;
public FeedCase getFeedCase() {
return FeedCase.forNumber(feedCase_);
}
// Clears whichever "feed" member is currently set.
public Builder clearFeed() {
feedCase_ = 0;
feed_ = null;
onChanged();
return this;
}
// Presence bits for non-oneof fields: 0x2 = name, 0x4 = create_time.
private int bitField0_;
/**
 *
 *
 * <pre>
 * Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
 * </pre>
 *
 * <code>string uri = 2;</code>
 *
 * @return Whether the uri field is set.
 */
@java.lang.Override
public boolean hasUri() {
// uri is "set" exactly when the feed oneof case is URI (2).
return feedCase_ == 2;
}
/**
 * Public URI for the sitemap, e.g. {@code www.example.com/sitemap.xml}.
 *
 * <code>string uri = 2;</code>
 *
 * @return The uri, or the empty string when the feed oneof does not hold a uri.
 */
@java.lang.Override
public java.lang.String getUri() {
  // Default to the empty string unless the "feed" oneof currently holds a uri.
  final java.lang.Object current = (feedCase_ == 2) ? feed_ : "";
  if (current instanceof java.lang.String) {
    return (java.lang.String) current;
  }
  // Lazily decode the ByteString form and cache the decoded String back into
  // the oneof slot (only while the oneof still holds the uri member).
  final java.lang.String decoded = ((com.google.protobuf.ByteString) current).toStringUtf8();
  if (feedCase_ == 2) {
    feed_ = decoded;
  }
  return decoded;
}
/**
 * Public URI for the sitemap, e.g. {@code www.example.com/sitemap.xml}.
 *
 * <code>string uri = 2;</code>
 *
 * @return The bytes for uri (UTF-8), or empty when the oneof does not hold a uri.
 */
@java.lang.Override
public com.google.protobuf.ByteString getUriBytes() {
  // Default to the empty string unless the "feed" oneof currently holds a uri.
  final java.lang.Object current = (feedCase_ == 2) ? feed_ : "";
  if (!(current instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) current;
  }
  // Encode the cached String form and store the ByteString back into the
  // oneof slot (only while the oneof still holds the uri member).
  final com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
  if (feedCase_ == 2) {
    feed_ = encoded;
  }
  return encoded;
}
/**
 *
 *
 * <pre>
 * Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
 * </pre>
 *
 * <code>string uri = 2;</code>
 *
 * @param value The uri to set.
 * @return This builder for chaining.
 */
public Builder setUri(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
// Setting uri selects the URI member of the "feed" oneof.
feedCase_ = 2;
feed_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
 * </pre>
 *
 * <code>string uri = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearUri() {
// Only clears the oneof when uri is the currently-set member.
if (feedCase_ == 2) {
feedCase_ = 0;
feed_ = null;
onChanged();
}
return this;
}
/**
 *
 *
 * <pre>
 * Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
 * </pre>
 *
 * <code>string uri = 2;</code>
 *
 * @param value The bytes for uri to set.
 * @return This builder for chaining.
 */
public Builder setUriBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject invalid bytes up front.
checkByteStringIsUtf8(value);
feedCase_ = 2;
feed_ = value;
onChanged();
return this;
}
// Holds either a String or a ByteString; converted lazily on access.
private java.lang.Object name_ = "";
/**
 *
 *
 * <pre>
 * Output only. The fully qualified resource name of the sitemap.
 * `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
 * The `sitemap_id` suffix is system-generated.
 * </pre>
 *
 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The name.
 */
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
// Decode the ByteString form once and cache the String back into the field.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Output only. The fully qualified resource name of the sitemap.
 * `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
 * The `sitemap_id` suffix is system-generated.
 * </pre>
 *
 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The bytes for name.
 */
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
// Encode the String form once and cache the ByteString back into the field.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Output only. The fully qualified resource name of the sitemap.
 * `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
 * The `sitemap_id` suffix is system-generated.
 * </pre>
 *
 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @param value The name to set.
 * @return This builder for chaining.
 */
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
// Mark name (presence bit 0x2) as explicitly set.
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. The fully qualified resource name of the sitemap.
 * `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
 * The `sitemap_id` suffix is system-generated.
 * </pre>
 *
 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearName() {
// Restore the default value and drop the presence bit (0x2).
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. The fully qualified resource name of the sitemap.
 * `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
 * The `sitemap_id` suffix is system-generated.
 * </pre>
 *
 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @param value The bytes for name to set.
 * @return This builder for chaining.
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject invalid bytes up front.
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// create_time is stored either directly (createTime_) or, once a nested
// builder is requested, via the lazily created single-field builder.
private com.google.protobuf.Timestamp createTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
createTimeBuilder_;
/**
 *
 *
 * <pre>
 * Output only. The sitemap's creation time.
 * </pre>
 *
 * <code>
 * .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return Whether the createTime field is set.
 */
public boolean hasCreateTime() {
// Presence bit 0x4 tracks create_time.
return ((bitField0_ & 0x00000004) != 0);
}
/**
 *
 *
 * <pre>
 * Output only. The sitemap's creation time.
 * </pre>
 *
 * <code>
 * .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return The createTime.
 */
public com.google.protobuf.Timestamp getCreateTime() {
if (createTimeBuilder_ == null) {
// Never return null; fall back to the Timestamp default instance.
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
} else {
return createTimeBuilder_.getMessage();
}
}
/**
 *
 *
 * <pre>
 * Output only. The sitemap's creation time.
 * </pre>
 *
 * <code>
 * .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder setCreateTime(com.google.protobuf.Timestamp value) {
// Route through the nested builder when one exists; otherwise store directly.
if (createTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
createTime_ = value;
} else {
createTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. The sitemap's creation time.
 * </pre>
 *
 * <code>
 * .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
// Convenience overload: builds the Timestamp from the supplied builder.
if (createTimeBuilder_ == null) {
createTime_ = builderForValue.build();
} else {
createTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. The sitemap's creation time.
 * </pre>
 *
 * <code>
 * .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder mergeCreateTime(com.google.protobuf.Timestamp value) {
if (createTimeBuilder_ == null) {
// Merge into the existing value only when one is actually set and is not
// the shared default instance; otherwise just adopt the new value.
if (((bitField0_ & 0x00000004) != 0)
&& createTime_ != null
&& createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getCreateTimeBuilder().mergeFrom(value);
} else {
createTime_ = value;
}
} else {
createTimeBuilder_.mergeFrom(value);
}
if (createTime_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
 *
 *
 * <pre>
 * Output only. The sitemap's creation time.
 * </pre>
 *
 * <code>
 * .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder clearCreateTime() {
// Drop the presence bit, the stored value, and any nested builder.
bitField0_ = (bitField0_ & ~0x00000004);
createTime_ = null;
if (createTimeBuilder_ != null) {
createTimeBuilder_.dispose();
createTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. The sitemap's creation time.
 * </pre>
 *
 * <code>
 * .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() {
// Exposing the builder implies the field will be set: flip the bit eagerly.
bitField0_ |= 0x00000004;
onChanged();
return getCreateTimeFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Output only. The sitemap's creation time.
 * </pre>
 *
 * <code>
 * .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
// Read-only view; never returns null (falls back to the default instance).
if (createTimeBuilder_ != null) {
return createTimeBuilder_.getMessageOrBuilder();
} else {
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
}
}
/**
 *
 *
 * <pre>
 * Output only. The sitemap's creation time.
 * </pre>
 *
 * <code>
 * .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getCreateTimeFieldBuilder() {
// Lazily create the nested builder, seeding it with the current value;
// ownership of the value transfers to the builder, so createTime_ is nulled.
if (createTimeBuilder_ == null) {
createTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getCreateTime(), getParentForChildren(), isClean());
createTime_ = null;
}
return createTimeBuilder_;
}
// Straight delegations kept final so subclasses cannot alter unknown-field
// handling.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.Sitemap)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.Sitemap)
// Shared immutable default instance; also the identity value for merges.
private static final com.google.cloud.discoveryengine.v1.Sitemap DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.Sitemap();
}
public static com.google.cloud.discoveryengine.v1.Sitemap getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser. On failure the partially built message is attached to
// the exception so callers can inspect what was parsed before the error.
private static final com.google.protobuf.Parser<Sitemap> PARSER =
new com.google.protobuf.AbstractParser<Sitemap>() {
@java.lang.Override
public Sitemap parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a protobuf-typed exception.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static and per-instance accessors for the shared parser / default instance.
public static com.google.protobuf.Parser<Sitemap> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Sitemap> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.Sitemap getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
hibernate/hibernate-orm | 34,968 | hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnnotatedColumn.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.boot.model.internal;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.hibernate.AnnotationException;
import org.hibernate.annotations.Array;
import org.hibernate.annotations.Check;
import org.hibernate.annotations.Checks;
import org.hibernate.annotations.ColumnDefault;
import org.hibernate.annotations.ColumnTransformer;
import org.hibernate.annotations.FractionalSeconds;
import org.hibernate.annotations.GeneratedColumn;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.naming.ImplicitBasicColumnNameSource;
import org.hibernate.boot.model.naming.ImplicitNamingStrategy;
import org.hibernate.boot.model.naming.ObjectNameNormalizer;
import org.hibernate.boot.model.naming.PhysicalNamingStrategy;
import org.hibernate.boot.model.relational.Database;
import org.hibernate.boot.model.source.spi.AttributePath;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.PropertyData;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.CheckConstraint;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.Formula;
import org.hibernate.mapping.Join;
import org.hibernate.mapping.SimpleValue;
import org.hibernate.mapping.Table;
import org.hibernate.models.spi.ModelsContext;
import static org.hibernate.boot.model.internal.BinderHelper.getPath;
import static org.hibernate.boot.model.internal.BinderHelper.getRelativePath;
import static org.hibernate.boot.model.internal.DialectOverridesAnnotationHelper.getOverridableAnnotation;
import static org.hibernate.boot.BootLogging.BOOT_LOGGER;
import static org.hibernate.internal.util.StringHelper.isBlank;
import static org.hibernate.internal.util.StringHelper.isEmpty;
import static org.hibernate.internal.util.StringHelper.isNotEmpty;
import static org.hibernate.internal.util.StringHelper.nullIfBlank;
import static org.hibernate.internal.util.collections.ArrayHelper.isEmpty;
import static org.hibernate.internal.util.collections.CollectionHelper.isNotEmpty;
/**
* A mapping to a column, logically representing a
* {@link jakarta.persistence.Column} annotation, but not
* every instance corresponds to an explicit annotation in
* the Java code.
* <p>
* This class holds a representation that is intermediate
* between the annotation of the Java source code, and the
* mapping model object {@link Column}. It's used only by
* the {@link AnnotationBinder} while parsing annotations,
* and does not survive into later stages of the startup
* process.
*
* @author Emmanuel Bernard
*/
public class AnnotatedColumn {
// The mapping-model column being built; null until initMappingColumn() runs.
private Column mappingColumn;
private boolean insertable = true;
private boolean updatable = true;
private String explicitTableName; // the JPA @Column annotation lets you specify a table name
private boolean isImplicit;
public String sqlType;
private Long length;
private Integer precision;
private Integer scale;
private Integer temporalPrecision; // technically scale, but most dbs call it precision so...
private Integer arrayLength;
// Logical (pre-naming-strategy) name, used when registering the column-name binding.
private String logicalColumnName;
private boolean unique;
private boolean nullable = true;
// When the property maps to a @Formula, these are populated instead of mappingColumn.
private String formulaString;
private Formula formula;
// Custom read/write expressions from @ColumnTransformer.
private String readExpression;
private String writeExpression;
private String defaultValue;
private String generatedAs;
private final List<CheckConstraint> checkConstraints = new ArrayList<>();
// The group of columns this column belongs to (set via setParent()).
private AnnotatedColumns parent;
String options;
String comment;
public AnnotatedColumns getParent() {
return parent;
}
// Registers this column with the given group and remembers the group as owner.
public void setParent(AnnotatedColumns parent) {
parent.addColumn( this );
this.parent = parent;
}
public String getLogicalColumnName() {
return logicalColumnName;
}
public String getSqlType() {
return sqlType;
}
public Long getLength() {
return length;
}
public Integer getPrecision() {
return precision;
}
public Integer getScale() {
return scale;
}
public Integer getArrayLength() {
return arrayLength;
}
public void setArrayLength(Integer arrayLength) {
this.arrayLength = arrayLength;
}
public boolean isUnique() {
return unique;
}
// A column is a formula mapping when a formula string was supplied.
public boolean isFormula() {
return isNotEmpty( formulaString );
}
public String getExplicitTableName() {
return explicitTableName;
}
// "``" (empty backticks) is the legacy way of specifying an empty table name.
public void setExplicitTableName(String explicitTableName) {
this.explicitTableName = "``".equals( explicitTableName ) ? "" : explicitTableName;
}
public void setFormula(String formula) {
this.formulaString = formula;
}
public boolean isImplicit() {
return isImplicit;
}
public void setInsertable(boolean insertable) {
this.insertable = insertable;
}
public void setUpdatable(boolean updatable) {
this.updatable = updatable;
}
public void setImplicit(boolean implicit) {
isImplicit = implicit;
}
public void setSqlType(String sqlType) {
this.sqlType = sqlType;
}
public void setLength(Long length) {
this.length = length;
}
public void setPrecision(Integer precision) {
this.precision = precision;
}
public void setScale(Integer scale) {
this.scale = scale;
}
public void setTemporalPrecision(Integer temporalPrecision) {
this.temporalPrecision = temporalPrecision;
}
public void setLogicalColumnName(String logicalColumnName) {
this.logicalColumnName = logicalColumnName;
}
public void setUnique(boolean unique) {
this.unique = unique;
}
// Formula mappings are treated as nullable; otherwise defer to the bound column.
public boolean isNullable() {
return isFormula() || mappingColumn.isNullable();
}
public String getDefaultValue() {
return defaultValue;
}
public void setDefaultValue(String defaultValue) {
this.defaultValue = defaultValue;
}
public void addCheckConstraint(String name, String constraint) {
checkConstraints.add( new CheckConstraint( name, constraint ) );
}
public void addCheckConstraint(String name, String constraint, String options) {
checkConstraints.add( new CheckConstraint( name, constraint, options ) );
}
// public String getComment() {
// return comment;
// }
// public void setComment(String comment) {
// this.comment = comment;
// }
public String getGeneratedAs() {
return generatedAs;
}
private void setGeneratedAs(String as) {
this.generatedAs = as;
}
public AnnotatedColumn() {
}
// Builds the mapping-model representation: either a Formula (when a formula
// string was supplied) or a Column populated from the collected settings.
public void bind() {
if ( isNotEmpty( formulaString ) ) {
if ( BOOT_LOGGER.isTraceEnabled() ) {
BOOT_LOGGER.bindingFormula( formulaString );
}
formula = new Formula();
formula.setFormula( formulaString );
}
else {
initMappingColumn(
logicalColumnName,
getParent().getPropertyName(),
length,
precision,
scale,
temporalPrecision,
arrayLength,
nullable,
sqlType,
unique,
true
);
if ( defaultValue != null ) {
mappingColumn.setDefaultValue( defaultValue );
}
for ( CheckConstraint constraint : checkConstraints ) {
mappingColumn.addCheckConstraint( constraint );
}
mappingColumn.setOptions( options );
if ( isNotEmpty( comment ) ) {
mappingColumn.setComment( comment );
}
if ( generatedAs != null ) {
mappingColumn.setGeneratedAs( generatedAs );
}
if ( BOOT_LOGGER.isDebugEnabled() && logicalColumnName != null ) {
BOOT_LOGGER.bindingColumn( logicalColumnName );
}
}
}
// Creates and populates the mapping-model Column (or Formula) from the given
// settings. May be called again from a second pass when the column name could
// not be determined on the first call.
protected void initMappingColumn(
String columnName,
String propertyName,
Long length,
Integer precision,
Integer scale,
Integer temporalPrecision,
Integer arrayLength,
boolean nullable,
String sqlType,
boolean unique,
boolean applyNamingStrategy) {
if ( isNotEmpty( formulaString ) ) {
formula = new Formula();
formula.setFormula( formulaString );
}
else {
mappingColumn = new Column();
mappingColumn.setExplicit( !isImplicit );
final boolean nameDetermined =
inferColumnNameIfPossible( columnName, propertyName, applyNamingStrategy );
mappingColumn.setLength( length );
if ( precision != null && precision > 0 ) { //relevant precision
mappingColumn.setPrecision( precision );
mappingColumn.setScale( scale );
}
if ( temporalPrecision != null ) {
mappingColumn.setTemporalPrecision( temporalPrecision );
}
mappingColumn.setArrayLength( arrayLength );
mappingColumn.setNullable( nullable );
mappingColumn.setSqlType( sqlType );
mappingColumn.setUnique( unique );
// if the column name is not determined, we will assign the
// name to the unique key later this method gets called again
// from linkValueUsingDefaultColumnNaming() in second pass
if ( unique && nameDetermined ) {
// assign a unique key name to the column
getParent().getTable().createUniqueKey( mappingColumn, getBuildingContext() );
}
for ( CheckConstraint constraint : checkConstraints ) {
mappingColumn.addCheckConstraint( constraint );
}
mappingColumn.setDefaultValue( defaultValue );
mappingColumn.setOptions( options );
mappingColumn.setComment( comment );
if ( writeExpression != null ) {
// @ColumnTransformer write expressions must have exactly one '?' placeholder.
final int numberOfJdbcParams = StringHelper.count( writeExpression, '?' );
if ( numberOfJdbcParams != 1 ) {
throw new AnnotationException(
"Write expression in '@ColumnTransformer' for property '" + propertyName
+ "' and column '" + logicalColumnName + "'"
+ " must contain exactly one placeholder character ('?')"
);
}
}
mappingColumn.setResolvedCustomRead( readExpression );
mappingColumn.setCustomWrite( writeExpression );
}
}
/** Whether this column has not yet been assigned a physical name. */
public boolean isNameDeferred() {
	if ( mappingColumn == null ) {
		return true;
	}
	return isEmpty( mappingColumn.getName() );
}
/**
 * Attempt to infer the column name from the explicit {@code name} given by the annotation and the property or field
 * name. In the case of a {@link jakarta.persistence.JoinColumn}, this is impossible, due to the rules implemented in
 * {@link org.hibernate.boot.model.naming.ImplicitNamingStrategyJpaCompliantImpl#determineJoinColumnName}. In cases
 * where the column name cannot be inferred, the {@link Column} is not assigned a name, and this method returns
 * {@code false}. The "dummy" {@code Column} will later be replaced with a {@code Column} with a name determined by
 * the {@link ImplicitNamingStrategy} when {@link AnnotatedJoinColumn#linkValueUsingDefaultColumnNaming} is called
 * during a {@link org.hibernate.boot.spi.SecondPass}.
 * @return {@code true} if a name could be inferred
 */
boolean inferColumnNameIfPossible(String columnName, String propertyName, boolean applyNamingStrategy) {
	if ( isEmpty( columnName ) && isEmpty( propertyName ) ) {
		// neither an explicit name nor a property name: defer naming to a second pass
		return false;
	}
	final String logical = resolveLogicalColumnName( columnName, propertyName );
	mappingColumn.setName( processColumnName( logical, applyNamingStrategy ) );
	return true;
}
/**
 * Determines the logical column name: the explicit annotation name when given,
 * otherwise a name inferred from the property, in either case run through any
 * applicable {@code @EmbeddedColumnNaming} patterns for embedded attributes.
 */
private String resolveLogicalColumnName(String columnName, String propertyName) {
	final String baseColumnName = isNotEmpty( columnName ) ? columnName : inferColumnName( propertyName );
	final var propertyHolder = parent.getPropertyHolder();
	if ( propertyHolder != null && propertyHolder.isComponent() ) {
		// embedded attribute: apply one-or-more @EmbeddedColumnNaming patterns
		return applyEmbeddedColumnNaming( baseColumnName, (ComponentPropertyHolder) propertyHolder );
	}
	return baseColumnName;
}
// Applies the @EmbeddedColumnNaming pattern of the immediate component and of
// every enclosing component, innermost first, e.g. "code" -> "zip_code" ->
// "home_zip_code". Updates logicalColumnName when any pattern applied so the
// result is picked up by addColumnBinding().
private String applyEmbeddedColumnNaming(String inferredColumnName, ComponentPropertyHolder propertyHolder) {
// code
String result = inferredColumnName;
boolean appliedAnyPatterns = false;
final String columnNamingPattern = propertyHolder.getComponent().getColumnNamingPattern();
if ( isNotEmpty( columnNamingPattern ) ) {
// zip_code
result = String.format( columnNamingPattern, result );
appliedAnyPatterns = true;
}
// walk up the chain of enclosing components, applying each pattern in turn
ComponentPropertyHolder tester = propertyHolder;
while ( tester.parent.isComponent() ) {
final var parentHolder = (ComponentPropertyHolder) tester.parent;
final String parentColumnNamingPattern = parentHolder.getComponent().getColumnNamingPattern();
if ( isNotEmpty( parentColumnNamingPattern ) ) {
// home_zip_code
result = String.format( parentColumnNamingPattern, result );
appliedAnyPatterns = true;
}
tester = parentHolder;
}
if ( appliedAnyPatterns ) {
// we need to adjust the logical name to be picked up in `#addColumnBinding`
this.logicalColumnName = result;
}
return result;
}
/**
 * Converts a logical column name to its database text: either via the
 * {@link PhysicalNamingStrategy} (when {@code applyNamingStrategy} is set)
 * or by merely normalizing identifier quoting.
 */
protected String processColumnName(String columnName, boolean applyNamingStrategy) {
	if ( !applyNamingStrategy ) {
		return getObjectNameNormalizer().toDatabaseIdentifierText( columnName );
	}
	final var database = getDatabase();
	final Identifier physicalName = getPhysicalNamingStrategy()
			.toPhysicalColumnName( database.toIdentifier( columnName ), database.getJdbcEnvironment() );
	return physicalName.render( database.getDialect() );
}
// Derives an implicit column name for the given property path via the
// configured ImplicitNamingStrategy, normalizing quoting and rendering for
// the target dialect.
protected String inferColumnName(String propertyName) {
Identifier implicitName = getObjectNameNormalizer().normalizeIdentifierQuoting(
getImplicitNamingStrategy().determineBasicColumnName(
new ImplicitBasicColumnNameSource() {
final AttributePath attributePath = AttributePath.parse( propertyName );
@Override
public AttributePath getAttributePath() {
return attributePath;
}
@Override
public boolean isCollectionElement() {
// if the propertyHolder is a collection, assume the
// @Column refers to the element column
final var propertyHolder = getParent().getPropertyHolder();
return !propertyHolder.isComponent() && !propertyHolder.isEntity();
}
@Override
public MetadataBuildingContext getBuildingContext() {
return AnnotatedColumn.this.getBuildingContext();
}
}
)
);
// HHH-6005 magic
if ( implicitName.getText().contains( "_{element}_" ) ) {
implicitName = Identifier.toIdentifier(
implicitName.getText().replace( "_{element}_", "_" ),
implicitName.isQuoted()
);
}
return implicitName.render( getDatabase().getDialect() );
}
// Convenience accessors for naming-related services on the building context.
private ObjectNameNormalizer getObjectNameNormalizer() {
return getBuildingContext().getObjectNameNormalizer();
}
private Database getDatabase() {
return getBuildingContext().getMetadataCollector().getDatabase();
}
private PhysicalNamingStrategy getPhysicalNamingStrategy() {
return getBuildingContext().getBuildingOptions().getPhysicalNamingStrategy();
}
private ImplicitNamingStrategy getImplicitNamingStrategy() {
return getBuildingContext().getBuildingOptions().getImplicitNamingStrategy();
}
public String getName() {
return mappingColumn.getName();
}
public Column getMappingColumn() {
return mappingColumn;
}
public boolean isInsertable() {
return insertable;
}
public boolean isUpdatable() {
return updatable;
}
// Keeps the already-built mapping column (if any) in sync with the flag.
public void setNullable(boolean nullable) {
this.nullable = nullable;
if ( mappingColumn != null ) {
mappingColumn.setNullable( nullable );
}
}
protected void setMappingColumn(Column mappingColumn) {
this.mappingColumn = mappingColumn;
}
//TODO: move this operation to AnnotatedColumns!!
// Wraps the column in an AggregateColumn before linking, for @Struct/aggregate
// component mappings.
public void linkWithAggregateValue(SimpleValue value, Component component) {
mappingColumn = new AggregateColumn( mappingColumn, component );
linkWithValue( value );
}
// Attaches either the formula or the column to the given value, registering
// the column with the value's table and recording the logical-name binding.
public void linkWithValue(SimpleValue value) {
if ( formula != null ) {
value.addFormula( formula );
}
else {
final Table table = value.getTable();
parent.setTable( table );
mappingColumn.setValue( value );
value.addColumn( mappingColumn, insertable, updatable );
table.addColumn( mappingColumn );
addColumnBinding( value );
}
}
// Registers the logical-name -> column binding with the metadata collector.
// Falls back to an implicitly determined name when no logical name was set.
protected void addColumnBinding(SimpleValue value) {
final String logicalColumnName;
if ( isNotEmpty( this.logicalColumnName ) ) {
logicalColumnName = this.logicalColumnName;
}
else {
final Identifier implicitName = getObjectNameNormalizer().normalizeIdentifierQuoting(
getImplicitNamingStrategy().determineBasicColumnName(
new ImplicitBasicColumnNameSource() {
@Override
public AttributePath getAttributePath() {
return AttributePath.parse( getParent().getPropertyName() );
}
@Override
public boolean isCollectionElement() {
return false;
}
@Override
public MetadataBuildingContext getBuildingContext() {
return AnnotatedColumn.this.getBuildingContext();
}
}
)
);
logicalColumnName = implicitName.render( getDatabase().getDialect() );
}
getBuildingContext().getMetadataCollector()
.addColumnNameBinding( value.getTable(), logicalColumnName, getMappingColumn() );
}
// Makes the column non-nullable; fails for formula mappings, which have no
// underlying Column to constrain.
public void forceNotNull() {
if ( mappingColumn == null ) {
throw new CannotForceNonNullableException(
"Cannot perform #forceNotNull because internal org.hibernate.mapping.Column reference is null: " +
"likely a formula"
);
}
nullable = false;
mappingColumn.setNullable( false );
}
// Builds the column group for a property mapped with @Formula; delegates to
// the combined column-or-formula factory with a null @Column.
public static AnnotatedColumns buildFormulaFromAnnotation(
org.hibernate.annotations.Formula formulaAnn,
// Comment commentAnn,
Nullability nullability,
PropertyHolder propertyHolder,
PropertyData inferredData,
Map<String, Join> secondaryTables,
MetadataBuildingContext context) {
return buildColumnOrFormulaFromAnnotation(
null,
formulaAnn,
null,
// commentAnn,
nullability,
propertyHolder,
inferredData,
secondaryTables,
context
);
}
// Builds the column group for a property with no @Column annotation at all;
// an implicit column is created downstream.
public static AnnotatedColumns buildColumnFromNoAnnotation(
FractionalSeconds fractionalSeconds,
// Comment commentAnn,
Nullability nullability,
PropertyHolder propertyHolder,
PropertyData inferredData,
Map<String, Join> secondaryTables,
MetadataBuildingContext context) {
return buildColumnsFromAnnotations(
null,
fractionalSeconds,
// commentAnn,
nullability,
propertyHolder,
inferredData,
secondaryTables,
context
);
}
// Builds the column group for a property mapped with a single @Column
// annotation (which may be null).
public static AnnotatedColumns buildColumnFromAnnotation(
jakarta.persistence.Column column,
FractionalSeconds fractionalSeconds,
// Comment commentAnn,
Nullability nullability,
PropertyHolder propertyHolder,
PropertyData inferredData,
Map<String, Join> secondaryTables,
MetadataBuildingContext context) {
return buildColumnOrFormulaFromAnnotation(
column,
null,
fractionalSeconds,
// commentAnn,
nullability,
propertyHolder,
inferredData,
secondaryTables,
context
);
}
// Builds the column group for a property mapped with multiple @Column
// annotations (e.g. via @Columns); no formula, no default-name suffix.
public static AnnotatedColumns buildColumnsFromAnnotations(
jakarta.persistence.Column[] columns,
FractionalSeconds fractionalSeconds,
// Comment commentAnn,
Nullability nullability,
PropertyHolder propertyHolder,
PropertyData inferredData,
Map<String, Join> secondaryTables,
MetadataBuildingContext context) {
return buildColumnsOrFormulaFromAnnotation(
columns,
null,
fractionalSeconds,
// commentAnn,
nullability,
propertyHolder,
inferredData,
null,
secondaryTables,
context
);
}
// Builds the column group for a single (possibly null) @Column, with a suffix
// to disambiguate default column names (e.g. for map keys / list indexes).
public static AnnotatedColumns buildColumnFromAnnotations(
jakarta.persistence.Column column,
// Comment commentAnn,
Nullability nullability,
PropertyHolder propertyHolder,
PropertyData inferredData,
String suffixForDefaultColumnName,
Map<String, Join> secondaryTables,
MetadataBuildingContext context) {
return buildColumnsOrFormulaFromAnnotation(
column == null
? null
: new jakarta.persistence.Column[] {column},
null,
null,
// commentAnn,
nullability,
propertyHolder,
inferredData,
suffixForDefaultColumnName,
secondaryTables,
context
);
}
/**
 * Builds the column group for a property mapped with a single (possibly null)
 * {@code @Column} or with a {@code @Formula}; delegates to
 * {@link #buildColumnsOrFormulaFromAnnotation} with no default-name suffix.
 */
public static AnnotatedColumns buildColumnOrFormulaFromAnnotation(
jakarta.persistence.Column column,
org.hibernate.annotations.Formula formulaAnn,
FractionalSeconds fractionalSeconds,
// Comment commentAnn,
Nullability nullability,
PropertyHolder propertyHolder,
PropertyData inferredData,
Map<String, Join> secondaryTables,
MetadataBuildingContext context) {
return buildColumnsOrFormulaFromAnnotation(
// wrap the single annotation in an array, as the sibling overloads do
column == null
? null
: new jakarta.persistence.Column[] {column},
formulaAnn,
fractionalSeconds,
// commentAnn,
nullability,
propertyHolder,
inferredData,
null,
secondaryTables,
context
);
}
// Central factory: builds either a formula column group, an implicit column
// (when no explicit @Columns survive @AttributeOverride processing), or the
// explicit columns.
public static AnnotatedColumns buildColumnsOrFormulaFromAnnotation(
jakarta.persistence.Column[] columns,
org.hibernate.annotations.Formula formulaAnn,
FractionalSeconds fractionalSeconds,
// Comment comment,
Nullability nullability,
PropertyHolder propertyHolder,
PropertyData inferredData,
String suffixForDefaultColumnName,
Map<String, Join> secondaryTables,
MetadataBuildingContext context) {
if ( formulaAnn != null ) {
// @Formula wins over any @Column annotations
final var parent = new AnnotatedColumns();
parent.setPropertyHolder( propertyHolder );
parent.setPropertyName( getRelativePath( propertyHolder, inferredData.getPropertyName() ) );
parent.setBuildingContext( context );
parent.setJoins( secondaryTables ); //unnecessary
final var formulaColumn = new AnnotatedColumn();
formulaColumn.setFormula( formulaAnn.value() );
formulaColumn.setImplicit( false );
// formulaColumn.setBuildingContext( context );
// formulaColumn.setPropertyHolder( propertyHolder );
formulaColumn.setParent( parent );
formulaColumn.bind();
return parent;
}
else {
// apply any @AttributeOverride mappings before deciding implicit vs explicit
final var actualColumns = overrideColumns( columns, propertyHolder, inferredData );
if ( isEmpty( actualColumns ) ) {
return buildImplicitColumn(
fractionalSeconds,
inferredData,
suffixForDefaultColumnName,
secondaryTables,
propertyHolder,
// comment,
nullability,
context
);
}
else {
return buildExplicitColumns(
// comment,
propertyHolder,
inferredData,
suffixForDefaultColumnName,
secondaryTables,
context,
actualColumns,
fractionalSeconds
);
}
}
}
private static jakarta.persistence.Column[] overrideColumns(
jakarta.persistence.Column[] columns,
PropertyHolder propertyHolder,
PropertyData inferredData ) {
final String path = getPath( propertyHolder, inferredData );
final var overriddenCols = propertyHolder.getOverriddenColumn( path );
if ( overriddenCols != null ) {
//check for overridden first
if ( columns != null && overriddenCols.length != columns.length ) {
//TODO: unfortunately, we never actually see this nice error message, since
// PersistentClass.validate() gets called first and produces a worse message
throw new AnnotationException( "Property '" + path
+ "' specifies " + columns.length
+ " '@AttributeOverride's but the overridden property has " + overriddenCols.length
+ " columns (every column must have exactly one '@AttributeOverride')" );
}
if ( BOOT_LOGGER.isTraceEnabled() ) {
BOOT_LOGGER.columnMappingOverridden( inferredData.getPropertyName() );
}
return isEmpty( overriddenCols ) ? null : overriddenCols;
}
else {
return columns;
}
}
	/**
	 * Builds and binds one {@link AnnotatedColumn} per explicit {@code @Column}
	 * annotation, all sharing a single {@link AnnotatedColumns} parent.
	 */
	private static AnnotatedColumns buildExplicitColumns(
//			Comment comment,
			PropertyHolder propertyHolder,
			PropertyData inferredData,
			String suffixForDefaultColumnName,
			Map<String, Join> secondaryTables,
			MetadataBuildingContext context,
			jakarta.persistence.Column[] actualCols,
			FractionalSeconds fractionalSeconds) {
		final var parent = new AnnotatedColumns();
		parent.setPropertyHolder( propertyHolder );
		parent.setPropertyName( getRelativePath( propertyHolder, inferredData.getPropertyName() ) );
		parent.setJoins( secondaryTables );
		parent.setBuildingContext( context );
		final var database = context.getMetadataCollector().getDatabase();
		for ( var column : actualCols ) {
			buildColumn(
//					comment,
					propertyHolder,
					inferredData,
					suffixForDefaultColumnName,
					parent,
					actualCols.length,
					database,
					column,
					fractionalSeconds,
					getSqlType( context, column ),
					getTableName( column, database ),
					context.getBootstrapContext().getModelsContext()
			);
		}
		return parent;
	}
private static String getTableName(
jakarta.persistence.Column column,
Database database) {
final String table = column.table();
return table.isBlank()
? ""
: database.getJdbcEnvironment().getIdentifierHelper().toIdentifier( table ).render();
}
private static String getSqlType(
MetadataBuildingContext context,
jakarta.persistence.Column column) {
final String columnDefinition = column.columnDefinition();
return columnDefinition.isBlank()
? null
: context.getObjectNameNormalizer().applyGlobalQuoting( columnDefinition );
}
	/**
	 * Builds and binds a single {@link AnnotatedColumn} from one explicit
	 * {@code @Column} annotation, transferring name, type, length/precision/
	 * scale, nullability, uniqueness, insert/update flags, check constraints,
	 * options and comment. The column is attached to {@code parent} and bound
	 * before returning.
	 */
	private static AnnotatedColumn buildColumn(
//			Comment comment,
			PropertyHolder propertyHolder,
			PropertyData inferredData,
			String suffixForDefaultColumnName,
			AnnotatedColumns parent,
			int numberOfColumns,
			Database database,
			jakarta.persistence.Column column,
			FractionalSeconds fractionalSeconds,
			String sqlType,
			String tableName,
			ModelsContext sourceModelContext) {
		final String columnName = logicalColumnName( inferredData, suffixForDefaultColumnName, database, column );
		final var annotatedColumn = new AnnotatedColumn();
		annotatedColumn.setLogicalColumnName( columnName );
		annotatedColumn.setImplicit( false );
		annotatedColumn.setSqlType( sqlType );
		annotatedColumn.setLength( (long) column.length() );
		if ( fractionalSeconds != null ) {
			// @FractionalSeconds wins over any secondPrecision on the @Column
			annotatedColumn.setTemporalPrecision( fractionalSeconds.value() );
		}
		else {
			annotatedColumn.setPrecision( column.precision() );
			// The passed annotation could also be a MapKeyColumn
			annotatedColumn.setTemporalPrecision( temporalPrecision( column ) );
		}
		annotatedColumn.setScale( column.scale() );
		annotatedColumn.handleArrayLength( inferredData );
		annotatedColumn.setNullable( column.nullable() );
		annotatedColumn.setUnique( column.unique() );
		annotatedColumn.setInsertable( column.insertable() );
		annotatedColumn.setUpdatable( column.updatable() );
		annotatedColumn.setExplicitTableName( tableName );
		// parent must be set before the apply*/extract/bind calls below
		annotatedColumn.setParent( parent );
		annotatedColumn.applyColumnDefault( inferredData, numberOfColumns );
		annotatedColumn.applyGeneratedAs( inferredData, numberOfColumns );
		annotatedColumn.applyColumnCheckConstraint( column );
		annotatedColumn.applyColumnOptions( column );
		annotatedColumn.applyColumnComment(column);
		annotatedColumn.applyCheckConstraint( inferredData, numberOfColumns );
		annotatedColumn.extractDataFromPropertyData( propertyHolder, inferredData, sourceModelContext );
		annotatedColumn.bind();
		return annotatedColumn;
	}
private static Integer temporalPrecision(jakarta.persistence.Column column) {
final Integer secondPrecision =
column.annotationType() == jakarta.persistence.Column.class
? column.secondPrecision()
: null;
return secondPrecision == null || secondPrecision == -1
? null
: secondPrecision;
}
	/**
	 * Applies the {@code @Array} annotation's length to this column, if the
	 * mapped member carries one.
	 */
	private void handleArrayLength(PropertyData inferredData) {
		final var arrayAnn = inferredData.getAttributeMember().getDirectAnnotationUsage( Array.class );
		if ( arrayAnn != null ) {
			setArrayLength( arrayAnn.length() );
		}
	}
private static String logicalColumnName(
PropertyData inferredData,
String suffixForDefaultColumnName,
Database database,
jakarta.persistence.Column column) {
final String columnName = getColumnName( database, column );
// NOTE: this is the logical column name, not the physical!
return isEmpty( columnName ) && isNotEmpty( suffixForDefaultColumnName )
? inferredData.getPropertyName() + suffixForDefaultColumnName
: columnName;
}
private static String getColumnName(Database database, jakarta.persistence.Column column) {
final String name = column.name();
return name.isBlank()
? null
: database.getJdbcEnvironment().getIdentifierHelper().toIdentifier( name ).render();
}
	/**
	 * Applies a {@code @ColumnDefault} from the mapped member as this column's
	 * default value. Only legal for single-column mappings.
	 *
	 * @param length the number of columns the property maps to
	 * @throws AnnotationException if {@code @ColumnDefault} is present but the
	 *         property maps to more than one column
	 */
	void applyColumnDefault(PropertyData inferredData, int length) {
		final var memberDetails = inferredData.getAttributeMember();
		if ( memberDetails != null ) {
			final var columnDefault = getOverridableAnnotation(
					memberDetails,
					ColumnDefault.class,
					getBuildingContext()
			);
			if ( columnDefault != null ) {
				if ( length != 1 ) {
					throw new AnnotationException( "'@ColumnDefault' may only be applied to single-column mappings but '"
							+ memberDetails.getName() + "' maps to " + length + " columns" );
				}
				setDefaultValue( columnDefault.value() );
			}
		}
		else {
			// no member details available (e.g. synthetic property) — just log
			BOOT_LOGGER.couldNotPerformColumnDefaultLookup();
		}
	}
void applyGeneratedAs(PropertyData inferredData, int length) {
final var memberDetails = inferredData.getAttributeMember();
if ( memberDetails != null ) {
final var generatedColumn = getOverridableAnnotation(
memberDetails,
GeneratedColumn.class,
getBuildingContext()
);
if ( generatedColumn != null ) {
if (length!=1) {
throw new AnnotationException("'@GeneratedColumn' may only be applied to single-column mappings but '"
+ memberDetails.getName() + "' maps to " + length + " columns" );
}
setGeneratedAs( generatedColumn.value() );
}
}
else {
BOOT_LOGGER.couldNotPerformGeneratedColumnLookup();
}
}
	/** Registers the check constraints declared via {@code @Column#check()}. */
	private void applyColumnCheckConstraint(jakarta.persistence.Column column) {
		applyCheckConstraints( column.check() );
	}
void applyCheckConstraints(jakarta.persistence.CheckConstraint[] checkConstraintAnnotationUsages) {
if ( isNotEmpty( checkConstraintAnnotationUsages ) ) {
for ( var checkConstraintAnnotationUsage : checkConstraintAnnotationUsages ) {
addCheckConstraint(
nullIfBlank( checkConstraintAnnotationUsage.name() ),
checkConstraintAnnotationUsage.constraint(),
checkConstraintAnnotationUsage.options()
);
}
}
}
	/**
	 * Registers Hibernate {@code @Check}/{@code @Checks} constraints declared
	 * on the mapped member. A repeated {@code @Checks} container is applied
	 * as-is; a single {@code @Check} is overridable and only legal for
	 * single-column mappings.
	 *
	 * @throws AnnotationException if a single {@code @Check} is applied to a
	 *         multi-column mapping
	 */
	void applyCheckConstraint(PropertyData inferredData, int length) {
		final var memberDetails = inferredData.getAttributeMember();
		if ( memberDetails != null ) {
			// if there are multiple annotations, they're not overrideable
			final var checksAnn = memberDetails.getDirectAnnotationUsage( Checks.class );
			if ( checksAnn != null ) {
				final var checkAnns = checksAnn.value();
				for ( var checkAnn : checkAnns ) {
					addCheckConstraint( nullIfBlank( checkAnn.name() ), checkAnn.constraints() );
				}
			}
			else {
				final var checkAnn = getOverridableAnnotation( memberDetails, Check.class, getBuildingContext() );
				if ( checkAnn != null ) {
					if ( length != 1 ) {
						throw new AnnotationException("'@Check' may only be applied to single-column mappings but '"
								+ memberDetails.getName() + "' maps to " + length + " columns (use a table-level '@Check')" );
					}
					addCheckConstraint( nullIfBlank( checkAnn.name() ), checkAnn.constraints() );
				}
			}
		}
		else {
			// no member details available — just log
			BOOT_LOGGER.couldNotPerformCheckLookup();
		}
	}
	/**
	 * Applies {@code @ColumnTransformer} read/write expressions from the
	 * mapped member (and, for embedded properties, any transformer override
	 * registered under this column's logical name).
	 * Must only be called after all setters are defined and before binding.
	 */
	private void extractDataFromPropertyData(
			PropertyHolder propertyHolder,
			PropertyData inferredData,
			ModelsContext context) {
		if ( inferredData != null ) {
			final var memberDetails = inferredData.getAttributeMember();
			if ( memberDetails != null ) {
				if ( propertyHolder.isComponent() ) {
					// component overrides are keyed by logical column name
					processColumnTransformerExpressions( propertyHolder.getOverriddenColumnTransformer( logicalColumnName ) );
				}
				memberDetails.forEachAnnotationUsage( ColumnTransformer.class, context, this::processColumnTransformerExpressions );
			}
		}
	}
	/**
	 * Captures the read/write expressions of a {@code @ColumnTransformer}
	 * when it targets this column: either {@code forColumn} is blank
	 * (applies to any column) or it matches this column's logical name.
	 * Blank expressions are normalized to null.
	 */
	private void processColumnTransformerExpressions(ColumnTransformer annotation) {
		if ( annotation != null ) {
			final String targetColumnName = annotation.forColumn();
			if ( isBlank( targetColumnName )
					|| targetColumnName.equals( logicalColumnName != null ? logicalColumnName : "" ) ) {
				readExpression = nullIfBlank( annotation.read() );
				writeExpression = nullIfBlank( annotation.write() );
			}
		}
	}
private static AnnotatedColumns buildImplicitColumn(
FractionalSeconds fractionalSeconds,
PropertyData inferredData,
String suffixForDefaultColumnName,
Map<String, Join> secondaryTables,
PropertyHolder propertyHolder,
// Comment comment,
Nullability nullability,
MetadataBuildingContext context) {
final var columns = new AnnotatedColumns();
columns.setPropertyHolder( propertyHolder );
columns.setPropertyName( getRelativePath( propertyHolder, inferredData.getPropertyName() ) );
columns.setBuildingContext( context );
columns.setJoins( secondaryTables );
columns.setPropertyHolder( propertyHolder );
final AnnotatedColumn column = new AnnotatedColumn();
// if ( comment != null ) {
// column.setComment( comment.value() );
// }
//not following the spec but more clean
if ( nullability != Nullability.FORCED_NULL
&& !PropertyBinder.isOptional( inferredData.getAttributeMember(), propertyHolder ) ) {
column.setNullable( false );
}
final String propertyName = inferredData.getPropertyName();
// column.setPropertyHolder( propertyHolder );
// column.setPropertyName( getRelativePath( propertyHolder, propertyName ) );
// column.setJoins( secondaryTables );
// column.setBuildingContext( context );
// property name + suffix is an "explicit" column name
final boolean implicit = isEmpty( suffixForDefaultColumnName );
if ( !implicit ) {
column.setLogicalColumnName( propertyName + suffixForDefaultColumnName );
}
column.setImplicit( implicit );
column.setParent( columns );
column.applyColumnDefault( inferredData, 1 );
column.applyGeneratedAs( inferredData, 1 );
column.applyCheckConstraint( inferredData, 1 );
column.extractDataFromPropertyData( propertyHolder, inferredData, context.getBootstrapContext().getModelsContext() );
column.handleArrayLength( inferredData );
if ( fractionalSeconds != null ) {
column.setTemporalPrecision( fractionalSeconds.value() );
}
column.bind();
return columns;
}
@Override
public String toString() {
final var string = new StringBuilder();
string.append( getClass().getSimpleName() ).append( "(" );
if ( isNotEmpty( formulaString ) ) {
string.append( "formula='" ).append( formulaString );
}
else if ( isNotEmpty( logicalColumnName ) ) {
string.append( "column='" ).append( logicalColumnName );
}
string.append( ")" );
return string.toString();
}
	/** The building context, obtained from the owning {@link AnnotatedColumns} parent. */
	MetadataBuildingContext getBuildingContext() {
		return getParent().getBuildingContext();
	}
	/** Stores {@code @Column#options()} (DDL options string) on this column. */
	private void applyColumnOptions(jakarta.persistence.Column column) {
		options = column.options();
	}
private void applyColumnComment(jakarta.persistence.Column column) {
if ( !column.comment().isBlank() ) {
comment = column.comment();
}
}
	/** Sets the DDL options string for this column. */
	void setOptions(String options){
		this.options = options;
	}
	/** Sets the column comment used in generated DDL. */
	void setComment(String comment){
		this.comment = comment;
	}
}
|
openjdk/jdk8 | 37,733 | jdk/src/solaris/classes/sun/nio/ch/sctp/SctpMultiChannelImpl.java | /*
* Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.nio.ch.sctp;
import java.net.InetAddress;
import java.net.SocketAddress;
import java.net.SocketException;
import java.net.InetSocketAddress;
import java.io.FileDescriptor;
import java.io.IOException;
import java.util.Collections;
import java.util.Map.Entry;
import java.util.Iterator;
import java.util.Set;
import java.util.HashSet;
import java.util.HashMap;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.NotYetBoundException;
import java.nio.channels.spi.SelectorProvider;
import com.sun.nio.sctp.AbstractNotificationHandler;
import com.sun.nio.sctp.Association;
import com.sun.nio.sctp.AssociationChangeNotification;
import com.sun.nio.sctp.HandlerResult;
import com.sun.nio.sctp.IllegalReceiveException;
import com.sun.nio.sctp.InvalidStreamException;
import com.sun.nio.sctp.IllegalUnbindException;
import com.sun.nio.sctp.NotificationHandler;
import com.sun.nio.sctp.MessageInfo;
import com.sun.nio.sctp.SctpChannel;
import com.sun.nio.sctp.SctpMultiChannel;
import com.sun.nio.sctp.SctpSocketOption;
import sun.nio.ch.DirectBuffer;
import sun.nio.ch.NativeThread;
import sun.nio.ch.IOStatus;
import sun.nio.ch.IOUtil;
import sun.nio.ch.Net;
import sun.nio.ch.PollArrayWrapper;
import sun.nio.ch.SelChImpl;
import sun.nio.ch.SelectionKeyImpl;
import sun.nio.ch.Util;
import static com.sun.nio.sctp.SctpStandardSocketOptions.*;
import static sun.nio.ch.sctp.ResultContainer.*;
/**
* An implementation of SctpMultiChannel
*/
public class SctpMultiChannelImpl extends SctpMultiChannel
implements SelChImpl
{
    /* The native socket file descriptor and its cached int value */
    private final FileDescriptor fd;

    private final int fdVal;

    /* IDs of native threads doing send and receives, for signalling */
    private volatile long receiverThread = 0;
    private volatile long senderThread = 0;

    /* Lock held by current receiving thread */
    private final Object receiveLock = new Object();

    /* Lock held by current sending thread */
    private final Object sendLock = new Object();

    /* Lock held by any thread that modifies the state fields declared below
     * DO NOT invoke a blocking I/O operation while holding this lock! */
    private final Object stateLock = new Object();

    /* Channel lifecycle for close/kill coordination: KILLPENDING defers the
     * actual close until in-flight send/receive threads finish */
    private enum ChannelState {
        UNINITIALIZED,
        KILLPENDING,
        KILLED,
    }

    /* -- The following fields are protected by stateLock -- */

    private ChannelState state = ChannelState.UNINITIALIZED;

    /* Binding: Once bound the port will remain constant. */
    int port = -1;
    private HashSet<InetSocketAddress> localAddresses = new HashSet<InetSocketAddress>();
    /* Has the channel been bound to the wildcard address */
    private boolean wildcard; /* false */

    /* Keeps a map of addresses to association, and visa versa */
    private HashMap<SocketAddress, Association> addressMap =
        new HashMap<SocketAddress, Association>();
    private HashMap<Association, Set<SocketAddress>> associationMap =
        new HashMap<Association, Set<SocketAddress>>();

    /* -- End of fields protected by stateLock -- */

    /* If an association has been shutdown mark it for removal after
     * the user handler has been invoked */
    private final ThreadLocal<Association> associationToRemove =
        new ThreadLocal<Association>() {
            @Override protected Association initialValue() {
                return null;
        }
    };

    /* A notification handler cannot invoke receive */
    private final ThreadLocal<Boolean> receiveInvoked =
        new ThreadLocal<Boolean>() {
            @Override protected Boolean initialValue() {
                return Boolean.FALSE;
        }
    };
    /**
     * Creates an unbound one-to-many SCTP channel backed by a fresh native
     * socket.
     */
    public SctpMultiChannelImpl(SelectorProvider provider)
            throws IOException {
        //TODO: update provider, remove public modifier
        super(provider);
        this.fd = SctpNet.socket(false /*one-to-many*/);
        this.fdVal = IOUtil.fdVal(fd);
    }
    /**
     * Binds the channel's socket to the given local address (or an ephemeral
     * wildcard address when {@code local} is null) and starts listening.
     * Takes receiveLock, sendLock and stateLock — always in that order — so
     * no send/receive is in progress while binding.
     *
     * @throws java.nio.channels.AlreadyBoundException if already bound
     */
    @Override
    public SctpMultiChannel bind(SocketAddress local, int backlog)
            throws IOException {
        synchronized (receiveLock) {
            synchronized (sendLock) {
                synchronized (stateLock) {
                    ensureOpen();
                    if (isBound())
                        SctpNet.throwAlreadyBoundException();
                    InetSocketAddress isa = (local == null) ?
                        new InetSocketAddress(0) : Net.checkAddress(local);
                    SecurityManager sm = System.getSecurityManager();
                    if (sm != null)
                        sm.checkListen(isa.getPort());
                    Net.bind(fd, isa.getAddress(), isa.getPort());
                    InetSocketAddress boundIsa = Net.localAddress(fd);
                    port = boundIsa.getPort();
                    localAddresses.add(isa);
                    if (isa.getAddress().isAnyLocalAddress())
                        wildcard = true;
                    // backlog < 1 falls back to a default of 50
                    SctpNet.listen(fdVal, backlog < 1 ? 50 : backlog);
                }
            }
        }
        return this;
    }
    /** Adds a local address to the bound channel (SCTP multi-homing). */
    @Override
    public SctpMultiChannel bindAddress(InetAddress address)
            throws IOException {
        return bindUnbindAddress(address, true);
    }
    /** Removes a local address from the bound channel (SCTP multi-homing). */
    @Override
    public SctpMultiChannel unbindAddress(InetAddress address)
            throws IOException {
        return bindUnbindAddress(address, false);
    }
    /**
     * Common implementation of bindAddress/unbindAddress: validates the
     * request, performs the native bindx, then updates the local-address set.
     * Wildcard-bound channels cannot have addresses added or removed, and a
     * channel must always keep at least one bound address.
     *
     * @param add true to add the address, false to remove it
     */
    private SctpMultiChannel bindUnbindAddress(InetAddress address,
                                               boolean add)
            throws IOException {
        if (address == null)
            throw new IllegalArgumentException();

        synchronized (receiveLock) {
            synchronized (sendLock) {
                synchronized (stateLock) {
                    if (!isOpen())
                        throw new ClosedChannelException();
                    if (!isBound())
                        throw new NotYetBoundException();
                    if (wildcard)
                        throw new IllegalStateException(
                                "Cannot add or remove addresses from a channel that is bound to the wildcard address");
                    if (address.isAnyLocalAddress())
                        throw new IllegalArgumentException(
                                "Cannot add or remove the wildcard address");
                    if (add) {
                        // adding an already-bound address is an error
                        for (InetSocketAddress addr : localAddresses) {
                            if (addr.getAddress().equals(address)) {
                                SctpNet.throwAlreadyBoundException();
                            }
                        }
                    } else { /*removing */
                        /* Verify that there is more than one address
                         * and that address is already bound */
                        if (localAddresses.size() <= 1)
                            throw new IllegalUnbindException("Cannot remove address from a channel with only one address bound");
                        boolean foundAddress = false;
                        for (InetSocketAddress addr : localAddresses) {
                            if (addr.getAddress().equals(address)) {
                                foundAddress = true;
                                break;
                            }
                        }
                        if (!foundAddress )
                            throw new IllegalUnbindException("Cannot remove address from a channel that is not bound to that address");
                    }

                    SctpNet.bindx(fdVal, new InetAddress[]{address}, port, add);

                    /* Update our internal Set to reflect the addition/removal */
                    if (add)
                        localAddresses.add(new InetSocketAddress(address, port));
                    else {
                        for (InetSocketAddress addr : localAddresses) {
                            if (addr.getAddress().equals(address)) {
                                localAddresses.remove(addr);
                                break;
                            }
                        }
                    }
                }
            }
        }
        return this;
    }
    /**
     * Returns an unmodifiable view of the channel's current associations.
     * The view is backed by the live map, so it reflects later changes.
     */
    @Override
    public Set<Association> associations()
            throws ClosedChannelException, NotYetBoundException {
        synchronized (stateLock) {
            if (!isOpen())
                throw new ClosedChannelException();
            if (!isBound())
                throw new NotYetBoundException();

            return Collections.unmodifiableSet(associationMap.keySet());
        }
    }
private boolean isBound() {
synchronized (stateLock) {
return port == -1 ? false : true;
}
}
    /** Throws ClosedChannelException if the channel is no longer open. */
    private void ensureOpen() throws IOException {
        synchronized (stateLock) {
            if (!isOpen())
                throw new ClosedChannelException();
        }
    }
    /**
     * Clears the receiver-thread id after a receive; completes a pending
     * kill if close was requested while the receive was in progress.
     */
    private void receiverCleanup() throws IOException {
        synchronized (stateLock) {
            receiverThread = 0;
            if (state == ChannelState.KILLPENDING)
                kill();
        }
    }
    /**
     * Clears the sender-thread id after a send; completes a pending kill
     * if close was requested while the send was in progress.
     */
    private void senderCleanup() throws IOException {
        synchronized (stateLock) {
            senderThread = 0;
            if (state == ChannelState.KILLPENDING)
                kill();
        }
    }
    /** Switches the underlying socket between blocking and non-blocking mode. */
    @Override
    protected void implConfigureBlocking(boolean block) throws IOException {
        IOUtil.configureBlocking(fd, block);
    }
    /**
     * Pre-closes the socket and signals any native threads blocked in
     * send/receive so they notice the close; the descriptor itself is only
     * released by {@link #kill}, immediately if unregistered, otherwise when
     * the last selector deregisters the channel.
     */
    @Override
    public void implCloseSelectableChannel() throws IOException {
        synchronized (stateLock) {
            SctpNet.preClose(fdVal);

            if (receiverThread != 0)
                NativeThread.signal(receiverThread);

            if (senderThread != 0)
                NativeThread.signal(senderThread);

            if (!isRegistered())
                kill();
        }
    }
    /** The channel's file descriptor (SelChImpl contract). */
    @Override
    public FileDescriptor getFD() {
        return fd;
    }
    /** The channel's file descriptor as an int (SelChImpl contract). */
    @Override
    public int getFDVal() {
        return fdVal;
    }
    /**
     * Translates native poll revent ops into a ready operation ops.
     * Updates the key's ready set (starting from {@code initialOps}) and
     * returns true when that adds at least one op not already ready.
     * POLLERR/POLLHUP mark all interest ops ready so the caller gets a
     * chance to observe the error.
     */
    private boolean translateReadyOps(int ops, int initialOps,
                                      SelectionKeyImpl sk) {
        int intOps = sk.nioInterestOps();
        int oldOps = sk.nioReadyOps();
        int newOps = initialOps;

        if ((ops & PollArrayWrapper.POLLNVAL) != 0) {
            /* This should only happen if this channel is pre-closed while a
             * selection operation is in progress
             * ## Throw an error if this channel has not been pre-closed */
            return false;
        }

        if ((ops & (PollArrayWrapper.POLLERR
                    | PollArrayWrapper.POLLHUP)) != 0) {
            newOps = intOps;
            sk.nioReadyOps(newOps);
            return (newOps & ~oldOps) != 0;
        }

        if (((ops & PollArrayWrapper.POLLIN) != 0) &&
            ((intOps & SelectionKey.OP_READ) != 0))
            newOps |= SelectionKey.OP_READ;

        if (((ops & PollArrayWrapper.POLLOUT) != 0) &&
            ((intOps & SelectionKey.OP_WRITE) != 0))
            newOps |= SelectionKey.OP_WRITE;

        sk.nioReadyOps(newOps);
        return (newOps & ~oldOps) != 0;
    }
    /** Merges new poll events into the key's existing ready set. */
    @Override
    public boolean translateAndUpdateReadyOps(int ops, SelectionKeyImpl sk) {
        return translateReadyOps(ops, sk.nioReadyOps(), sk);
    }
    /** Replaces the key's ready set with the translation of the poll events. */
    @Override
    public boolean translateAndSetReadyOps(int ops, SelectionKeyImpl sk) {
        return translateReadyOps(ops, 0, sk);
    }
@Override
public void translateAndSetInterestOps(int ops, SelectionKeyImpl sk) {
int newOps = 0;
if ((ops & SelectionKey.OP_READ) != 0)
newOps |= PollArrayWrapper.POLLIN;
if ((ops & SelectionKey.OP_WRITE) != 0)
newOps |= PollArrayWrapper.POLLOUT;
sk.selector.putEventOps(sk, newOps);
}
    /**
     * Releases the native socket once it is safe to do so. Idempotent: a
     * second call on a KILLED channel is a no-op. If a send or receive is
     * still in flight the close is deferred (KILLPENDING) and completed by
     * the corresponding cleanup method.
     */
    @Override
    public void kill() throws IOException {
        synchronized (stateLock) {
            if (state == ChannelState.KILLED)
                return;
            if (state == ChannelState.UNINITIALIZED) {
                state = ChannelState.KILLED;
                return;
            }
            assert !isOpen() && !isRegistered();

            /* Postpone the kill if there is a thread sending or receiving. */
            if (receiverThread == 0 && senderThread == 0) {
                SctpNet.close(fdVal);
                state = ChannelState.KILLED;
            } else {
                state = ChannelState.KILLPENDING;
            }
        }
    }
    /**
     * Sets a socket option, optionally scoped to one association (required
     * for the primary-address options, where the association is validated
     * against this channel). A null association targets the whole channel
     * (native association id 0).
     */
    @Override
    public <T> SctpMultiChannel setOption(SctpSocketOption<T> name,
                                          T value,
                                          Association association)
            throws IOException {
        if (name == null)
            throw new NullPointerException();
        if (!(supportedOptions().contains(name)))
            throw new UnsupportedOperationException("'" + name + "' not supported");

        synchronized (stateLock) {
            if (association != null && (name.equals(SCTP_PRIMARY_ADDR) ||
                    name.equals(SCTP_SET_PEER_PRIMARY_ADDR))) {
                checkAssociation(association);
            }
            if (!isOpen())
                throw new ClosedChannelException();

            int assocId = association == null ? 0 : association.associationID();
            SctpNet.setSocketOption(fdVal, name, value, assocId);
        }
        return this;
    }
    /**
     * Reads a socket option, optionally scoped to one association (validated
     * for the primary-address options). A null association targets the whole
     * channel (native association id 0).
     */
    @Override
    @SuppressWarnings("unchecked")
    public <T> T getOption(SctpSocketOption<T> name, Association association)
            throws IOException {
        if (name == null)
            throw new NullPointerException();
        if (!supportedOptions().contains(name))
            throw new UnsupportedOperationException("'" + name + "' not supported");

        synchronized (stateLock) {
            if (association != null && (name.equals(SCTP_PRIMARY_ADDR) ||
                    name.equals(SCTP_SET_PEER_PRIMARY_ADDR))) {
                checkAssociation(association);
            }
            if (!isOpen())
                throw new ClosedChannelException();

            int assocId = association == null ? 0 : association.associationID();
            return (T)SctpNet.getSocketOption(fdVal, name, assocId);
        }
    }
    /**
     * Lazy-initialization holder for the immutable set of socket options
     * this channel supports (initialized on first use of supportedOptions).
     */
    private static class DefaultOptionsHolder {
        static final Set<SctpSocketOption<?>> defaultOptions = defaultOptions();

        private static Set<SctpSocketOption<?>> defaultOptions() {
            HashSet<SctpSocketOption<?>> set = new HashSet<SctpSocketOption<?>>(10);
            set.add(SCTP_DISABLE_FRAGMENTS);
            set.add(SCTP_EXPLICIT_COMPLETE);
            set.add(SCTP_FRAGMENT_INTERLEAVE);
            set.add(SCTP_INIT_MAXSTREAMS);
            set.add(SCTP_NODELAY);
            set.add(SCTP_PRIMARY_ADDR);
            set.add(SCTP_SET_PEER_PRIMARY_ADDR);
            set.add(SO_SNDBUF);
            set.add(SO_RCVBUF);
            set.add(SO_LINGER);
            return Collections.unmodifiableSet(set);
        }
    }
    /** The immutable set of socket options supported by this channel. */
    @Override
    public final Set<SctpSocketOption<?>> supportedOptions() {
        return DefaultOptionsHolder.defaultOptions;
    }
    /**
     * Receives a message or notification. Notifications are processed
     * internally (association map updated under stateLock) and dispatched to
     * {@code handler} outside the receive lock; the loop continues while the
     * handler returns CONTINUE (or always, when handler is null). Returns the
     * MessageInfo for a received message, or null when the channel was closed
     * or (non-blocking) nothing was available.
     *
     * @throws IllegalReceiveException if invoked from within a notification
     *         handler (re-entrancy is tracked per-thread via receiveInvoked)
     */
    @Override
    public <T> MessageInfo receive(ByteBuffer buffer,
                                   T attachment,
                                   NotificationHandler<T> handler)
            throws IOException {
        if (buffer == null)
            throw new IllegalArgumentException("buffer cannot be null");

        if (buffer.isReadOnly())
            throw new IllegalArgumentException("Read-only buffer");

        if (receiveInvoked.get())
            throw new IllegalReceiveException(
                    "cannot invoke receive from handler");
        receiveInvoked.set(Boolean.TRUE);

        try {
            ResultContainer resultContainer = new ResultContainer();
            do {
                resultContainer.clear();
                synchronized (receiveLock) {
                    ensureOpen();
                    if (!isBound())
                        throw new NotYetBoundException();

                    int n = 0;
                    try {
                        begin();

                        synchronized (stateLock) {
                            if(!isOpen())
                                return null;
                            // record for close() to signal a blocked receive
                            receiverThread = NativeThread.current();
                        }

                        do {
                            n = receive(fdVal, buffer, resultContainer);
                        } while ((n == IOStatus.INTERRUPTED) && isOpen());
                    } finally {
                        receiverCleanup();
                        end((n > 0) || (n == IOStatus.UNAVAILABLE));
                        assert IOStatus.check(n);
                    }

                    if (!resultContainer.isNotification()) {
                        /* message or nothing */
                        if (resultContainer.hasSomething()) {
                            /* Set the association before returning */
                            MessageInfoImpl info =
                                    resultContainer.getMessageInfo();
                            info.setAssociation(lookupAssociation(info.
                                    associationID()));
                            SecurityManager sm = System.getSecurityManager();
                            if (sm != null) {
                                InetSocketAddress isa = (InetSocketAddress)info.address();
                                if (!addressMap.containsKey(isa)) {
                                    /* must be a new association */
                                    try {
                                        sm.checkAccept(isa.getAddress().getHostAddress(),
                                                       isa.getPort());
                                    } catch (SecurityException se) {
                                        // drop the received data before propagating
                                        buffer.clear();
                                        throw se;
                                    }
                                }
                            }

                            assert info.association() != null;
                            return info;
                        } else {
                            /* Non-blocking may return null if nothing available*/
                            return null;
                        }
                    } else { /* notification */
                        synchronized (stateLock) {
                            handleNotificationInternal(
                                    resultContainer);
                        }
                    }
                } /* receiveLock */
            } while (handler == null ? true :
                (invokeNotificationHandler(resultContainer, handler, attachment)
                 == HandlerResult.CONTINUE));
        } finally {
            receiveInvoked.set(Boolean.FALSE);
        }

        return null;
    }
    /**
     * Receives into {@code dst}, substituting a temporary direct buffer when
     * the destination is a heap buffer (the native call needs a stable
     * address). Returns the IOStatus/byte count from the native receive.
     */
    private int receive(int fd,
                        ByteBuffer dst,
                        ResultContainer resultContainer)
            throws IOException {
        int pos = dst.position();
        int lim = dst.limit();
        assert (pos <= lim);
        int rem = (pos <= lim ? lim - pos : 0);
        if (dst instanceof DirectBuffer && rem > 0)
            return receiveIntoNativeBuffer(fd, resultContainer, dst, rem, pos);

        /* Substitute a native buffer. */
        // at least 1 byte so a zero-remaining receive can still drain a notification
        int newSize = Math.max(rem, 1);
        ByteBuffer bb = Util.getTemporaryDirectBuffer(newSize);
        try {
            int n = receiveIntoNativeBuffer(fd, resultContainer, bb, newSize, 0);
            bb.flip();
            if (n > 0 && rem > 0)
                dst.put(bb);
            return n;
        } finally {
            Util.releaseTemporaryDirectBuffer(bb);
        }
    }
    /**
     * Invokes the native receive at the direct buffer's address + pos and
     * advances the buffer position by the number of bytes read.
     */
    private int receiveIntoNativeBuffer(int fd,
                                        ResultContainer resultContainer,
                                        ByteBuffer bb,
                                        int rem,
                                        int pos)
            throws IOException {
        int n = receive0(fd, resultContainer, ((DirectBuffer)bb).address() + pos, rem);
        if (n > 0)
            bb.position(pos + n);
        return n;
    }
    /* Shared handler that keeps the address/association maps up to date */
    private InternalNotificationHandler internalNotificationHandler =
            new InternalNotificationHandler();

    /** Applies a received notification to the channel's internal state. */
    private void handleNotificationInternal(ResultContainer resultContainer)
    {
        invokeNotificationHandler(resultContainer,
                internalNotificationHandler, null);
    }
    /**
     * Internal handler that mirrors association lifecycle events into the
     * channel's maps: COMM_UP registers a new association; SHUTDOWN/COMM_LOST
     * mark the association for removal once the user handler has run.
     */
    private class InternalNotificationHandler
            extends AbstractNotificationHandler<Object>
    {
        @Override
        public HandlerResult handleNotification(
                AssociationChangeNotification not, Object unused) {
            AssociationChange sac = (AssociationChange) not;

            /* Update map to reflect change in association */
            switch (not.event()) {
                case COMM_UP :
                    Association newAssociation = new AssociationImpl
                            (sac.assocId(), sac.maxInStreams(), sac.maxOutStreams());
                    addAssociation(newAssociation);
                    break;
                case SHUTDOWN :
                case COMM_LOST :
                //case RESTART: ???
                    /* mark association for removal after user handler invoked*/
                    associationToRemove.set(lookupAssociation(sac.assocId()));
            }
            return HandlerResult.CONTINUE;
        }
    }
    /**
     * Resolves the notification's association and dispatches it to the
     * handler — to the type-specific overload when the handler is an
     * AbstractNotificationHandler, otherwise to the generic method. After a
     * user (non-internal) handler returns, any association marked for removal
     * by the internal handler is actually removed.
     */
    private <T> HandlerResult invokeNotificationHandler(
                                   ResultContainer resultContainer,
                                   NotificationHandler<T> handler,
                                   T attachment) {
        HandlerResult result;
        SctpNotification notification = resultContainer.notification();
        notification.setAssociation(lookupAssociation(notification.assocId()));

        if (!(handler instanceof AbstractNotificationHandler)) {
            result = handler.handleNotification(notification, attachment);
        } else { /* AbstractNotificationHandler */
            AbstractNotificationHandler<T> absHandler =
                    (AbstractNotificationHandler<T>)handler;
            switch(resultContainer.type()) {
                case ASSOCIATION_CHANGED :
                    result = absHandler.handleNotification(
                            resultContainer.getAssociationChanged(), attachment);
                    break;
                case PEER_ADDRESS_CHANGED :
                    result = absHandler.handleNotification(
                            resultContainer.getPeerAddressChanged(), attachment);
                    break;
                case SEND_FAILED :
                    result = absHandler.handleNotification(
                            resultContainer.getSendFailed(), attachment);
                    break;
                case SHUTDOWN :
                    result = absHandler.handleNotification(
                            resultContainer.getShutdown(), attachment);
                    break;
                default :
                    /* implementation specific handlers */
                    result = absHandler.handleNotification(
                            resultContainer.notification(), attachment);
            }
        }

        if (!(handler instanceof InternalNotificationHandler)) {
            /* Only remove associations after user handler
             * has finished with them */
            Association assoc = associationToRemove.get();
            if (assoc != null) {
                removeAssociation(assoc);
                associationToRemove.set(null);
            }
        }
        return result;
    }
private Association lookupAssociation(int assocId) {
/* Lookup the association in our internal map */
synchronized (stateLock) {
Set<Association> assocs = associationMap.keySet();
for (Association a : assocs) {
if (a.associationID() == assocId) {
return a;
}
}
}
return null;
}
    /**
     * Registers a new association and, when the native layer can report its
     * remote addresses, maps each address back to the association.
     */
    private void addAssociation(Association association) {
        synchronized (stateLock) {
            int assocId = association.associationID();
            Set<SocketAddress> addresses = null;

            try {
                addresses = SctpNet.getRemoteAddresses(fdVal, assocId);
            } catch (IOException unused) {
                /* OK, determining connected addresses may not be possible
                 * shutdown, connection lost, etc */
            }

            associationMap.put(association, addresses);
            if (addresses != null) {
                for (SocketAddress addr : addresses)
                    addressMap.put(addr, association);
            }
        }
    }
    /**
     * Unregisters an association and its address mappings. When the native
     * layer can no longer report the remote addresses (association already
     * gone), falls back to scanning the address map for entries that point
     * at this association.
     */
    private void removeAssociation(Association association) {
        synchronized (stateLock) {
            int assocId = association.associationID();
            Set<SocketAddress> addresses = null;

            try {
                addresses = SctpNet.getRemoteAddresses(fdVal, assocId);
            } catch (IOException unused) {
                /* OK, determining connected addresses may not be possible
                 * shutdown, connection lost, etc */
            }

            Set<Association> assocs = associationMap.keySet();
            for (Association a : assocs) {
                if (a.associationID() == assocId) {
                    associationMap.remove(a);
                    break;
                }
            }
            if (addresses != null) {
                for (SocketAddress addr : addresses)
                    addressMap.remove(addr);
            } else {
                /* We cannot determine the connected addresses */
                Set<java.util.Map.Entry<SocketAddress, Association>> addrAssocs =
                        addressMap.entrySet();
                // iterator.remove() is required to avoid ConcurrentModificationException
                Iterator<Entry<SocketAddress, Association>> iterator = addrAssocs.iterator();
                while (iterator.hasNext()) {
                    Entry<SocketAddress, Association> entry = iterator.next();
                    if (entry.getValue().equals(association)) {
                        iterator.remove();
                    }
                }
            }
        }
    }
/**
* @throws IllegalArgumentException
* If the given association is not controlled by this channel
*
* @return {@code true} if, and only if, the given association is one
* of the current associations controlled by this channel
*/
private boolean checkAssociation(Association messageAssoc) {
synchronized (stateLock) {
for (Association association : associationMap.keySet()) {
if (messageAssoc.equals(association)) {
return true;
}
}
}
throw new IllegalArgumentException(
"Given Association is not controlled by this channel");
}
private void checkStreamNumber(Association assoc, int streamNumber) {
synchronized (stateLock) {
if (streamNumber < 0 || streamNumber >= assoc.maxOutboundStreams())
throw new InvalidStreamException();
}
}
    /* TODO: Add support for ttl and isComplete.
     * (SCTP_EOR not yet supported on reference platforms;
     * TTL support limited.) */
    @Override
    public int send(ByteBuffer buffer, MessageInfo messageInfo)
            throws IOException {
        if (buffer == null)
            throw new IllegalArgumentException("buffer cannot be null");

        if (messageInfo == null)
            throw new IllegalArgumentException("messageInfo cannot be null");

        synchronized (sendLock) {
            ensureOpen();

            if (!isBound())
                bind(null, 0);    // implicit bind before the first send

            int n = 0;
            try {
                int assocId = -1;
                SocketAddress address = null;
                begin();    // mark the start of a potentially-blocking I/O op

                // Resolve the target under stateLock; the native send happens outside it.
                synchronized (stateLock) {
                    if(!isOpen())
                        return 0;
                    senderThread = NativeThread.current();

                    /* Determine what address or association to send to */
                    Association assoc = messageInfo.association();
                    InetSocketAddress addr = (InetSocketAddress)messageInfo.address();
                    if (assoc != null) {
                        checkAssociation(assoc);
                        checkStreamNumber(assoc, messageInfo.streamNumber());
                        assocId = assoc.associationID();
                        /* have we also got a preferred address */
                        if (addr != null) {
                            if (!assoc.equals(addressMap.get(addr)))
                                throw new IllegalArgumentException("given preferred address is not part of this association");
                            address = addr;
                        }
                    } else if (addr != null) {
                        address = addr;
                        Association association = addressMap.get(addr);
                        if (association != null) {
                            checkStreamNumber(association, messageInfo.streamNumber());
                            assocId = association.associationID();
                        } else { /* must be new association */
                            SecurityManager sm = System.getSecurityManager();
                            if (sm != null)
                                sm.checkConnect(addr.getAddress().getHostAddress(),
                                                addr.getPort());
                        }
                    } else {
                        throw new AssertionError(
                            "Both association and address cannot be null");
                    }
                }

                // Retry while the native call reports interruption, as long as the channel stays open.
                do {
                    n = send(fdVal, buffer, assocId, address, messageInfo);
                } while ((n == IOStatus.INTERRUPTED) && isOpen());

                return IOStatus.normalize(n);
            } finally {
                senderCleanup();
                end((n > 0) || (n == IOStatus.UNAVAILABLE));
                assert IOStatus.check(n);
            }
        }
    }
private int send(int fd,
ByteBuffer src,
int assocId,
SocketAddress target,
MessageInfo messageInfo)
throws IOException {
int streamNumber = messageInfo.streamNumber();
boolean unordered = messageInfo.isUnordered();
int ppid = messageInfo.payloadProtocolID();
if (src instanceof DirectBuffer)
return sendFromNativeBuffer(fd, src, target, assocId,
streamNumber, unordered, ppid);
/* Substitute a native buffer */
int pos = src.position();
int lim = src.limit();
assert (pos <= lim && streamNumber >= 0);
int rem = (pos <= lim ? lim - pos : 0);
ByteBuffer bb = Util.getTemporaryDirectBuffer(rem);
try {
bb.put(src);
bb.flip();
/* Do not update src until we see how many bytes were written */
src.position(pos);
int n = sendFromNativeBuffer(fd, bb, target, assocId,
streamNumber, unordered, ppid);
if (n > 0) {
/* now update src */
src.position(pos + n);
}
return n;
} finally {
Util.releaseTemporaryDirectBuffer(bb);
}
}
    /**
     * Invokes the native send for a direct buffer, optionally targeting a
     * preferred peer address, and advances the buffer position by the number
     * of bytes actually written.
     */
    private int sendFromNativeBuffer(int fd,
                                     ByteBuffer bb,
                                     SocketAddress target,
                                     int assocId,
                                     int streamNumber,
                                     boolean unordered,
                                     int ppid)
            throws IOException {
        InetAddress addr = null;     // no preferred address
        int port = 0;
        if (target != null) {
            InetSocketAddress isa = Net.checkAddress(target);
            addr = isa.getAddress();
            port = isa.getPort();
        }
        int pos = bb.position();
        int lim = bb.limit();
        assert (pos <= lim);
        int rem = (pos <= lim ? lim - pos : 0);

        // The native call takes the raw memory address of the buffer window.
        int written = send0(fd, ((DirectBuffer)bb).address() + pos, rem, addr,
                            port, assocId, streamNumber, unordered, ppid);
        if (written > 0)
            bb.position(pos + written);
        return written;
    }
@Override
public SctpMultiChannel shutdown(Association association)
throws IOException {
synchronized (stateLock) {
checkAssociation(association);
if (!isOpen())
throw new ClosedChannelException();
SctpNet.shutdown(fdVal, association.associationID());
}
return this;
}
@Override
public Set<SocketAddress> getAllLocalAddresses()
throws IOException {
synchronized (stateLock) {
if (!isOpen())
throw new ClosedChannelException();
if (!isBound())
return Collections.emptySet();
return SctpNet.getLocalAddresses(fdVal);
}
}
@Override
public Set<SocketAddress> getRemoteAddresses(Association association)
throws IOException {
synchronized (stateLock) {
checkAssociation(association);
if (!isOpen())
throw new ClosedChannelException();
try {
return SctpNet.getRemoteAddresses(fdVal, association.associationID());
} catch (SocketException se) {
/* a valid association should always have remote addresses */
Set<SocketAddress> addrs = associationMap.get(association);
return addrs != null ? addrs : Collections.<SocketAddress>emptySet();
}
}
}
@Override
public SctpChannel branch(Association association)
throws IOException {
synchronized (stateLock) {
checkAssociation(association);
if (!isOpen())
throw new ClosedChannelException();
FileDescriptor bFd = SctpNet.branch(fdVal,
association.associationID());
/* successfully branched, we can now remove it from assoc list */
removeAssociation(association);
return new SctpChannelImpl(provider(), bFd, association);
}
}
    /* Use common native implementation shared between
     * one-to-one and one-to-many */
    /** Delegates to the shared native receive (non-peeking variant). */
    private static int receive0(int fd,
                                ResultContainer resultContainer,
                                long address,
                                int length)
            throws IOException{
        return SctpChannelImpl.receive0(fd, resultContainer, address,
                                        length, false /*peek */);
    }
    /** Delegates to the shared native send implementation. */
    private static int send0(int fd,
                             long address,
                             int length,
                             InetAddress addr,
                             int port,
                             int assocId,
                             int streamNumber,
                             boolean unordered,
                             int ppid)
            throws IOException {
        return SctpChannelImpl.send0(fd, address, length, addr, port, assocId,
                                     streamNumber, unordered, ppid);
    }
    static {
        IOUtil.load();    /* loads nio & net native libraries */
        // Load the SCTP native library with elevated privileges so it also
        // works when a security manager is installed.
        java.security.AccessController.doPrivileged(
            new java.security.PrivilegedAction<Void>() {
                public Void run() {
                    System.loadLibrary("sctp");
                    return null;
                }
            });
    }
}
|
apache/ignite | 37,557 | modules/core/src/main/java/org/apache/ignite/internal/cdc/CdcMain.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.cdc;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Predicate;
import java.util.stream.Stream;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.binary.BinaryType;
import org.apache.ignite.cdc.CdcCacheEvent;
import org.apache.ignite.cdc.CdcConfiguration;
import org.apache.ignite.cdc.CdcConsumer;
import org.apache.ignite.cdc.CdcEvent;
import org.apache.ignite.cdc.TypeMapping;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.GridLoggerProxy;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.binary.BinaryUtils;
import org.apache.ignite.internal.cdc.WalRecordsConsumer.DataEntryIterator;
import org.apache.ignite.internal.pagemem.wal.WALIterator;
import org.apache.ignite.internal.pagemem.wal.record.CdcManagerRecord;
import org.apache.ignite.internal.pagemem.wal.record.DataRecord;
import org.apache.ignite.internal.pagemem.wal.record.WALRecord;
import org.apache.ignite.internal.processors.cache.GridLocalConfigManager;
import org.apache.ignite.internal.processors.cache.persistence.filename.NodeFileTree;
import org.apache.ignite.internal.processors.cache.persistence.filename.PdsFolderResolver;
import org.apache.ignite.internal.processors.cache.persistence.filename.PdsFolderSettings;
import org.apache.ignite.internal.processors.cache.persistence.wal.WALPointer;
import org.apache.ignite.internal.processors.cache.persistence.wal.reader.IgniteWalIteratorFactory;
import org.apache.ignite.internal.processors.cache.persistence.wal.reader.StandaloneGridKernalContext;
import org.apache.ignite.internal.processors.cache.persistence.wal.reader.StandaloneSpiContext;
import org.apache.ignite.internal.processors.metric.MetricRegistryImpl;
import org.apache.ignite.internal.processors.metric.impl.AtomicLongMetric;
import org.apache.ignite.internal.processors.metric.impl.HistogramMetricImpl;
import org.apache.ignite.internal.processors.resource.GridSpringResourceContext;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.T2;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.platform.PlatformType;
import org.apache.ignite.spi.IgniteSpi;
import org.apache.ignite.spi.metric.jmx.JmxMetricExporterSpi;
import org.apache.ignite.spi.metric.noop.NoopMetricExporterSpi;
import org.apache.ignite.startup.cmdline.CdcCommandLineStartup;
import static org.apache.ignite.internal.IgniteKernal.NL;
import static org.apache.ignite.internal.IgniteKernal.SITE;
import static org.apache.ignite.internal.IgniteVersionUtils.ACK_VER_STR;
import static org.apache.ignite.internal.IgniteVersionUtils.COPYRIGHT;
import static org.apache.ignite.internal.IgnitionEx.initializeDefaultMBeanServer;
import static org.apache.ignite.internal.pagemem.wal.record.WALRecord.RecordType.CDC_DATA_RECORD;
import static org.apache.ignite.internal.pagemem.wal.record.WALRecord.RecordType.CDC_MANAGER_RECORD;
import static org.apache.ignite.internal.pagemem.wal.record.WALRecord.RecordType.CDC_MANAGER_STOP_RECORD;
import static org.apache.ignite.internal.pagemem.wal.record.WALRecord.RecordType.DATA_RECORD_V2;
import static org.apache.ignite.internal.processors.cache.persistence.wal.reader.StandaloneGridKernalContext.closeAllComponents;
import static org.apache.ignite.internal.processors.cache.persistence.wal.reader.StandaloneGridKernalContext.startAllComponents;
import static org.apache.ignite.internal.processors.metric.impl.MetricUtils.metricName;
/**
* Change Data Capture (CDC) application.
* The application runs independently of Ignite node process and provides the ability
* for the {@link CdcConsumer} to consume events({@link CdcEvent}) from WAL segments.
* The user should provide {@link CdcConsumer} implementation with custom consumption logic.
*
* Ignite node should be explicitly configured for using {@link CdcMain}.
* <ol>
* <li>Set {@link DataRegionConfiguration#setCdcEnabled(boolean)} to true.</li>
* <li>Optional: Set {@link DataStorageConfiguration#setCdcWalPath(String)} to path to the directory
* to store WAL segments for CDC.</li>
* <li>Optional: Set {@link DataStorageConfiguration#setWalForceArchiveTimeout(long)} to configure timeout for
* force WAL rollover, so new events will be available for consumptions with the predicted time.</li>
* </ol>
*
 * When CDC is enabled via {@link DataRegionConfiguration#setCdcEnabled(boolean)}, the Ignite node on each
 * WAL segment rollover creates a hard link to the archived WAL segment in the
 * {@link DataStorageConfiguration#getCdcWalPath()} directory. The {@link CdcMain} application takes each
 * segment file and consumes events from it.
* After successful consumption (see {@link CdcConsumer#onEvents(Iterator)}) WAL segment will be deleted
* from directory.
*
* Several Ignite nodes can be started on the same host.
* If your deployment done with custom consistent id then you should specify it via
* {@link IgniteConfiguration#setConsistentId(Serializable)} in provided {@link IgniteConfiguration}.
*
* Application works as follows:
* <ol>
* <li>Searches node work directory based on provided {@link IgniteConfiguration}.</li>
 * <li>Awaits creation of the CDC directory if it does not exist.</li>
* <li>Acquires file lock to ensure exclusive consumption.</li>
* <li>Loads state of consumption if it exists.</li>
* <li>Infinitely waits for new available segment and processes it.</li>
* </ol>
*
* @see DataRegionConfiguration#setCdcEnabled(boolean)
* @see DataStorageConfiguration#setCdcWalPath(String)
* @see DataStorageConfiguration#setWalForceArchiveTimeout(long)
* @see CdcCommandLineStartup
* @see CdcConsumer
* @see DataStorageConfiguration#DFLT_WAL_CDC_PATH
*/
public class CdcMain implements Runnable {
/** */
public static final String ERR_MSG = "Persistence and CDC disabled. Capture Data Change can't run!";
/** Current segment index metric name. */
public static final String CUR_SEG_IDX = "CurrentSegmentIndex";
/** Committed segment index metric name. */
public static final String COMMITTED_SEG_IDX = "CommittedSegmentIndex";
/** Committed segment offset metric name. */
public static final String COMMITTED_SEG_OFFSET = "CommittedSegmentOffset";
/** Last segment consumption time. */
public static final String LAST_SEG_CONSUMPTION_TIME = "LastSegmentConsumptionTime";
/** Metadata update time. */
public static final String META_UPDATE = "MetadataUpdateTime";
/** Event capture time. */
public static final String EVT_CAPTURE_TIME = "EventCaptureTime";
/** Wal segment iterator consuming time. */
public static final String SEGMENT_CONSUMING_TIME = "SegmentConsumingTime";
/** Binary metadata metric name. */
public static final String BINARY_META_DIR = "BinaryMetaDir";
/** Marshaller metric name. */
public static final String MARSHALLER_DIR = "MarshallerDir";
/** Cdc directory metric name. */
public static final String CDC_DIR = "CdcDir";
/** Cdc mode metric name. */
public static final String CDC_MODE = "CdcMode";
/** Filter for consumption in {@link CdcMode#IGNITE_NODE_ACTIVE} mode. */
private static final IgniteBiPredicate<WALRecord.RecordType, WALPointer> PASSIVE_RECS =
(type, ptr) -> type == CDC_MANAGER_STOP_RECORD || type == CDC_MANAGER_RECORD;
/** Filter for consumption in {@link CdcMode#CDC_UTILITY_ACTIVE} mode. */
private static final IgniteBiPredicate<WALRecord.RecordType, WALPointer> ACTIVE_RECS =
(type, ptr) -> type == DATA_RECORD_V2 || type == CDC_DATA_RECORD;
/** Ignite configuration. */
private final IgniteConfiguration igniteCfg;
/** Spring resource context. */
private final GridSpringResourceContext ctx;
/** CDC metrics registry. */
private MetricRegistryImpl mreg;
/** Current segment index metric. */
private AtomicLongMetric curSegmentIdx;
/** Committed state segment index metric. */
private AtomicLongMetric committedSegmentIdx;
/** Committed state segment offset metric. */
private AtomicLongMetric committedSegmentOffset;
/** Time of last segment consumption. */
private AtomicLongMetric lastSegmentConsumptionTs;
/** Metadata update time. */
private HistogramMetricImpl metaUpdate;
/**
* Metric represents time between creating {@link DataRecord}, containing the data change events, and capturing them
* by {@link CdcConsumer}.
*/
private HistogramMetricImpl evtCaptureTime;
/** Metric represents time between creating {@link WALIterator} and finish consuming it, in milliseconds. */
private HistogramMetricImpl segmentConsumingTime;
/** Change Data Capture configuration. */
protected final CdcConfiguration cdcCfg;
/** Events consumer. */
private final WalRecordsConsumer<?, ?> consumer;
/** Logger. */
private final IgniteLogger log;
/** Ignite folders. */
private NodeFileTree ft;
/** Standalone kernal context. */
private StandaloneGridKernalContext kctx;
/** Change Data Capture state. */
private CdcConsumerState state;
/**
* Saved state to start from. Points to the last committed offset. Set to {@code null} after failover on start and
* switching from {@link CdcMode#IGNITE_NODE_ACTIVE} to {@link CdcMode#CDC_UTILITY_ACTIVE}.
*
* @see #removeProcessedOnFailover(Path)
* @see #consumeSegmentActively(IgniteWalIteratorFactory.IteratorParametersBuilder)
*/
private T2<WALPointer, Integer> walState;
/** Types state. */
private Map<Integer, Long> typesState;
/** Mappings state. */
private Set<T2<Integer, Byte>> mappingsState;
/** Caches state. */
private Map<Integer, Long> cachesState;
/** CDC mode state. */
private volatile CdcMode cdcModeState;
    /** Started flag. */
private volatile boolean started;
/** Stopped flag. */
private volatile boolean stopped;
/** Already processed segments. */
private final Set<Path> processedSegments = new HashSet<>();
    /**
     * @param cfg Ignite configuration.
     * @param ctx Spring resource context.
     * @param cdcCfg Change Data Capture configuration.
     */
    public CdcMain(
        IgniteConfiguration cfg,
        GridSpringResourceContext ctx,
        CdcConfiguration cdcCfg
    ) {
        // Defensive copy: the caller's configuration is never mutated.
        igniteCfg = new IgniteConfiguration(cfg);
        this.ctx = ctx;
        this.cdcCfg = cdcCfg;

        try {
            // The work directory must be resolved before the logger can be initialized.
            U.initWorkDir(igniteCfg);

            log = U.initLogger(igniteCfg, "ignite-cdc");
        }
        catch (IgniteCheckedException e) {
            throw new IgniteException(e);
        }

        consumer = new WalRecordsConsumer<>(cdcCfg.getConsumer(), log);
    }
    /** Runs Change Data Capture. Wraps {@link #runX()}, logging and rethrowing any failure. */
    @Override public void run() {
        synchronized (this) {
            // stop() may have been invoked before the application started.
            if (stopped)
                return;
        }

        try {
            runX();
        }
        catch (Throwable e) {
            log.error("Cdc error", e);

            throw new IgniteException(e);
        }
    }
    /** Runs Change Data Capture application with possible exception. */
    public void runX() throws Exception {
        ackAsciiLogo();

        if (!CU.isCdcEnabled(igniteCfg)) {
            log.error(ERR_MSG);

            throw new IllegalArgumentException(ERR_MSG);
        }

        // Lock the node PDS folder for exclusive consumption; resolves 'ft' as a side effect.
        try (CdcFileLockHolder lock = lockPds()) {
            Files.createDirectories(ft.cdcState());

            if (log.isInfoEnabled()) {
                log.info("Change Data Capture [dir=" + ft.walCdc() + ']');
                log.info("Ignite node Binary meta [dir=" + ft.binaryMeta() + ']');
                log.info("Ignite node Marshaller [dir=" + ft.marshaller() + ']');
            }

            startStandaloneKernal();

            initMetrics();

            try {
                // Inject Spring resources into the user-provided consumer.
                kctx.resource().injectGeneric(consumer.consumer());

                // Restore consumption state persisted by a previous run, if any.
                state = createState(ft);

                walState = state.loadWalState();
                typesState = state.loadTypesState();
                mappingsState = state.loadMappingsState();
                cachesState = state.loadCaches();
                cdcModeState = state.loadCdcMode();

                if (walState != null) {
                    // Expose the restored committed position via metrics right away.
                    committedSegmentIdx.value(walState.get1().index());
                    committedSegmentOffset.value(walState.get1().fileOffset());
                }

                consumer.start(mreg, kctx.metric().registry(metricName("cdc", "consumer")));

                started = true;

                try {
                    consumeWalSegmentsUntilStopped();
                }
                finally {
                    stop();
                }
            }
            finally {
                closeAllComponents(kctx);

                if (log.isInfoEnabled())
                    log.info("Ignite Change Data Capture Application stopped.");
            }
        }
    }
    /**
     * Creates consumer state. {@code protected} so subclasses can substitute
     * a custom state implementation.
     *
     * @param ft Node file tree to persist the state under.
     * @return New consumer state holder.
     */
    protected CdcConsumerState createState(NodeFileTree ft) {
        return new CdcConsumerState(log, ft);
    }
    /**
     * Starts a standalone kernal context used by the CDC process to read node
     * metadata and to export its metrics.
     *
     * @throws IgniteCheckedException If failed.
     */
    private void startStandaloneKernal() throws IgniteCheckedException {
        kctx = new StandaloneGridKernalContext(log, ft) {
            @Override protected IgniteConfiguration prepareIgniteConfiguration() {
                IgniteConfiguration cfg = super.prepareIgniteConfiguration();

                // CDC runs as a separate process, so it gets a derived instance name.
                cfg.setIgniteInstanceName(cdcInstanceName(igniteCfg.getIgniteInstanceName()));
                cfg.setWorkDirectory(igniteCfg.getWorkDirectory());

                if (!F.isEmpty(cdcCfg.getMetricExporterSpi()))
                    cfg.setMetricExporterSpi(cdcCfg.getMetricExporterSpi());
                else {
                    // Default to JMX export unless MBeans are disabled globally.
                    cfg.setMetricExporterSpi(U.IGNITE_MBEANS_DISABLED
                        ? new NoopMetricExporterSpi()
                        : new JmxMetricExporterSpi());
                }

                initializeDefaultMBeanServer(cfg);

                return cfg;
            }

            /** {@inheritDoc} */
            @Override public String igniteInstanceName() {
                return config().getIgniteInstanceName();
            }
        };

        kctx.resource().setSpringContext(ctx);

        startAllComponents(kctx);

        for (IgniteSpi metricSpi : kctx.config().getMetricExporterSpi()) {
            metricSpi.onContextInitialized(new StandaloneSpiContext());
        }

        mreg = kctx.metric().registry("cdc");
    }
    /** Initialize metrics. Registers directory, segment-position and timing metrics on the "cdc" registry. */
    private void initMetrics() {
        mreg.objectMetric(BINARY_META_DIR, String.class, "Binary meta directory").value(ft.binaryMeta().getAbsolutePath());
        mreg.objectMetric(MARSHALLER_DIR, String.class, "Marshaller directory").value(ft.marshaller().getAbsolutePath());
        mreg.objectMetric(CDC_DIR, String.class, "CDC directory").value(ft.walCdc().getAbsolutePath());

        curSegmentIdx = mreg.longMetric(CUR_SEG_IDX, "Current segment index");
        committedSegmentIdx = mreg.longMetric(COMMITTED_SEG_IDX, "Committed segment index");
        committedSegmentOffset = mreg.longMetric(COMMITTED_SEG_OFFSET, "Committed segment offset");
        lastSegmentConsumptionTs =
            mreg.longMetric(LAST_SEG_CONSUMPTION_TIME, "Last time of consumption of WAL segment");
        metaUpdate = mreg.histogram(META_UPDATE, new long[] {100, 500, 1000}, "Metadata update time");
        evtCaptureTime = mreg.histogram(
            EVT_CAPTURE_TIME,
            new long[] {5_000, 10_000, 15_000, 30_000, 60_000},
            "Time between creating an event on Ignite node and capturing it by CdcConsumer");
        segmentConsumingTime = mreg.histogram(
            SEGMENT_CONSUMING_TIME,
            new long[] {25, 50, 100, 250, 500, 1000, 2500, 5000, 10000, 25000, 50000},
            "Time of WAL segment consumption by consumer, in milliseconds.");

        // Gauge reading the volatile mode field on every export.
        mreg.register(CDC_MODE, () -> cdcModeState.name(), String.class, "CDC mode");
    }
/**
* @return CDC lock holder for specifi folder.
* @throws IgniteCheckedException If failed.
*/
private CdcFileLockHolder lockPds() throws IgniteCheckedException {
PdsFolderSettings<CdcFileLockHolder> settings =
new PdsFolderResolver<>(igniteCfg, log, igniteCfg.getConsistentId(), this::tryLock).resolve();
if (settings == null) {
throw new IgniteException("Can't find the folder to read WAL segments from! " +
"[workDir=" + igniteCfg.getWorkDirectory() + ", consistentId=" + igniteCfg.getConsistentId() + ']');
}
ft = fileTree(settings.folderName());
CdcFileLockHolder lock = settings.getLockedFileLockHolder();
if (lock == null) {
File consIdDir = ft.nodeStorage();
lock = tryLock(consIdDir);
if (lock == null) {
throw new IgniteException(
"Can't acquire lock for Change Data Capture folder [dir=" + consIdDir.getAbsolutePath() + ']'
);
}
}
return lock;
}
    /** Waits and consumes new WAL segments until stopped. */
    public void consumeWalSegmentsUntilStopped() {
        try {
            // Segments observed in previous iterations (and not yet deleted from the CDC dir).
            Set<Path> seen = new HashSet<>();

            // Index of the last segment handed to consumption; -1 means "none yet".
            AtomicLong lastSgmnt = new AtomicLong(-1);

            while (!stopped) {
                if (!consumer.alive()) {
                    log.warning("Consumer is not alive. Ignite Change Data Capture Application will be stopped.");

                    return;
                }

                try (Stream<Path> cdcFiles = Files.list(ft.walCdc().toPath())) {
                    Set<Path> exists = new HashSet<>();

                    Iterator<Path> segments = cdcFiles
                        .peek(exists::add) // Store files that exists in cdc dir.
                        // Need unseen WAL segments only.
                        .filter(p -> NodeFileTree.walSegment(p.toFile()) && !seen.contains(p))
                        .peek(seen::add) // Adds to seen.
                        .sorted(Comparator.comparingLong(ft::walSegmentIndex)) // Sort by segment index.
                        .peek(p -> {
                            long nextSgmnt = ft.walSegmentIndex(p);

                            // A gap in segment indexes means lost events: fail fast.
                            if (lastSgmnt.get() != -1 && nextSgmnt - lastSgmnt.get() != 1) {
                                throw new IgniteException("Found missed segments. Some events are missed. Exiting! " +
                                    "[lastSegment=" + lastSgmnt.get() + ", nextSegment=" + nextSgmnt + ']');
                            }

                            lastSgmnt.set(nextSgmnt);
                        }).iterator();

                    while (segments.hasNext()) {
                        Path segment = segments.next();

                        // Failover: skip (and delete) segments already committed by a previous run.
                        if (walState != null && removeProcessedOnFailover(segment))
                            continue;

                        if (consumeSegment(segment)) {
                            // CDC mode switched. Reset partitions info to handle them again actively.
                            seen.clear();

                            lastSgmnt.set(-1);

                            walState = state.loadWalState();

                            break;
                        }

                        walState = null;
                    }

                    seen.removeIf(p -> !exists.contains(p)); // Clean up seen set.

                    if (lastSgmnt.get() == -1) //Forcefully updating metadata if no new segments found.
                        updateMetadata();
                }

                if (!stopped)
                    U.sleep(cdcCfg.getCheckFrequency());
            }
        }
        catch (IOException | IgniteInterruptedCheckedException e) {
            throw new IgniteException(e);
        }
    }
    /**
     * Reads all available records from segment.
     *
     * @param segment WAL segment file to process.
     * @return {@code true} if mode switched.
     */
    private boolean consumeSegment(Path segment) {
        updateMetadata();

        if (log.isInfoEnabled())
            log.info("Processing WAL segment [segment=" + segment + ']');

        IgniteWalIteratorFactory.IteratorParametersBuilder builder =
            new IgniteWalIteratorFactory.IteratorParametersBuilder()
                .log(log)
                .fileTree(ft)
                .igniteConfigurationModifier((cfg) -> cfg.setPluginProviders(igniteCfg.getPluginProviders()))
                .keepBinary(cdcCfg.isKeepBinary())
                .filesOrDirs(segment.toFile());

        if (igniteCfg.getDataStorageConfiguration().getPageSize() != 0)
            builder.pageSize(igniteCfg.getDataStorageConfiguration().getPageSize());

        // Resume from the last committed pointer when restoring state.
        if (walState != null)
            builder.from(walState.get1());

        long segmentIdx = ft.walSegmentIndex(segment);

        lastSegmentConsumptionTs.value(System.currentTimeMillis());

        curSegmentIdx.value(segmentIdx);

        long start = U.currentTimeMillis();

        if (cdcModeState == CdcMode.IGNITE_NODE_ACTIVE) {
            // Mode switch record found inside the segment: report it to the caller.
            if (consumeSegmentPassively(builder))
                return true;
        }
        else
            consumeSegmentActively(builder);

        segmentConsumingTime.value(U.currentTimeMillis() - start);

        // Remember the segment so it can be deleted once the committed state passes it.
        processedSegments.add(segment);

        return false;
    }
    /**
     * Consumes CDC events in {@link CdcMode#CDC_UTILITY_ACTIVE} mode:
     * data records are read from the segment and handed to the consumer.
     *
     * @param builder WAL iterator parameters for the segment being processed.
     */
    private void consumeSegmentActively(IgniteWalIteratorFactory.IteratorParametersBuilder builder) {
        try (DataEntryIterator iter = new DataEntryIterator(
            new IgniteWalIteratorFactory(log).iterator(builder.addFilter(ACTIVE_RECS)),
            evtCaptureTime)
        ) {
            // Skip entries already acknowledged inside the first record, if restoring state.
            if (walState != null)
                iter.init(walState.get2());

            boolean interrupted;

            do {
                boolean commit = consumer.onRecords(iter, WalRecordsConsumer.CDC_EVENT_TRANSFORMER, null);

                // Persist the acknowledged position only when the consumer committed.
                if (commit)
                    saveStateAndRemoveProcessed(iter.state());

                interrupted = Thread.interrupted();
            } while (iter.hasNext() && !interrupted);

            if (interrupted)
                throw new IgniteException("Change Data Capture Application interrupted");
        }
        catch (IgniteCheckedException | IOException e) {
            throw new IgniteException(e);
        }
    }
    /**
     * Consumes CDC events in {@link CdcMode#IGNITE_NODE_ACTIVE} mode.
     * Only CDC manager state records and the stop record pass the
     * {@code PASSIVE_RECS} filter here.
     *
     * @param builder WAL iterator parameters for the segment being processed.
     * @return {@code true} if mode switched.
     */
    private boolean consumeSegmentPassively(IgniteWalIteratorFactory.IteratorParametersBuilder builder) {
        try (WALIterator iter = new IgniteWalIteratorFactory(log).iterator(builder.addFilter(PASSIVE_RECS))) {
            boolean interrupted = false;

            while (iter.hasNext() && !interrupted) {
                IgniteBiTuple<WALPointer, WALRecord> next = iter.next();

                WALRecord walRecord = next.get2();

                switch (walRecord.type()) {
                    case CDC_MANAGER_RECORD:
                        // Node-side CDC committed a new position: persist it and prune segments.
                        saveStateAndRemoveProcessed(((CdcManagerRecord)walRecord).walState());

                        break;

                    case CDC_MANAGER_STOP_RECORD:
                        // Node-side CDC stopped: this utility takes over active consumption.
                        state.saveCdcMode((cdcModeState = CdcMode.CDC_UTILITY_ACTIVE));

                        if (log.isInfoEnabled())
                            log.info("CDC mode switched [mode=" + cdcModeState + ']');

                        return true;

                    default:
                        throw new IgniteException("Unexpected record [type=" + walRecord.type() + ']');
                }

                interrupted = Thread.interrupted();
            }

            if (interrupted)
                throw new IgniteException("Change Data Capture Application interrupted");

            return false;
        }
        catch (IgniteCheckedException | IOException e) {
            throw new IgniteException(e);
        }
    }
/** Metadata update. */
private void updateMetadata() {
long start = System.currentTimeMillis();
updateMappings();
updateTypes();
updateCaches();
metaUpdate.value(System.currentTimeMillis() - start);
}
    /** Search for new or changed {@link BinaryType} and notifies the consumer. */
    private void updateTypes() {
        try {
            File[] files = ft.binaryMeta().listFiles();

            if (files == null)
                return;

            Iterator<BinaryType> changedTypes = Arrays.stream(files)
                .filter(NodeFileTree::binFile)
                .map(f -> {
                    int typeId = NodeFileTree.typeId(f.getName());
                    long lastModified = f.lastModified();

                    // Filter out files already in `typesState` with the same last modify date.
                    if (typesState.containsKey(typeId) && lastModified == typesState.get(typeId))
                        return null;

                    // In-memory state is updated eagerly; it is persisted below only after
                    // the consumer has handled every changed type.
                    typesState.put(typeId, lastModified);

                    try {
                        // Load the on-disk metadata into the local cache before reading it.
                        kctx.cacheObjects().cacheMetadataLocally(ft, typeId);
                    }
                    catch (IgniteCheckedException e) {
                        throw new IgniteException(e);
                    }

                    return kctx.cacheObjects().metadata(typeId);
                })
                .filter(Objects::nonNull)
                .iterator();

            if (!changedTypes.hasNext())
                return;

            consumer.onTypes(changedTypes);

            // The consumer is obliged to drain the iterator completely.
            if (changedTypes.hasNext())
                throw new IllegalStateException("Consumer should handle all changed types");

            state.saveTypes(typesState);
        }
        catch (IOException e) {
            throw new IgniteException(e);
        }
    }
/** Search for new or changed {@link TypeMapping} and notifies the consumer. */
private void updateMappings() {
try {
File[] files = ft.marshaller().listFiles(NodeFileTree::notTmpFile);
if (files == null)
return;
Iterator<TypeMapping> changedMappings = typeMappingIterator(
files,
tm -> mappingsState.add(new T2<>(tm.typeId(), (byte)tm.platformType().ordinal()))
);
if (!changedMappings.hasNext())
return;
consumer.onMappings(changedMappings);
if (changedMappings.hasNext())
throw new IllegalStateException("Consumer should handle all changed mappings");
state.saveMappings(mappingsState);
}
catch (IOException e) {
throw new IgniteException(e);
}
}
    /** Search for new or changed {@link CdcCacheEvent} and notifies the consumer. */
    private void updateCaches() {
        try {
            if (ft.allStorages().noneMatch(File::exists))
                return;

            // Caches present in the saved state but not found on disk are considered destroyed.
            Set<Integer> destroyed = new HashSet<>(cachesState.keySet());

            Iterator<CdcCacheEvent> cacheEvts = GridLocalConfigManager
                .readCachesData(
                    ft,
                    kctx.marshallerContext().jdkMarshaller(),
                    igniteCfg)
                .entrySet().stream()
                .map(data -> {
                    int cacheId = data.getValue().cacheId();
                    long lastModified = data.getKey().lastModified();

                    destroyed.remove(cacheId);

                    Long lastModified0 = cachesState.get(cacheId);

                    // Skip caches whose configuration file is unchanged since the last run.
                    if (lastModified0 != null && lastModified0 == lastModified)
                        return null;

                    cachesState.put(cacheId, lastModified);

                    return (CdcCacheEvent)data.getValue();
                })
                .filter(Objects::nonNull)
                .iterator();

            consumer.onCacheEvents(cacheEvts);

            // The consumer is obliged to drain the iterator completely.
            if (cacheEvts.hasNext())
                throw new IllegalStateException("Consumer should handle all cache change events");

            if (!destroyed.isEmpty()) {
                Iterator<Integer> destroyedIter = destroyed.iterator();

                consumer.onCacheDestroyEvents(destroyedIter);

                if (destroyedIter.hasNext())
                    throw new IllegalStateException("Consumer should handle all cache destroy events");
            }

            state.saveCaches(cachesState);
        }
        catch (IOException e) {
            throw new IgniteException(e);
        }
    }
/**
* Remove segment file if it already processed. {@link #walState} points to the last committed offset so all files
* before this offset can be removed.
*
* @param segment Segment to check.
* @return {@code True} if segment file was deleted, {@code false} otherwise.
*/
private boolean removeProcessedOnFailover(Path segment) {
long segmentIdx = ft.walSegmentIndex(segment);
if (segmentIdx > walState.get1().index()) {
throw new IgniteException("Found segment greater then saved state. Some events are missed. Exiting! " +
"[state=" + walState + ", segment=" + segmentIdx + ']');
}
if (segmentIdx < walState.get1().index()) {
if (log.isInfoEnabled()) {
log.info("Already processed segment found. Skipping and deleting the file [segment=" +
segmentIdx + ", state=" + walState.get1().index() + ']');
}
// WAL segment is a hard link to a segment file in the special Change Data Capture folder.
// So, we can safely delete it after processing.
try {
Files.delete(segment);
return true;
}
catch (IOException e) {
throw new IgniteException(e);
}
}
return false;
}
/** Saves WAL consumption state and delete segments that no longer required. */
private void saveStateAndRemoveProcessed(T2<WALPointer, Integer> curState) throws IOException {
if (curState == null)
return;
if (log.isDebugEnabled())
log.debug("Saving state [curState=" + curState + ']');
state.saveWal(curState);
committedSegmentIdx.value(curState.get1().index());
committedSegmentOffset.value(curState.get1().fileOffset());
Iterator<Path> rmvIter = processedSegments.iterator();
while (rmvIter.hasNext()) {
Path processedSegment = rmvIter.next();
// Can't delete current segment, because state points to it.
if (ft.walSegmentIndex(processedSegment) >= curState.get1().index())
continue;
// WAL segment is a hard link to a segment file in a specifal Change Data Capture folder.
// So we can safely delete it after success processing.
Files.delete(processedSegment);
rmvIter.remove();
}
}
/**
* Try locks Change Data Capture directory.
*
* @param dbStoreDirWithSubdirectory Root PDS directory.
* @return Lock or null if lock failed.
*/
private CdcFileLockHolder tryLock(File dbStoreDirWithSubdirectory) {
if (!dbStoreDirWithSubdirectory.exists()) {
log.warning("DB store directory not exists. Should be created by Ignite Node " +
" [dir=" + dbStoreDirWithSubdirectory + ']');
return null;
}
ft = fileTree(dbStoreDirWithSubdirectory.getName());
if (!ft.walCdc().exists()) {
log.warning("CDC directory not exists. Should be created by Ignite Node. " +
"Is Change Data Capture enabled in IgniteConfiguration? [dir=" + ft.walCdc() + ']');
return null;
}
CdcFileLockHolder lock = new CdcFileLockHolder(ft.walCdc().toString(), "cdc.lock", log);
try {
lock.tryLock(cdcCfg.getLockTimeout());
return lock;
}
catch (IgniteCheckedException e) {
U.closeQuiet(lock);
if (log.isInfoEnabled()) {
log.info("Unable to acquire lock to lock CDC folder [dir=" + ft.walCdc() + "]" + NL +
"Reason: " + e.getMessage());
}
return null;
}
}
/** Stops the application. */
public void stop() {
synchronized (this) {
if (stopped || !started)
return;
if (log.isInfoEnabled())
log.info("Stopping Change Data Capture service instance");
stopped = true;
consumer.stop();
}
}
    /** Prints the CDC ASCII logo, version and basic runtime info to the configured logger. */
    private void ackAsciiLogo() {
        String ver = "ver. " + ACK_VER_STR;

        // Full banner at INFO level.
        if (log.isInfoEnabled()) {
            log.info(NL + NL +
                ">>> __________ ________________ ________ _____" + NL +
                ">>> / _/ ___/ |/ / _/_ __/ __/ / ___/ _ \\/ ___/" + NL +
                ">>> _/ // (7 7 // / / / / _/ / /__/ // / /__ " + NL +
                ">>> /___/\\___/_/|_/___/ /_/ /___/ \\___/____/\\___/ " + NL +
                ">>> " + NL +
                ">>> " + ver + NL +
                ">>> " + COPYRIGHT + NL +
                ">>> " + NL +
                ">>> Ignite documentation: " + "http://" + SITE + NL +
                ">>> Consumer: " + U.toStringSafe(consumer.consumer()) + NL +
                ">>> ConsistentId: " + igniteCfg.getConsistentId() + NL
            );
        }

        // Condensed banner for quiet mode, written directly to the console.
        if (log.isQuiet()) {
            U.quiet(false,
                " __________ ________________ ________ _____",
                " / _/ ___/ |/ / _/_ __/ __/ / ___/ _ \\/ ___/",
                " _/ // (7 7 // / / / / _/ / /__/ // / /__ ",
                "/___/\\___/_/|_/___/ /_/ /___/ \\___/____/\\___/ ",
                "",
                ver,
                COPYRIGHT,
                "",
                "Ignite documentation: " + "http://" + SITE,
                "Consumer: " + U.toStringSafe(consumer.consumer()),
                "ConsistentId: " + igniteCfg.getConsistentId(),
                "",
                "Quiet mode.");

            // Point the operator at the log file, if file logging is active.
            String fileName = log.fileName();

            if (fileName != null)
                U.quiet(false, " ^-- Logging to file '" + fileName + '\'');

            if (log instanceof GridLoggerProxy)
                U.quiet(false, " ^-- Logging by '" + ((GridLoggerProxy)log).getLoggerInfo() + '\'');

            U.quiet(false,
                " ^-- To see **FULL** console log here add -DIGNITE_QUIET=false or \"-v\" to ignite-cdc.{sh|bat}",
                "");
        }
    }
/** */
public static String cdcInstanceName(String igniteInstanceName) {
return "cdc-" + igniteInstanceName;
}
/**
* @param files Mapping files.
* @param filter Filter.
* @return Type mapping iterator.
*/
public static Iterator<TypeMapping> typeMappingIterator(File[] files, Predicate<TypeMapping> filter) {
return Arrays.stream(files)
.map(f -> {
String fileName = f.getName();
int typeId = BinaryUtils.mappedTypeId(fileName);
byte platformId = BinaryUtils.mappedFilePlatformId(fileName);
return (TypeMapping)new TypeMappingImpl(
typeId,
BinaryUtils.readMapping(f),
platformId == 0 ? PlatformType.JAVA : PlatformType.DOTNET);
})
.filter(filter)
.filter(Objects::nonNull)
.iterator();
}
    /**
     * Creates a node file tree rooted at the given PDS folder.
     *
     * @param folderName Folder name (node's PDS subdirectory name).
     * @return Node file tree for this node's configuration and folder.
     */
    private NodeFileTree fileTree(String folderName) {
        return new NodeFileTree(igniteCfg, folderName);
    }
}
|
googleapis/google-cloud-java | 37,437 | java-functions/proto-google-cloud-functions-v1/src/main/java/com/google/cloud/functions/v1/SecretEnvVar.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/functions/v1/functions.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.functions.v1;
/**
*
*
* <pre>
* Configuration for a secret environment variable. It has the information
* necessary to fetch the secret value from secret manager and expose it as an
* environment variable.
* </pre>
*
* Protobuf type {@code google.cloud.functions.v1.SecretEnvVar}
*/
public final class SecretEnvVar extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.functions.v1.SecretEnvVar)
SecretEnvVarOrBuilder {
private static final long serialVersionUID = 0L;
// Use SecretEnvVar.newBuilder() to construct.
private SecretEnvVar(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SecretEnvVar() {
key_ = "";
projectId_ = "";
secret_ = "";
version_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SecretEnvVar();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.functions.v1.FunctionsProto
.internal_static_google_cloud_functions_v1_SecretEnvVar_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.functions.v1.FunctionsProto
.internal_static_google_cloud_functions_v1_SecretEnvVar_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.functions.v1.SecretEnvVar.class,
com.google.cloud.functions.v1.SecretEnvVar.Builder.class);
}
  public static final int KEY_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; the decoded/encoded form is lazily cached on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object key_ = "";

  /**
   *
   *
   * <pre>
   * Name of the environment variable.
   * </pre>
   *
   * <code>string key = 1;</code>
   *
   * @return The key.
   */
  @java.lang.Override
  public java.lang.String getKey() {
    java.lang.Object ref = key_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field currently holds the wire form (ByteString): decode UTF-8 once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      key_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Name of the environment variable.
   * </pre>
   *
   * <code>string key = 1;</code>
   *
   * @return The bytes for key.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getKeyBytes() {
    java.lang.Object ref = key_;
    if (ref instanceof java.lang.String) {
      // Field currently holds a String: encode to UTF-8 once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      key_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int PROJECT_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object projectId_ = "";
/**
*
*
* <pre>
* Project identifier (preferably project number but can also be the project
* ID) of the project that contains the secret. If not set, it will be
* populated with the function's project assuming that the secret exists in
* the same project as of the function.
* </pre>
*
* <code>string project_id = 2;</code>
*
* @return The projectId.
*/
@java.lang.Override
public java.lang.String getProjectId() {
java.lang.Object ref = projectId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
projectId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project identifier (preferably project number but can also be the project
* ID) of the project that contains the secret. If not set, it will be
* populated with the function's project assuming that the secret exists in
* the same project as of the function.
* </pre>
*
* <code>string project_id = 2;</code>
*
* @return The bytes for projectId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectIdBytes() {
java.lang.Object ref = projectId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
projectId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SECRET_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object secret_ = "";
/**
*
*
* <pre>
* Name of the secret in secret manager (not the full resource name).
* </pre>
*
* <code>string secret = 3;</code>
*
* @return The secret.
*/
@java.lang.Override
public java.lang.String getSecret() {
java.lang.Object ref = secret_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
secret_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name of the secret in secret manager (not the full resource name).
* </pre>
*
* <code>string secret = 3;</code>
*
* @return The bytes for secret.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSecretBytes() {
java.lang.Object ref = secret_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
secret_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int VERSION_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object version_ = "";
/**
*
*
* <pre>
* Version of the secret (version number or the string 'latest'). It is
* recommended to use a numeric version for secret environment variables as
* any updates to the secret value is not reflected until new instances start.
* </pre>
*
* <code>string version = 4;</code>
*
* @return The version.
*/
@java.lang.Override
public java.lang.String getVersion() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
version_ = s;
return s;
}
}
/**
*
*
* <pre>
* Version of the secret (version number or the string 'latest'). It is
* recommended to use a numeric version for secret environment variables as
* any updates to the secret value is not reflected until new instances start.
* </pre>
*
* <code>string version = 4;</code>
*
* @return The bytes for version.
*/
@java.lang.Override
public com.google.protobuf.ByteString getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
version_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Memoized tri-state: 1 = initialized, 0 = not initialized, -1 = not yet computed.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields in this proto3 message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: string fields equal to their default (empty) are omitted from the wire format.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, projectId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(secret_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, secret_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, version_);
    }
    // Re-emit any fields that were unknown at parse time (forward compatibility).
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Serialized size is memoized; -1 marks "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    // Mirrors writeTo(): only non-default (non-empty) fields contribute to the size.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, projectId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(secret_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, secret_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, version_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.functions.v1.SecretEnvVar)) {
      return super.equals(obj);
    }
    com.google.cloud.functions.v1.SecretEnvVar other =
        (com.google.cloud.functions.v1.SecretEnvVar) obj;

    // Field-by-field comparison; unknown fields participate in equality as well.
    if (!getKey().equals(other.getKey())) return false;
    if (!getProjectId().equals(other.getProjectId())) return false;
    if (!getSecret().equals(other.getSecret())) return false;
    if (!getVersion().equals(other.getVersion())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 marks "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Combines the descriptor, each field (tagged by its field number) and the unknown fields,
    // consistent with equals() above.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + KEY_FIELD_NUMBER;
    hash = (53 * hash) + getKey().hashCode();
    hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getProjectId().hashCode();
    hash = (37 * hash) + SECRET_FIELD_NUMBER;
    hash = (53 * hash) + getSecret().hashCode();
    hash = (37 * hash) + VERSION_FIELD_NUMBER;
    hash = (53 * hash) + getVersion().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.functions.v1.SecretEnvVar parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.functions.v1.SecretEnvVar parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.functions.v1.SecretEnvVar parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.functions.v1.SecretEnvVar parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.functions.v1.SecretEnvVar parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.functions.v1.SecretEnvVar parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.functions.v1.SecretEnvVar parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.functions.v1.SecretEnvVar parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.functions.v1.SecretEnvVar parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.functions.v1.SecretEnvVar parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.functions.v1.SecretEnvVar parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.functions.v1.SecretEnvVar parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.functions.v1.SecretEnvVar prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Configuration for a secret environment variable. It has the information
* necessary to fetch the secret value from secret manager and expose it as an
* environment variable.
* </pre>
*
* Protobuf type {@code google.cloud.functions.v1.SecretEnvVar}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.functions.v1.SecretEnvVar)
com.google.cloud.functions.v1.SecretEnvVarOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.functions.v1.FunctionsProto
.internal_static_google_cloud_functions_v1_SecretEnvVar_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.functions.v1.FunctionsProto
.internal_static_google_cloud_functions_v1_SecretEnvVar_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.functions.v1.SecretEnvVar.class,
com.google.cloud.functions.v1.SecretEnvVar.Builder.class);
}
// Construct using com.google.cloud.functions.v1.SecretEnvVar.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
key_ = "";
projectId_ = "";
secret_ = "";
version_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.functions.v1.FunctionsProto
.internal_static_google_cloud_functions_v1_SecretEnvVar_descriptor;
}
@java.lang.Override
public com.google.cloud.functions.v1.SecretEnvVar getDefaultInstanceForType() {
return com.google.cloud.functions.v1.SecretEnvVar.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.functions.v1.SecretEnvVar build() {
com.google.cloud.functions.v1.SecretEnvVar result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.functions.v1.SecretEnvVar buildPartial() {
com.google.cloud.functions.v1.SecretEnvVar result =
new com.google.cloud.functions.v1.SecretEnvVar(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.functions.v1.SecretEnvVar result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.key_ = key_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.projectId_ = projectId_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.secret_ = secret_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.version_ = version_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.functions.v1.SecretEnvVar) {
return mergeFrom((com.google.cloud.functions.v1.SecretEnvVar) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.functions.v1.SecretEnvVar other) {
if (other == com.google.cloud.functions.v1.SecretEnvVar.getDefaultInstance()) return this;
if (!other.getKey().isEmpty()) {
key_ = other.key_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getProjectId().isEmpty()) {
projectId_ = other.projectId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getSecret().isEmpty()) {
secret_ = other.secret_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getVersion().isEmpty()) {
version_ = other.version_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Each tag encodes (field_number << 3) | wire_type; tag 0 marks end of input.
          // Tags 10/18/26/34 are fields 1-4 with wire type 2 (length-delimited string).
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                key_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                projectId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                secret_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                version_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                // Unrecognized fields are stored as unknown fields; false means an end-group tag.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure, since fields may have been partially merged.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object key_ = "";
/**
*
*
* <pre>
* Name of the environment variable.
* </pre>
*
* <code>string key = 1;</code>
*
* @return The key.
*/
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
key_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name of the environment variable.
* </pre>
*
* <code>string key = 1;</code>
*
* @return The bytes for key.
*/
public com.google.protobuf.ByteString getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
key_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name of the environment variable.
* </pre>
*
* <code>string key = 1;</code>
*
* @param value The key to set.
* @return This builder for chaining.
*/
public Builder setKey(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
key_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the environment variable.
* </pre>
*
* <code>string key = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearKey() {
key_ = getDefaultInstance().getKey();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the environment variable.
* </pre>
*
* <code>string key = 1;</code>
*
* @param value The bytes for key to set.
* @return This builder for chaining.
*/
public Builder setKeyBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
key_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object projectId_ = "";
/**
*
*
* <pre>
* Project identifier (preferably project number but can also be the project
* ID) of the project that contains the secret. If not set, it will be
* populated with the function's project assuming that the secret exists in
* the same project as of the function.
* </pre>
*
* <code>string project_id = 2;</code>
*
* @return The projectId.
*/
public java.lang.String getProjectId() {
java.lang.Object ref = projectId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
projectId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Project identifier (preferably project number but can also be the project
* ID) of the project that contains the secret. If not set, it will be
* populated with the function's project assuming that the secret exists in
* the same project as of the function.
* </pre>
*
* <code>string project_id = 2;</code>
*
* @return The bytes for projectId.
*/
public com.google.protobuf.ByteString getProjectIdBytes() {
java.lang.Object ref = projectId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
projectId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Project identifier (preferably project number but can also be the project
* ID) of the project that contains the secret. If not set, it will be
* populated with the function's project assuming that the secret exists in
* the same project as of the function.
* </pre>
*
* <code>string project_id = 2;</code>
*
* @param value The projectId to set.
* @return This builder for chaining.
*/
public Builder setProjectId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
projectId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Project identifier (preferably project number but can also be the project
* ID) of the project that contains the secret. If not set, it will be
* populated with the function's project assuming that the secret exists in
* the same project as of the function.
* </pre>
*
* <code>string project_id = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearProjectId() {
projectId_ = getDefaultInstance().getProjectId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Project identifier (preferably project number but can also be the project
* ID) of the project that contains the secret. If not set, it will be
* populated with the function's project assuming that the secret exists in
* the same project as of the function.
* </pre>
*
* <code>string project_id = 2;</code>
*
* @param value The bytes for projectId to set.
* @return This builder for chaining.
*/
public Builder setProjectIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
projectId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object secret_ = "";
/**
*
*
* <pre>
* Name of the secret in secret manager (not the full resource name).
* </pre>
*
* <code>string secret = 3;</code>
*
* @return The secret.
*/
public java.lang.String getSecret() {
java.lang.Object ref = secret_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
secret_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name of the secret in secret manager (not the full resource name).
* </pre>
*
* <code>string secret = 3;</code>
*
* @return The bytes for secret.
*/
public com.google.protobuf.ByteString getSecretBytes() {
java.lang.Object ref = secret_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
secret_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name of the secret in secret manager (not the full resource name).
* </pre>
*
* <code>string secret = 3;</code>
*
* @param value The secret to set.
* @return This builder for chaining.
*/
public Builder setSecret(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
secret_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the secret in secret manager (not the full resource name).
* </pre>
*
* <code>string secret = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearSecret() {
secret_ = getDefaultInstance().getSecret();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the secret in secret manager (not the full resource name).
* </pre>
*
* <code>string secret = 3;</code>
*
* @param value The bytes for secret to set.
* @return This builder for chaining.
*/
public Builder setSecretBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
secret_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object version_ = "";
/**
*
*
* <pre>
* Version of the secret (version number or the string 'latest'). It is
* recommended to use a numeric version for secret environment variables as
* any updates to the secret value is not reflected until new instances start.
* </pre>
*
* <code>string version = 4;</code>
*
* @return The version.
*/
public java.lang.String getVersion() {
java.lang.Object ref = version_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
version_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Version of the secret (version number or the string 'latest'). It is
* recommended to use a numeric version for secret environment variables as
* any updates to the secret value is not reflected until new instances start.
* </pre>
*
* <code>string version = 4;</code>
*
* @return The bytes for version.
*/
public com.google.protobuf.ByteString getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
version_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Version of the secret (version number or the string 'latest'). It is
* recommended to use a numeric version for secret environment variables as
* any updates to the secret value is not reflected until new instances start.
* </pre>
*
* <code>string version = 4;</code>
*
* @param value The version to set.
* @return This builder for chaining.
*/
public Builder setVersion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
version_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Version of the secret (version number or the string 'latest'). It is
* recommended to use a numeric version for secret environment variables as
* any updates to the secret value is not reflected until new instances start.
* </pre>
*
* <code>string version = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearVersion() {
version_ = getDefaultInstance().getVersion();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Version of the secret (version number or the string 'latest'). It is
* recommended to use a numeric version for secret environment variables as
* any updates to the secret value is not reflected until new instances start.
* </pre>
*
* <code>string version = 4;</code>
*
* @param value The bytes for version to set.
* @return This builder for chaining.
*/
public Builder setVersionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
version_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.functions.v1.SecretEnvVar)
}
// @@protoc_insertion_point(class_scope:google.cloud.functions.v1.SecretEnvVar)
private static final com.google.cloud.functions.v1.SecretEnvVar DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.functions.v1.SecretEnvVar();
}
public static com.google.cloud.functions.v1.SecretEnvVar getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SecretEnvVar> PARSER =
new com.google.protobuf.AbstractParser<SecretEnvVar>() {
@java.lang.Override
public SecretEnvVar parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SecretEnvVar> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SecretEnvVar> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.functions.v1.SecretEnvVar getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/harmony | 34,847 | classlib/modules/luni/src/test/api/common/org/apache/harmony/luni/tests/java/lang/ThreadTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.luni.tests.java.lang;
import java.lang.Thread.UncaughtExceptionHandler;
import java.security.Permission;
import java.util.Map;
public class ThreadTest extends junit.framework.TestCase {
static class SimpleThread implements Runnable {
int delay;
public void run() {
try {
synchronized (this) {
this.notify();
this.wait(delay);
}
} catch (InterruptedException e) {
return;
}
}
public SimpleThread(int d) {
if (d >= 0)
delay = d;
}
}
static class YieldThread implements Runnable {
volatile int delay;
public void run() {
int x = 0;
while (true) {
++x;
}
}
public YieldThread(int d) {
if (d >= 0)
delay = d;
}
}
static class ResSupThread implements Runnable {
Thread parent;
volatile int checkVal = -1;
public void run() {
try {
synchronized (this) {
this.notify();
}
while (true) {
checkVal++;
zz();
Thread.sleep(100);
}
} catch (InterruptedException e) {
return;
} catch (BogusException e) {
try {
// Give parent a chance to sleep
Thread.sleep(500);
} catch (InterruptedException x) {
}
parent.interrupt();
while (!Thread.currentThread().isInterrupted()) {
// Don't hog the CPU
try {
Thread.sleep(50);
} catch (InterruptedException x) {
// This is what we've been waiting for...don't throw it
// away!
break;
}
}
}
}
public void zz() throws BogusException {
}
public ResSupThread(Thread t) {
parent = t;
}
public synchronized int getCheckVal() {
return checkVal;
}
}
static class BogusException extends Throwable {
private static final long serialVersionUID = 1L;
public BogusException(String s) {
super(s);
}
}
Thread st, ct, spinner;
static boolean calledMySecurityManager = false;
/**
* @tests java.lang.Thread#Thread()
*/
public void test_Constructor() {
// Test for method java.lang.Thread()
Thread t;
SecurityManager m = new SecurityManager() {
@Override
public ThreadGroup getThreadGroup() {
calledMySecurityManager = true;
return Thread.currentThread().getThreadGroup();
}
@Override
public void checkPermission(Permission permission) {
if (permission.getName().equals("setSecurityManager")) {
return;
}
super.checkPermission(permission);
}
};
try {
// To see if it checks Thread creation with our SecurityManager
System.setSecurityManager(m);
t = new Thread();
} finally {
// restore original, no side-effects
System.setSecurityManager(null);
}
assertTrue("Did not call SecurityManager.getThreadGroup ()",
calledMySecurityManager);
t.start();
}
/**
* @tests java.lang.Thread#Thread(java.lang.Runnable)
*/
public void test_ConstructorLjava_lang_Runnable() {
// Test for method java.lang.Thread(java.lang.Runnable)
ct = new Thread(new SimpleThread(10));
ct.start();
}
/**
* @tests java.lang.Thread#Thread(java.lang.Runnable, java.lang.String)
*/
public void test_ConstructorLjava_lang_RunnableLjava_lang_String() {
// Test for method java.lang.Thread(java.lang.Runnable,
// java.lang.String)
Thread st1 = new Thread(new SimpleThread(1), "SimpleThread1");
assertEquals("Constructed thread with incorrect thread name", "SimpleThread1", st1
.getName());
st1.start();
}
/**
* @tests java.lang.Thread#Thread(java.lang.String)
*/
public void test_ConstructorLjava_lang_String() {
// Test for method java.lang.Thread(java.lang.String)
Thread t = new Thread("Testing");
assertEquals("Created tread with incorrect name",
"Testing", t.getName());
t.start();
}
/**
* @tests java.lang.Thread#Thread(java.lang.ThreadGroup, java.lang.Runnable)
*/
public void test_ConstructorLjava_lang_ThreadGroupLjava_lang_Runnable() {
// Test for method java.lang.Thread(java.lang.ThreadGroup,
// java.lang.Runnable)
ThreadGroup tg = new ThreadGroup("Test Group1");
st = new Thread(tg, new SimpleThread(1), "SimpleThread2");
assertTrue("Returned incorrect thread group", st.getThreadGroup() == tg);
st.start();
try {
st.join();
} catch (InterruptedException e) {
}
tg.destroy();
}
/**
* @tests java.lang.Thread#Thread(java.lang.ThreadGroup, java.lang.Runnable,
* java.lang.String)
*/
public void test_ConstructorLjava_lang_ThreadGroupLjava_lang_RunnableLjava_lang_String() {
// Test for method java.lang.Thread(java.lang.ThreadGroup,
// java.lang.Runnable, java.lang.String)
ThreadGroup tg = new ThreadGroup("Test Group2");
st = new Thread(tg, new SimpleThread(1), "SimpleThread3");
assertTrue("Constructed incorrect thread", (st.getThreadGroup() == tg)
&& st.getName().equals("SimpleThread3"));
st.start();
try {
st.join();
} catch (InterruptedException e) {
}
tg.destroy();
Runnable r = new Runnable() {
public void run() {
}
};
ThreadGroup foo = null;
try {
new Thread(foo = new ThreadGroup("foo"), r, null);
// Should not get here
fail("Null cannot be accepted as Thread name");
} catch (NullPointerException npe) {
assertTrue("Null cannot be accepted as Thread name", true);
foo.destroy();
}
}
/**
* @tests java.lang.Thread#Thread(java.lang.ThreadGroup, java.lang.String)
*/
public void test_ConstructorLjava_lang_ThreadGroupLjava_lang_String() {
// Test for method java.lang.Thread(java.lang.ThreadGroup,
// java.lang.String)
st = new Thread(new SimpleThread(1), "SimpleThread4");
assertEquals("Returned incorrect thread name",
"SimpleThread4", st.getName());
st.start();
}
/**
* @tests java.lang.Thread#activeCount()
*/
public void test_activeCount() {
// Test for method int java.lang.Thread.activeCount()
Thread t = new Thread(new SimpleThread(10));
int active = 0;
synchronized (t) {
t.start();
active = Thread.activeCount();
}
assertTrue("Incorrect activeCount for current group: " + active, active > 1);
try {
t.join();
} catch (InterruptedException e) {
}
}
/**
* @tests java.lang.Thread#checkAccess()
*/
public void test_checkAccess() {
// Test for method void java.lang.Thread.checkAccess()
ThreadGroup tg = new ThreadGroup("Test Group3");
try {
st = new Thread(tg, new SimpleThread(1), "SimpleThread5");
st.checkAccess();
assertTrue("CheckAccess passed", true);
} catch (SecurityException e) {
fail("CheckAccess failed : " + e.getMessage());
}
st.start();
try {
st.join();
} catch (InterruptedException e) {
}
tg.destroy();
}
/**
* @tests java.lang.Thread#countStackFrames()
*/
@SuppressWarnings("deprecation")
public void test_countStackFrames() {
/*
* Thread.countStackFrames() is unpredictable, so we just test that it
* doesn't throw an exception.
*/
Thread.currentThread().countStackFrames();
}
/**
* @tests java.lang.Thread#currentThread()
*/
public void test_currentThread() {
assertNotNull(Thread.currentThread());
}
/**
* @tests java.lang.Thread#destroy()
*/
@SuppressWarnings("deprecation")
public void test_destroy() {
try {
new Thread().destroy();
// FIXME uncomment when IBM VME is updated
//fail("NoSuchMethodError was not thrown");
} catch (NoSuchMethodError e) {
}
}
/**
* @tests java.lang.Thread#enumerate(java.lang.Thread[])
*/
public void test_enumerate$Ljava_lang_Thread() {
// Test for method int java.lang.Thread.enumerate(java.lang.Thread [])
// The test has been updated according to HARMONY-1974 JIRA issue.
class MyThread extends Thread {
MyThread(ThreadGroup tg, String name) {
super(tg, name);
}
boolean failed = false;
String failMessage = null;
public void run() {
SimpleThread st1 = null;
SimpleThread st2 = null;
ThreadGroup mytg = null;
Thread firstOne = null;
Thread secondOne = null;
try {
int arrayLength = 10;
Thread[] tarray = new Thread[arrayLength];
st1 = new SimpleThread(-1);
st2 = new SimpleThread(-1);
mytg = new ThreadGroup("jp");
firstOne = new Thread(mytg, st1, "firstOne2");
secondOne = new Thread(mytg, st2, "secondOne1");
int count = Thread.enumerate(tarray);
assertEquals("Incorrect value returned1",
1, count);
synchronized (st1) {
firstOne.start();
try {
st1.wait();
} catch (InterruptedException e) {
}
}
count = Thread.enumerate(tarray);
assertEquals("Incorrect value returned2",
2, count);
synchronized (st2) {
secondOne.start();
try {
st2.wait();
} catch (InterruptedException e) {
}
}
count = Thread.enumerate(tarray);
assertEquals("Incorrect value returned3",
3, count);
} catch (junit.framework.AssertionFailedError e) {
failed = true;
failMessage = e.getMessage();
} finally {
synchronized (st1) {
firstOne.interrupt();
}
synchronized (st2) {
secondOne.interrupt();
}
try {
firstOne.join();
secondOne.join();
} catch (InterruptedException e) {
}
mytg.destroy();
}
}
};
ThreadGroup tg = new ThreadGroup("tg");
MyThread t = new MyThread(tg, "top");
t.start();
try {
t.join();
} catch (InterruptedException e) {
fail("Unexpected interrupt");
} finally {
tg.destroy();
}
assertFalse(t.failMessage, t.failed);
}
/**
* @tests java.lang.Thread#getContextClassLoader()
*/
public void test_getContextClassLoader() {
// Test for method java.lang.ClassLoader
// java.lang.Thread.getContextClassLoader()
Thread t = new Thread();
assertTrue("Incorrect class loader returned",
t.getContextClassLoader() == Thread.currentThread()
.getContextClassLoader());
t.start();
}
/**
* @tests java.lang.Thread#getName()
*/
public void test_getName() {
// Test for method java.lang.String java.lang.Thread.getName()
st = new Thread(new SimpleThread(1), "SimpleThread6");
assertEquals("Returned incorrect thread name",
"SimpleThread6", st.getName());
st.start();
}
/**
* @tests java.lang.Thread#getPriority()
*/
public void test_getPriority() {
// Test for method int java.lang.Thread.getPriority()
st = new Thread(new SimpleThread(1));
st.setPriority(Thread.MAX_PRIORITY);
assertTrue("Returned incorrect thread priority",
st.getPriority() == Thread.MAX_PRIORITY);
st.start();
}
/**
* @tests java.lang.Thread#getThreadGroup()
*/
public void test_getThreadGroup() {
// Test for method java.lang.ThreadGroup
// java.lang.Thread.getThreadGroup()
ThreadGroup tg = new ThreadGroup("Test Group4");
st = new Thread(tg, new SimpleThread(1), "SimpleThread8");
assertTrue("Returned incorrect thread group", st.getThreadGroup() == tg);
st.start();
try {
st.join();
} catch (InterruptedException e) {
}
assertNull("group should be null", st.getThreadGroup());
assertNotNull("toString() should not be null", st.toString());
tg.destroy();
final Object lock = new Object();
Thread t = new Thread() {
@Override
public void run() {
synchronized (lock) {
lock.notifyAll();
}
}
};
synchronized (lock) {
t.start();
try {
lock.wait();
} catch (InterruptedException e) {
}
}
int running = 0;
while (t.isAlive())
running++;
ThreadGroup group = t.getThreadGroup();
assertNull("ThreadGroup is not null", group);
}
/**
* @tests java.lang.Thread#interrupt()
*/
public void test_interrupt() {
// Test for method void java.lang.Thread.interrupt()
final Object lock = new Object();
class ChildThread1 extends Thread {
Thread parent;
boolean sync;
@Override
public void run() {
if (sync) {
synchronized (lock) {
lock.notify();
try {
lock.wait();
} catch (InterruptedException e) {
}
}
}
parent.interrupt();
}
public ChildThread1(Thread p, String name, boolean sync) {
super(name);
parent = p;
this.sync = sync;
}
}
boolean interrupted = false;
try {
ct = new ChildThread1(Thread.currentThread(), "Interrupt Test1",
false);
synchronized (lock) {
ct.start();
lock.wait();
}
} catch (InterruptedException e) {
interrupted = true;
}
assertTrue("Failed to Interrupt thread1", interrupted);
interrupted = false;
try {
ct = new ChildThread1(Thread.currentThread(), "Interrupt Test2",
true);
synchronized (lock) {
ct.start();
lock.wait();
lock.notify();
}
Thread.sleep(20000);
} catch (InterruptedException e) {
interrupted = true;
}
assertTrue("Failed to Interrupt thread2", interrupted);
}
/**
* @tests java.lang.Thread#interrupted()
*/
public void test_interrupted() {
assertFalse("Interrupted returned true for non-interrupted thread", Thread
.interrupted());
Thread.currentThread().interrupt();
assertTrue("Interrupted returned true for non-interrupted thread", Thread.interrupted());
assertFalse("Failed to clear interrupted flag", Thread.interrupted());
}
/**
* @tests java.lang.Thread#isAlive()
*/
public void test_isAlive() {
// Test for method boolean java.lang.Thread.isAlive()
SimpleThread simple;
st = new Thread(simple = new SimpleThread(500));
assertFalse("A thread that wasn't started is alive.", st.isAlive());
synchronized (simple) {
st.start();
try {
simple.wait();
} catch (InterruptedException e) {
}
}
assertTrue("Started thread returned false", st.isAlive());
try {
st.join();
} catch (InterruptedException e) {
fail("Thread did not die");
}
assertTrue("Stopped thread returned true", !st.isAlive());
}
/**
* @tests java.lang.Thread#isDaemon()
*/
public void test_isDaemon() {
// Test for method boolean java.lang.Thread.isDaemon()
st = new Thread(new SimpleThread(1), "SimpleThread10");
assertTrue("Non-Daemon thread returned true", !st.isDaemon());
st.setDaemon(true);
assertTrue("Daemon thread returned false", st.isDaemon());
st.start();
}
/**
* @tests java.lang.Thread#isInterrupted()
*/
public void test_isInterrupted() {
// Test for method boolean java.lang.Thread.isInterrupted()
class SpinThread implements Runnable {
public volatile boolean done = false;
public void run() {
while (!Thread.currentThread().isInterrupted())
;
while (!done)
;
}
}
SpinThread spin = new SpinThread();
spinner = new Thread(spin);
spinner.start();
Thread.yield();
try {
assertTrue("Non-Interrupted thread returned true", !spinner
.isInterrupted());
spinner.interrupt();
assertTrue("Interrupted thread returned false", spinner
.isInterrupted());
spin.done = true;
} finally {
spinner.interrupt();
spin.done = true;
}
}
/**
* @tests java.lang.Thread#join()
*/
public void test_join() {
// Test for method void java.lang.Thread.join()
SimpleThread simple;
try {
st = new Thread(simple = new SimpleThread(100));
// cause isAlive() to be compiled by the JIT, as it must be called
// within 100ms below.
assertTrue("Thread is alive", !st.isAlive());
synchronized (simple) {
st.start();
simple.wait();
}
st.join();
} catch (InterruptedException e) {
fail("Join failed ");
}
assertTrue("Joined thread is still alive", !st.isAlive());
boolean result = true;
Thread th = new Thread("test");
try {
th.join();
} catch (InterruptedException e) {
result = false;
}
assertTrue("Hung joining a non-started thread", result);
th.start();
}
/**
* @tests java.lang.Thread#join(long)
*/
public void test_joinJ() {
// Test for method void java.lang.Thread.join(long)
SimpleThread simple;
try {
st = new Thread(simple = new SimpleThread(1000), "SimpleThread12");
// cause isAlive() to be compiled by the JIT, as it must be called
// within 100ms below.
assertTrue("Thread is alive", !st.isAlive());
synchronized (simple) {
st.start();
simple.wait();
}
st.join(10);
} catch (InterruptedException e) {
fail("Join failed ");
}
assertTrue("Join failed to timeout", st.isAlive());
st.interrupt();
try {
st = new Thread(simple = new SimpleThread(100), "SimpleThread13");
synchronized (simple) {
st.start();
simple.wait();
}
st.join(1000);
} catch (InterruptedException e) {
fail("Join failed : " + e.getMessage());
return;
}
assertTrue("Joined thread is still alive", !st.isAlive());
final Object lock = new Object();
final Thread main = Thread.currentThread();
Thread killer = new Thread(new Runnable() {
public void run() {
try {
synchronized (lock) {
lock.notify();
}
Thread.sleep(100);
} catch (InterruptedException e) {
return;
}
main.interrupt();
}
});
boolean result = true;
Thread th = new Thread("test");
try {
synchronized (lock) {
killer.start();
lock.wait();
}
th.join(200);
} catch (InterruptedException e) {
result = false;
}
killer.interrupt();
assertTrue("Hung joining a non-started thread", result);
th.start();
}
/**
* @tests java.lang.Thread#join(long, int)
*/
public void test_joinJI() throws Exception {
// Test for method void java.lang.Thread.join(long, int)
SimpleThread simple;
st = new Thread(simple = new SimpleThread(1000), "Squawk1");
assertTrue("Thread is alive", !st.isAlive());
synchronized (simple) {
st.start();
simple.wait();
}
long firstRead = System.currentTimeMillis();
st.join(100, 999999);
long secondRead = System.currentTimeMillis();
assertTrue("Did not join by appropriate time: " + secondRead + "-"
+ firstRead + "=" + (secondRead - firstRead), secondRead
- firstRead <= 300);
assertTrue("Joined thread is not alive", st.isAlive());
st.interrupt();
final Object lock = new Object();
final Thread main = Thread.currentThread();
Thread killer = new Thread(new Runnable() {
public void run() {
try {
synchronized (lock) {
lock.notify();
}
Thread.sleep(100);
} catch (InterruptedException e) {
return;
}
main.interrupt();
}
});
boolean result = true;
Thread th = new Thread("test");
try {
synchronized (lock) {
killer.start();
lock.wait();
}
th.join(200, 20);
} catch (InterruptedException e) {
result = false;
}
killer.interrupt();
assertTrue("Hung joining a non-started thread", result);
th.start();
}
/**
* @tests java.lang.Thread#resume()
*/
@SuppressWarnings("deprecation")
public void test_resume() {
// Test for method void java.lang.Thread.resume()
int orgval;
ResSupThread t;
try {
t = new ResSupThread(Thread.currentThread());
synchronized (t) {
ct = new Thread(t, "Interrupt Test2");
ct.start();
t.wait();
}
ct.suspend();
// Wait to be sure the suspend has occurred
Thread.sleep(500);
orgval = t.getCheckVal();
// Wait to be sure the thread is suspended
Thread.sleep(500);
assertTrue("Failed to suspend thread", orgval == t.getCheckVal());
ct.resume();
// Wait to be sure the resume has occurred.
Thread.sleep(500);
assertTrue("Failed to resume thread", orgval != t.getCheckVal());
ct.interrupt();
} catch (InterruptedException e) {
fail("Unexpected interrupt occurred : " + e.getMessage());
}
}
/**
* @tests java.lang.Thread#run()
*/
public void test_run() {
// Test for method void java.lang.Thread.run()
class RunThread implements Runnable {
boolean didThreadRun = false;
public void run() {
didThreadRun = true;
}
}
RunThread rt = new RunThread();
Thread t = new Thread(rt);
try {
t.start();
int count = 0;
while (!rt.didThreadRun && count < 20) {
Thread.sleep(100);
count++;
}
assertTrue("Thread did not run", rt.didThreadRun);
t.join();
} catch (InterruptedException e) {
assertTrue("Joined thread was interrupted", true);
}
assertTrue("Joined thread is still alive", !t.isAlive());
}
/**
* @tests java.lang.Thread#setDaemon(boolean)
*/
public void test_setDaemonZ() {
// Test for method void java.lang.Thread.setDaemon(boolean)
st = new Thread(new SimpleThread(1), "SimpleThread14");
st.setDaemon(true);
assertTrue("Failed to set thread as daemon thread", st.isDaemon());
st.start();
}
/**
* @tests java.lang.Thread#setName(java.lang.String)
*/
public void test_setNameLjava_lang_String() {
// Test for method void java.lang.Thread.setName(java.lang.String)
st = new Thread(new SimpleThread(1), "SimpleThread15");
st.setName("Bogus Name");
assertEquals("Failed to set thread name",
"Bogus Name", st.getName());
try {
st.setName(null);
fail("Null should not be accepted as a valid name");
} catch (NullPointerException e) {
// success
assertTrue("Null should not be accepted as a valid name", true);
}
st.start();
}
/**
* @tests java.lang.Thread#setPriority(int)
*/
public void test_setPriorityI() {
// Test for method void java.lang.Thread.setPriority(int)
st = new Thread(new SimpleThread(1));
st.setPriority(Thread.MAX_PRIORITY);
assertTrue("Failed to set priority",
st.getPriority() == Thread.MAX_PRIORITY);
st.start();
}
/**
* @tests java.lang.Thread#sleep(long)
*/
public void test_sleepJ() {
// Test for method void java.lang.Thread.sleep(long)
// TODO : Test needs enhancing.
long stime = 0, ftime = 0;
try {
stime = System.currentTimeMillis();
Thread.sleep(1000);
ftime = System.currentTimeMillis();
} catch (InterruptedException e) {
fail("Unexpected interrupt received");
}
assertTrue("Failed to sleep long enough", (ftime - stime) >= 800);
}
/**
* @tests java.lang.Thread#sleep(long, int)
*/
public void test_sleepJI() {
// Test for method void java.lang.Thread.sleep(long, int)
// TODO : Test needs revisiting.
long stime = 0, ftime = 0;
try {
stime = System.currentTimeMillis();
Thread.sleep(1000, 999999);
ftime = System.currentTimeMillis();
} catch (InterruptedException e) {
fail("Unexpected interrupt received");
}
long result = ftime - stime;
assertTrue("Failed to sleep long enough: " + result, result >= 900
&& result <= 1100);
}
/**
* @tests java.lang.Thread#start()
*/
public void test_start() {
// Test for method void java.lang.Thread.start()
try {
ResSupThread t = new ResSupThread(Thread.currentThread());
synchronized (t) {
ct = new Thread(t, "Interrupt Test4");
ct.start();
t.wait();
}
assertTrue("Thread is not running1", ct.isAlive());
// Let the child thread get going.
int orgval = t.getCheckVal();
Thread.sleep(150);
assertTrue("Thread is not running2", orgval != t.getCheckVal());
ct.interrupt();
} catch (InterruptedException e) {
fail("Unexpected interrupt occurred");
}
}
/**
* @tests java.lang.Thread#stop()
*/
@SuppressWarnings("deprecation")
public void test_stop() {
// Test for method void java.lang.Thread.stop()
try {
Runnable r = new ResSupThread(null);
synchronized (r) {
st = new Thread(r, "Interupt Test5");
st.start();
r.wait();
}
} catch (InterruptedException e) {
fail("Unexpected interrupt received");
}
st.stop();
try {
st.join(10000);
} catch (InterruptedException e1) {
st.interrupt();
fail("Failed to stopThread before 10000 timeout");
}
assertTrue("Failed to stopThread", !st.isAlive());
}
/**
* @tests java.lang.Thread#stop()
*/
@SuppressWarnings("deprecation")
public void test_stop_subtest0() {
Thread t = new Thread("t");
class MySecurityManager extends SecurityManager {
public boolean intest = false;
@Override
public void checkAccess(Thread t) {
if (intest) {
fail("checkAccess called");
}
}
@Override
public void checkPermission(Permission permission) {
if (permission.getName().equals("setSecurityManager")) {
return;
}
super.checkPermission(permission);
}
}
MySecurityManager sm = new MySecurityManager();
System.setSecurityManager(sm);
try {
sm.intest = true;
try {
t.stop();
// Ignore any SecurityExceptions, may not have stopThread
// permission
} catch (SecurityException e) {
}
sm.intest = false;
t.start();
try {
t.join(2000);
} catch (InterruptedException e) {
}
sm.intest = true;
try {
t.stop();
// Ignore any SecurityExceptions, may not have stopThread
// permission
} catch (SecurityException e) {
}
sm.intest = false;
} finally {
System.setSecurityManager(null);
}
}
/**
* @tests java.lang.Thread#stop(java.lang.Throwable)
*/
@SuppressWarnings("deprecation")
public void test_stopLjava_lang_Throwable_subtest0() {
Thread t = new Thread("t");
class MySecurityManager extends SecurityManager {
public boolean intest = false;
public boolean checkAccess = false;
@Override
public void checkAccess(Thread t) {
if (intest) {
checkAccess = true;
}
}
@Override
public void checkPermission(Permission permission) {
if (permission.getName().equals("setSecurityManager")) {
return;
}
super.checkPermission(permission);
}
}
MySecurityManager sm = new MySecurityManager();
System.setSecurityManager(sm);
try {
sm.intest = true;
try {
t.stop(new ThreadDeath());
// Ignore any SecurityExceptions, may not have stopThread
// permission
} catch (SecurityException e) {
}
sm.intest = false;
assertTrue("no checkAccess 1", sm.checkAccess);
t.start();
try {
t.join(2000);
} catch (InterruptedException e) {
}
sm.intest = true;
sm.checkAccess = false;
try {
t.stop(new ThreadDeath());
// Ignore any SecurityExceptions, may not have stopThread
// permission
} catch (SecurityException e) {
}
assertTrue("no checkAccess 2", sm.checkAccess);
sm.intest = false;
} finally {
System.setSecurityManager(null);
}
}
/**
* @tests java.lang.Thread#stop(java.lang.Throwable)
*/
@SuppressWarnings("deprecation")
public void test_stopLjava_lang_Throwable() {
// Test for method void java.lang.Thread.stop(java.lang.Throwable)
ResSupThread t = new ResSupThread(Thread.currentThread());
synchronized (t) {
st = new Thread(t, "StopThread");
st.setPriority(Thread.MAX_PRIORITY);
st.start();
try {
t.wait();
} catch (InterruptedException e) {
}
}
try {
st.stop(new BogusException("Bogus"));
Thread.sleep(20000);
} catch (InterruptedException e) {
assertTrue("Stopped child with exception not alive", st.isAlive());
st.interrupt();
return;
}
st.interrupt();
fail("Stopped child did not throw exception");
}
/**
* @tests java.lang.Thread#suspend()
*/
@SuppressWarnings("deprecation")
public void test_suspend() {
// Test for method void java.lang.Thread.suspend()
int orgval;
ResSupThread t = new ResSupThread(Thread.currentThread());
try {
synchronized (t) {
ct = new Thread(t, "Interupt Test6");
ct.start();
t.wait();
}
ct.suspend();
// Wait to be sure the suspend has occurred
Thread.sleep(500);
orgval = t.getCheckVal();
// Wait to be sure the thread is suspended
Thread.sleep(500);
assertTrue("Failed to suspend thread", orgval == t.getCheckVal());
ct.resume();
// Wait to be sure the resume has occurred.
Thread.sleep(500);
assertTrue("Failed to resume thread", orgval != t.getCheckVal());
ct.interrupt();
} catch (InterruptedException e) {
fail("Unexpected interrupt occurred");
}
final Object notify = new Object();
Thread t1 = new Thread(new Runnable() {
public void run() {
synchronized (notify) {
notify.notify();
}
Thread.currentThread().suspend();
}
});
try {
synchronized (notify) {
t1.start();
notify.wait();
}
// wait for Thread to suspend
Thread.sleep(500);
assertTrue("Thread should be alive", t1.isAlive());
t1.resume();
t1.join();
} catch (InterruptedException e) {
}
}
/**
* @tests java.lang.Thread#toString()
*/
public void test_toString() {
// Test for method java.lang.String java.lang.Thread.toString()
ThreadGroup tg = new ThreadGroup("Test Group5");
st = new Thread(tg, new SimpleThread(1), "SimpleThread17");
final String stString = st.toString();
final String expected = "Thread[SimpleThread17,5,Test Group5]";
assertTrue("Returned incorrect string: " + stString + "\t(expecting :"
+ expected + ")", stString.equals(expected));
st.start();
try {
st.join();
} catch (InterruptedException e) {
}
tg.destroy();
}
/**
* @tests java.lang.Thread#getAllStackTraces()
*/
public void test_getAllStackTraces() {
Map<Thread, StackTraceElement[]> stMap = Thread.getAllStackTraces();
assertNotNull(stMap);
//TODO add security-based tests
}
/**
* @tests java.lang.Thread#getDefaultUncaughtExceptionHandler
* @tests java.lang.Thread#setDefaultUncaughtExceptionHandler
*/
public void test_get_setDefaultUncaughtExceptionHandler() {
class Handler implements UncaughtExceptionHandler {
public void uncaughtException(Thread thread, Throwable ex) {
}
}
final Handler handler = new Handler();
Thread.setDefaultUncaughtExceptionHandler(handler);
assertSame(handler, Thread.getDefaultUncaughtExceptionHandler());
Thread.setDefaultUncaughtExceptionHandler(null);
assertNull(Thread.getDefaultUncaughtExceptionHandler());
//TODO add security-based tests
}
/**
* @tests java.lang.Thread#getStackTrace()
*/
public void test_getStackTrace() {
StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
assertNotNull(stackTrace);
stack_trace_loop: {
for (int i = 0; i < stackTrace.length; i++) {
StackTraceElement e = stackTrace[i];
if (getClass().getName().equals(e.getClassName())) {
if ("test_getStackTrace".equals(e.getMethodName())) {
break stack_trace_loop;
}
}
}
fail("class and method not found in stack trace");
}
//TODO add security-based tests
}
/**
 * @tests java.lang.Thread#getState()
 */
public void test_getState() {
    // The thread running this test is, by definition, currently runnable.
    final Thread.State current = Thread.currentThread().getState();
    assertNotNull(current);
    assertEquals(Thread.State.RUNNABLE, current);
    //TODO add additional state tests
}
/**
 * @tests java.lang.Thread#getUncaughtExceptionHandler
 * @tests java.lang.Thread#setUncaughtExceptionHandler
 */
public void test_get_setUncaughtExceptionHandler() {
    // Minimal no-op handler; only its identity matters for this test.
    class Handler implements UncaughtExceptionHandler {
        public void uncaughtException(Thread thread, Throwable ex) {
        }
    }
    final Handler handler = new Handler();
    Thread.currentThread().setUncaughtExceptionHandler(handler);
    assertSame(handler, Thread.currentThread().getUncaughtExceptionHandler());
    Thread.currentThread().setUncaughtExceptionHandler(null);
    // Per the Thread API, with no explicit handler set the getter falls back to
    // the thread's group, which itself implements UncaughtExceptionHandler.
    assertSame(Thread.currentThread().getThreadGroup(),
            Thread.currentThread().getUncaughtExceptionHandler());
    //TODO add security-based tests
}
/**
 * @tests java.lang.Thread#getId()
 */
public void test_getId() {
    assertTrue("current thread's ID is not positive", Thread.currentThread().getId() > 0);
    //check all the current threads for positive IDs
    for (Thread thread : Thread.getAllStackTraces().keySet()) {
        assertTrue("thread's ID is not positive: " + thread.getName(), thread.getId() > 0);
    }
}
/**
 * Stops any helper threads started by the tests and drops the references so
 * the threads can be reclaimed between test runs.
 */
@Override
protected void tearDown() {
    interruptQuietly(st);
    interruptQuietly(spinner);
    interruptQuietly(ct);
    try {
        spinner = null;
        st = null;
        ct = null;
        System.runFinalization();
    } catch (Exception e) {
        // Best effort cleanup only; tearDown must not fail the test run.
    }
}

/** Interrupts {@code t} if non-null, ignoring any failure to do so. */
private static void interruptQuietly(Thread t) {
    try {
        if (t != null)
            t.interrupt();
    } catch (Exception e) {
        // e.g. a SecurityException from interrupt() must not abort tearDown.
    }
}
}
|
apache/ignite | 37,179 | modules/core/src/test/java/org/apache/ignite/internal/processors/database/IgniteDbPutGetAbstractTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.database;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.ThreadLocalRandom;
import javax.cache.Cache;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cache.query.QueryCursor;
import org.apache.ignite.cache.query.ScanQuery;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cache.query.SqlQuery;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.failure.FailureHandler;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.processors.cache.GridCacheAdapter;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearCacheAdapter;
import org.apache.ignite.internal.util.GridRandom;
import org.apache.ignite.internal.util.typedef.PA;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.GridTestUtils.SF;
import org.junit.Assert;
import org.junit.Test;
import static org.apache.ignite.configuration.DataStorageConfiguration.DFLT_WAL_SEGMENT_SIZE;
import static org.apache.ignite.testframework.GridTestUtils.assertThrows;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;
/**
*
*/
public abstract class IgniteDbPutGetAbstractTest extends IgniteDbAbstractTest {
/** */
private static final int KEYS_COUNT = SF.applyLB(10_000, 2_000);
/** Index of Ignite node with a reduced WAL buffer size. */
private static final int smallWalBufSizeNodeIdx = 0;
/** Set of nodes that indicates system critical failure on a particular node. */
private final Set<String> failedNodes = new ConcurrentSkipListSet<>();
/**
 * @return Ignite instance for testing.
 */
private IgniteEx ig() {
    // With a client near cache the extra client node is started last, at index gridCount().
    return withClientNearCache() ? grid(gridCount()) : grid(0);
}
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(gridName);
// Shrink both the WAL buffer and the WAL segment on the designated node so
// that oversized entries cannot be written to its WAL (see testPutLargeEntry).
if (getTestIgniteInstanceIndex(gridName) == smallWalBufSizeNodeIdx) {
cfg.getDataStorageConfiguration()
.setWalBufferSize(DFLT_WAL_SEGMENT_SIZE / 4)
.setWalSegmentSize(DFLT_WAL_SEGMENT_SIZE / 4);
}
return cfg;
}
/** {@inheritDoc} */
@Override protected FailureHandler getFailureHandler(String igniteInstanceName) {
// Record the name of any node that reports a system critical failure;
// tests assert against this set (see testPutLargeEntry).
return (ignite, failureCtx) -> {
failedNodes.add(ignite.name());
return true;
};
}
/**
 * @return Cache for testing.
 * @throws Exception If failed.
 */
private <K, V> IgniteCache<K, V> cache(String name) throws Exception {
    if (!withClientNearCache())
        return ig().cache(name);
    // Client-near-cache mode: access the cache through a near cache on the client node.
    return ig().getOrCreateNearCache(name, new NearCacheConfiguration<K, V>());
}
/**
 * Repeatedly grows the cache to half of the key space and shrinks it back to a
 * quarter via random batched putAll/removeAll, verifying the cache contents
 * against a reference map after every iteration.
 */
@Test
public void testGradualRandomPutAllRemoveAll() throws Exception {
    IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
    final int cnt = KEYS_COUNT;
    Random rnd = new Random();
    Map<Integer, DbValue> map = new HashMap<>();
    for (int i = 0; i < SF.applyLB(5, 3); i++) {
        info("Iteration: " + i);
        info("Grow...");
        while (map.size() < cnt / 2)
            doPutRemoveAll(rnd, cache, map, cnt, true);
        info("Shrink...");
        while (map.size() > cnt / 4)
            doPutRemoveAll(rnd, cache, map, cnt, false);
        info("Check...");
        // Iterate entries instead of keySet + get: avoids a second map lookup per key.
        for (Map.Entry<Integer, DbValue> e : map.entrySet())
            assertEquals(e.getValue(), cache.get(e.getKey()));
    }
}
/**
 * Performs one randomized putAll/removeAll round against both the cache and
 * the reference map. When {@code grow} is {@code true} more keys are added
 * than removed; otherwise the reverse.
 */
private void doPutRemoveAll(Random rnd, IgniteCache<Integer, DbValue> cache, Map<Integer, DbValue> map,
    int keysCnt, boolean grow) {
    final int putCnt = grow ? 20 + rnd.nextInt(10) : 1 + rnd.nextInt(5);
    final int rmvCnt = grow ? 1 + rnd.nextInt(5) : 20 + rnd.nextInt(10);
    Map<Integer, DbValue> batch = new HashMap<>(putCnt);
    for (int i = 0; i < putCnt; i++) {
        int key = rnd.nextInt(keysCnt);
        batch.put(key, new DbValue(rnd.nextInt(500), rnd.nextInt(500) + "-value", i));
    }
    // Apply the same batch to the reference map and to the cache.
    map.putAll(batch);
    cache.putAll(batch);
    Set<Integer> victims = new HashSet<>();
    for (int i = 0; i < rmvCnt; i++) {
        int key = rnd.nextInt(keysCnt);
        victims.add(key);
        map.remove(key);
    }
    cache.removeAll(victims);
}
/**
 * Inserts all keys in one random order, then removes them in another.
 */
@Test
public void testRandomRemove() throws Exception {
    IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
    final int cnt = SF.apply(30_000);
    // Log the seed so a failing run can be reproduced.
    long seed = System.nanoTime();
    X.println("Seed: " + seed);
    Random rnd = new GridRandom(seed);
    int[] keys = generateUniqueRandomKeys(cnt, rnd);
    X.println("Put start");
    for (int key : keys) {
        DbValue val = new DbValue(key, "test-value", key);
        cache.put(key, val);
        assertEquals(val, cache.get(key));
    }
    keys = generateUniqueRandomKeys(cnt, rnd);
    X.println("Rmv start");
    for (int key : keys)
        assertTrue(cache.remove(key));
}
/**
 * Randomly overwrites a small key range many times, verifying each write.
 */
@Test
public void testRandomPut() throws Exception {
    IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
    final int keyRange = 1_000;
    // Log the seed so a failing run can be reproduced.
    long seed = System.nanoTime();
    X.println("Seed: " + seed);
    Random rnd = new GridRandom(seed);
    for (int iter = 0; iter < 50_000; iter++) {
        int key = rnd.nextInt(keyRange);
        DbValue val = new DbValue(key, "test-value " + key, iter);
        if (iter % 1000 == 0)
            X.println(" --> " + iter);
        cache.put(key, val);
        assertEquals(val, cache.get(key));
    }
}
/**
 * Single put/get round-trip; after every operation checkEmpty verifies the
 * entry is not retained as an on-heap copy.
 */
@Test
public void testPutGetSimple() throws Exception {
    IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
    GridCacheAdapter<Integer, DbValue> internalCache = internalCache(cache);
    final int key = 0;
    DbValue val = new DbValue(0, "value-0", 0L);
    cache.put(key, val);
    checkEmpty(internalCache, key);
    // Read twice: the second get must not be served from a heap copy either.
    assertEquals(val, cache.get(key));
    checkEmpty(internalCache, key);
    assertEquals(val, cache.get(key));
    checkEmpty(internalCache, key);
}
/**
* @throws Exception if failed.
*/
@Test
public void testPutGetLarge() throws Exception {
IgniteCache<Integer, byte[]> cache = cache(DEFAULT_CACHE_NAME);
final byte[] val = new byte[2048];
ThreadLocalRandom.current().nextBytes(val);
cache.put(0, val);
Assert.assertArrayEquals(val, cache.get(0));
final IgniteCache<Integer, LargeDbValue> cache1 = cache("large");
final LargeDbValue large = new LargeDbValue("str1", "str2", randomInts(1024));
cache1.put(1, large);
assertEquals(large, cache1.get(1));
if (indexingEnabled()) {
final List<Cache.Entry<Integer, LargeDbValue>> all = cache1.query(
new SqlQuery<Integer, LargeDbValue>(LargeDbValue.class, "str1='str1'")).getAll();
assertEquals(1, all.size());
final Cache.Entry<Integer, LargeDbValue> entry = all.get(0);
assertEquals(1, entry.getKey().intValue());
assertEquals(large, entry.getValue());
}
cache.remove(0);
cache1.remove(1);
assertNull(cache.get(0));
assertNull(cache1.get(1));
}
/**
* Tests that putting a large entry, which size is greater than WAL buffer/segment size, results in CacheException.
*
* @throws Exception If failed.
*/
@Test
public void testPutLargeEntry() throws Exception {
assertTrue(
"Primary key should correspond to the node with small wal buffer size.",
smallWalBufSizeNodeIdx == 0);
IgniteCache<Integer, byte[]> atomicCache = grid(0).cache("atomic");
Integer atomicPrimaryKey = primaryKey(atomicCache);
// New value which is greater than WAL segment size / WAL buffer.
byte[] newVal = new byte[DFLT_WAL_SEGMENT_SIZE / 2];
assertThrows(
log,
() -> atomicCache.put(atomicPrimaryKey, newVal),
IgniteException.class,
null);
assertNull("Unexpected non-null value.", atomicCache.get(atomicPrimaryKey));
assertTrue("Unexpected system critical error.", failedNodes.isEmpty());
// Check backup scenario.
if (gridCount() > 1) {
Integer atomicBackupKey = backupKey(atomicCache);
Ignite primaryNode = primaryNode(atomicBackupKey, atomicCache.getName());
// Primary node should be updated successfully,
// however, backup node should fail because of size of the new entry does not allow to write it to WAL.
assertThrows(
log,
() -> atomicCache.put(atomicBackupKey, newVal),
IgniteException.class,
"Failed to update keys");
assertThat(
"Unexpected value.",
primaryNode.cache(atomicCache.getName()).get(atomicBackupKey),
is(newVal));
assertThat(
"Failure handler was not triggered on backup node.",
failedNodes,
hasItem(grid(0).name()));
assertFalse("Unexpected system critical error(s).", failedNodes.size() > 1);
}
}
/**
 * @throws Exception If failed.
 */
@Test
public void testPutGetLargeKeys() throws Exception {
IgniteCache<LargeDbKey, Integer> cache = ignite(0).cache(DEFAULT_CACHE_NAME);
ThreadLocalRandom rnd = ThreadLocalRandom.current();
Map<Integer, LargeDbKey> keys = new HashMap<>();
for (int i = 0; i < 100; i++) {
// Second ctor argument is presumably a key payload size in [512, 1536) — confirm in LargeDbKey.
LargeDbKey key = new LargeDbKey(i, 512 + rnd.nextInt(1024));
assertNull(cache.get(key));
cache.put(key, i);
keys.put(i, key);
}
// Bulk read-back: every distinct key must map back to its own value.
Map<LargeDbKey, Integer> res = cache.getAll(new HashSet<>(keys.values()));
assertEquals(keys.size(), res.size());
for (Map.Entry<Integer, LargeDbKey> e : keys.entrySet())
assertEquals(e.getKey(), res.get(e.getValue()));
// After bulk removal no key may be readable.
cache.removeAll(new HashSet<>(keys.values()));
for (LargeDbKey key : keys.values())
assertNull(cache.get(key));
}
/**
 * @param size Array size.
 * @return Array of {@code size} pseudo-random ints.
 */
private int[] randomInts(final int size) {
    // ints(size) yields exactly `size` values; avoids the manual fill loop.
    return ThreadLocalRandom.current().ints(size).toArray();
}
/**
 * Verifies that overwriting an existing key yields the new value on read.
 */
@Test
public void testPutGetOverwrite() throws Exception {
    final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
    GridCacheAdapter<Integer, DbValue> internalCache = internalCache(cache);
    final int key = 0;
    DbValue first = new DbValue(0, "value-0", 0L);
    cache.put(key, first);
    checkEmpty(internalCache, key);
    assertEquals(first, cache.get(key));
    checkEmpty(internalCache, key);
    // Overwrite the same key; the next read must return the new value.
    DbValue second = new DbValue(1, "value-1", 1L);
    cache.put(key, second);
    checkEmpty(internalCache, key);
    assertEquals(second, cache.get(key));
}
/**
 * Overwrites one key repeatedly, alternating between a long and a short
 * payload so every overwrite changes the entry size.
 */
@Test
public void testOverwriteNormalSizeAfterSmallerSize() throws Exception {
    final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
    GridCacheAdapter<Integer, DbValue> internalCache = internalCache(cache);
    String[] vals = {"long-long-long-value", "short-value"};
    final int key = 0;
    for (int i = 0; i < 10; i++) {
        DbValue val = new DbValue(i, vals[i % vals.length], i);
        info("Update.... " + i);
        cache.put(key, val);
        checkEmpty(internalCache, key);
        assertEquals(val, cache.get(key));
    }
}
/**
 * @throws Exception if failed.
 */
@Test
public void testPutDoesNotTriggerRead() throws Exception {
IgniteEx ig = grid(0);
final IgniteCache<Integer, DbValue> cache = ig.cache(DEFAULT_CACHE_NAME);
// Single put into an empty cache. Per the test name, a put alone should not
// trigger a read — presumably validated by the absence of errors; confirm
// whether any instrumentation elsewhere checks this.
cache.put(0, new DbValue(0, "test-value-0", 0));
}
/**
* @throws Exception if failed.
*/
@Test
public void testPutGetMultipleObjects() throws Exception {
final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
GridCacheAdapter<Integer, DbValue> internalCache = internalCache(cache);
int cnt = 20_000;
X.println("Put start");
for (int i = 0; i < cnt; i++) {
DbValue v0 = new DbValue(i, "test-value", i);
// if (i % 1000 == 0)
// X.println(" --> " + i);
cache.put(i, v0);
checkEmpty(internalCache, i);
assertEquals(v0, cache.get(i));
}
X.println("Get start");
for (int i = 0; i < cnt; i++) {
DbValue v0 = new DbValue(i, "test-value", i);
checkEmpty(internalCache, i);
// X.println(" <-- " + i);
assertEquals(v0, cache.get(i));
}
assertEquals(cnt, cache.size());
if (indexingEnabled()) {
awaitPartitionMapExchange();
X.println("Query start");
assertEquals(cnt, cache.query(new SqlFieldsQuery("select null from dbvalue")).getAll().size());
List<List<?>> res = cache.query(new SqlFieldsQuery("select ival, _val from dbvalue where ival < ? order by ival asc")
.setArgs(10_000)).getAll();
assertEquals(10_000, res.size());
for (int i = 0; i < 10_000; i++) {
List<?> row = res.get(i);
assertEquals(2, row.size());
assertEquals(i, row.get(0));
assertEquals(new DbValue(i, "test-value", i), row.get(1));
}
assertEquals(1, cache.query(new SqlFieldsQuery("select lval from dbvalue where ival = 7899")).getAll().size());
assertEquals(2000, cache.query(new SqlFieldsQuery("select lval from dbvalue where ival >= 5000 and ival < 7000"))
.getAll().size());
String plan = cache.query(new SqlFieldsQuery(
"explain select lval from dbvalue where ival >= 5000 and ival < 7000")).getAll().get(0).get(0).toString();
assertTrue(plan, plan.contains("IVAL_IDX"));
}
assertTrue(cache.localSize(CachePeekMode.BACKUP) >= 0);
}
/**
 * Fills the cache, checks the off-heap size, clears it, and verifies that
 * both the size and every individual key report empty afterwards.
 */
@Test
public void testSizeClear() throws Exception {
    final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
    GridCacheAdapter<Integer, DbValue> internalCache = internalCache(cache);
    final int cnt = 5000;
    X.println("Put start");
    for (int key = 0; key < cnt; key++) {
        DbValue val = new DbValue(key, "test-value", key);
        cache.put(key, val);
        checkEmpty(internalCache, key);
        assertEquals(val, cache.get(key));
    }
    awaitPartitionMapExchange();
    assertEquals(cnt, cache.size(CachePeekMode.OFFHEAP));
    X.println("Clear start.");
    cache.clear();
    assertEquals(0, cache.size(CachePeekMode.OFFHEAP));
    for (int key = 0; key < cnt; key++)
        assertNull(cache.get(key));
}
/**
* @throws Exception if failed.
*/
@Test
public void testBounds() throws Exception {
IgniteEx ig = ig();
final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
X.println("Put start");
int cnt = 1000;
try (IgniteDataStreamer<Integer, DbValue> st = ig.dataStreamer(DEFAULT_CACHE_NAME)) {
st.allowOverwrite(true);
for (int i = 0; i < cnt; i++) {
int k = 2 * i;
DbValue v0 = new DbValue(k, "test-value", k);
st.addData(k, v0);
}
}
X.println("Get start");
for (int i = 0; i < cnt; i++) {
int k = 2 * i;
DbValue v0 = new DbValue(k, "test-value", k);
assertEquals(v0, cache.get(k));
}
if (indexingEnabled()) {
awaitPartitionMapExchange();
X.println("Query start");
// Make sure to cover multiple pages.
int limit = 500;
for (int i = 0; i < limit; i++) {
List<List<?>> res = cache.query(new SqlFieldsQuery("select ival, _val from dbvalue where ival < ? order by ival")
.setArgs(i)).getAll();
// 0 => 0, 1 => 1, 2=>1,...
assertEquals((i + 1) / 2, res.size());
res = cache.query(new SqlFieldsQuery("select ival, _val from dbvalue where ival <= ? order by ival")
.setArgs(i)).getAll();
// 0 => 1, 1 => 1, 2=>2,...
assertEquals(i / 2 + 1, res.size());
}
}
}
/**
* @throws Exception if failed.
*/
@Test
public void testMultithreadedPut() throws Exception {
IgniteEx ig = ig();
final IgniteCache<Integer, DbValue> cache = ig.cache(DEFAULT_CACHE_NAME);
X.println("Put start");
int cnt = 20_000;
try (IgniteDataStreamer<Integer, DbValue> st = ig.dataStreamer(DEFAULT_CACHE_NAME)) {
st.allowOverwrite(true);
for (int i = 0; i < cnt; i++) {
DbValue v0 = new DbValue(i, "test-value", i);
st.addData(i, v0);
}
}
X.println("Get start");
for (int i = 0; i < cnt; i++) {
DbValue v0 = new DbValue(i, "test-value", i);
assertEquals(v0, cache.get(i));
}
if (indexingEnabled()) {
awaitPartitionMapExchange();
X.println("Query start");
assertEquals(cnt, cache.query(new SqlFieldsQuery("select null from dbvalue")).getAll().size());
int limit = 500;
List<List<?>> res = cache.query(new SqlFieldsQuery("select ival, _val from dbvalue where ival < ? order by ival")
.setArgs(limit)).getAll();
assertEquals(limit, res.size());
for (int i = 0; i < limit; i++) {
List<?> row = res.get(i);
assertEquals(2, row.size());
assertEquals(i, row.get(0));
assertEquals(new DbValue(i, "test-value", i), row.get(1));
}
assertEquals(1, cache.query(new SqlFieldsQuery("select lval from dbvalue where ival = 7899")).getAll().size());
assertEquals(2000, cache.query(new SqlFieldsQuery("select lval from dbvalue where ival >= 5000 and ival < 7000"))
.getAll().size());
String plan = cache.query(new SqlFieldsQuery(
"explain select lval from dbvalue where ival >= 5000 and ival < 7000")).getAll().get(0).get(0).toString();
assertTrue(plan, plan.contains("IVAL_IDX"));
}
}
/**
* @throws Exception if failed.
*/
@Test
public void testPutGetRandomUniqueMultipleObjects() throws Exception {
final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
GridCacheAdapter<Integer, DbValue> internalCache = internalCache(cache);
int cnt = KEYS_COUNT;
Random rnd = new GridRandom();
int[] keys = generateUniqueRandomKeys(cnt, rnd);
X.println("Put start");
for (int i : keys) {
DbValue v0 = new DbValue(i, "test-value", i);
// if (i % 100 == 0)
// X.println(" --> " + i);
cache.put(i, v0);
checkEmpty(internalCache, i);
assertEquals(v0, cache.get(i));
// for (int j : keys) {
// if (j == i)
// break;
//
// assertEquals( i + ", " + j, new DbValue(j, "test-value", j), cache.get(j));
// }
}
X.println("Get start");
for (int i = 0; i < cnt; i++) {
DbValue v0 = new DbValue(i, "test-value", i);
checkEmpty(internalCache, i);
// X.println(" <-- " + i);
assertEquals(v0, cache.get(i));
}
}
/**
 * @param cnt Number of keys.
 * @param rnd Random source.
 * @return Keys {@code 0..cnt-1} in uniformly random order.
 */
private static int[] generateUniqueRandomKeys(int cnt, Random rnd) {
    int[] keys = new int[cnt];
    for (int i = 0; i < cnt; i++)
        keys[i] = i;
    // In-place Fisher–Yates shuffle: avoids the Integer boxing and the extra
    // stream copy of the original Collections.shuffle + mapToInt approach.
    for (int i = cnt - 1; i > 0; i--) {
        int j = rnd.nextInt(i + 1);
        int tmp = keys[i];
        keys[i] = keys[j];
        keys[j] = tmp;
    }
    return keys;
}
/**
* @throws Exception If failed.
*/
@Test
public void testPutPrimaryUniqueSecondaryDuplicates() throws Exception {
final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
GridCacheAdapter<Integer, DbValue> internalCache = internalCache(cache);
int cnt = KEYS_COUNT;
Random rnd = new GridRandom();
Map<Integer, DbValue> map = new HashMap<>();
int[] keys = generateUniqueRandomKeys(cnt, rnd);
X.println("Put start");
for (int i : keys) {
DbValue v0 = new DbValue(rnd.nextInt(30), "test-value", i);
// X.println(" --> " + i);
cache.put(i, v0);
map.put(i, v0);
checkEmpty(internalCache, i);
assertEquals(v0, cache.get(i));
}
X.println("Get start");
for (int i = 0; i < cnt; i++) {
DbValue v0 = map.get(i);
checkEmpty(internalCache, i);
// X.println(" <-- " + i);
assertEquals(v0, cache.get(i));
}
}
/**
* @throws Exception if failed.
*/
@Test
public void testPutGetRandomNonUniqueMultipleObjects() throws Exception {
final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
GridCacheAdapter<Integer, DbValue> internalCache = internalCache(cache);
int cnt = KEYS_COUNT;
Random rnd = new GridRandom();
Map<Integer, DbValue> map = new HashMap<>();
X.println("Put start");
for (int a = 0; a < cnt; a++) {
int i = rnd.nextInt();
int k = rnd.nextInt(cnt);
DbValue v0 = new DbValue(k, "test-value", i);
// if (a % 100 == 0)
// X.println(" --> " + k + " = " + i);
map.put(k, v0);
cache.put(k, v0);
checkEmpty(internalCache, k);
assertEquals(v0, cache.get(k));
// for (Map.Entry<Integer,DbValue> entry : map.entrySet())
// assertEquals(entry.getValue(), cache.get(entry.getKey()));
}
X.println("Get start: " + map.size());
for (int i : map.keySet()) {
checkEmpty(internalCache, i);
// X.println(" <-- " + i);
assertEquals(map.get(i), cache.get(i));
}
}
/**
 * @throws Exception if failed.
 */
@Test
public void testPutGetRemoveMultipleForward() throws Exception {
final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
GridCacheAdapter<Integer, DbValue> internalCache = internalCache(cache);
int cnt = KEYS_COUNT;
X.println("Put.");
for (int i = 0; i < cnt; i++) {
DbValue v0 = new DbValue(i, "test-value", i);
// if (i % 100 == 0)
// X.println(" --> " + i);
cache.put(i, v0);
checkEmpty(internalCache, i);
assertEquals(v0, cache.get(i));
}
X.println("Start removing.");
// Remove in ascending key order; after each removal the next key must still be intact.
for (int i = 0; i < cnt; i++) {
if (i % 1000 == 0) {
X.println("-> " + i);
// assertEquals((long)(cnt - i),
// cache.query(new SqlFieldsQuery("select count(*) from dbvalue")).getAll().get(0).get(0));
}
cache.remove(i);
assertNull(cache.get(i));
if (i + 1 < cnt)
assertEquals(new DbValue(i + 1, "test-value", i + 1), cache.get(i + 1));
}
}
/**
 * @throws Exception If failed.
 */
@Test
public void testRandomPutGetRemove() throws Exception {
final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
int cnt = KEYS_COUNT;
Map<Integer, DbValue> map = new HashMap<>(cnt);
// Seed is logged so a failing run can be replayed.
long seed = System.currentTimeMillis();
int iterations = SF.apply(90_000);
X.println("Seed: " + seed);
X.println("Iterations total: " + iterations);
Random rnd = new GridRandom(seed);
for (int i = 0; i < iterations; i++) {
if (i % 5000 == 0)
X.println("Iteration #" + i);
int key = rnd.nextInt(cnt);
DbValue v0 = new DbValue(key, "test-value-" + rnd.nextInt(200), rnd.nextInt(500));
switch (rnd.nextInt(3)) {
case 0:
assertEquals(map.put(key, v0), cache.getAndPut(key, v0));
// NOTE(review): no break here, so every put falls through into the case-1
// get check. Harmless (the value was just written) but looks unintentional —
// confirm the fall-through is deliberate or add a break.
case 1:
assertEquals(map.get(key), cache.get(key));
break;
case 2:
assertEquals(map.remove(key), cache.getAndRemove(key));
assertNull(cache.get(key));
}
}
// Final consistency check: cache must match the reference map exactly.
assertEquals(map.size(), cache.size());
for (Cache.Entry<Integer, DbValue> entry : cache.query(new ScanQuery<Integer, DbValue>()))
assertEquals(map.get(entry.getKey()), entry.getValue());
}
/**
 * Mirror of {@code testPutGetRemoveMultipleForward}: keys are removed in
 * descending order.
 */
@Test
public void testPutGetRemoveMultipleBackward() throws Exception {
final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
GridCacheAdapter<Integer, DbValue> internalCache = internalCache(cache);
int cnt = KEYS_COUNT;
X.println("Put.");
for (int i = 0; i < cnt; i++) {
DbValue v0 = new DbValue(i, "test-value", i);
// if (i % 100 == 0)
// X.println(" --> " + i);
cache.put(i, v0);
checkEmpty(internalCache, i);
assertEquals(v0, cache.get(i));
}
X.println("Start removing in backward direction.");
// After each removal the previous (still smaller) key must be intact.
for (int i = cnt - 1; i >= 0; i--) {
if (i % 1000 == 0) {
X.println("-> " + i);
// assertEquals((long)(cnt - i),
// cache.query(new SqlFieldsQuery("select count(*) from dbvalue")).getAll().get(0).get(0));
}
cache.remove(i);
assertNull(cache.get(i));
if (i - 1 >= 0)
assertEquals(new DbValue(i - 1, "test-value", i - 1), cache.get(i - 1));
}
}
/**
 * @throws Exception if failed.
 */
@Test
public void testIndexOverwrite() throws Exception {
final IgniteCache<Integer, DbValue> cache = cache(DEFAULT_CACHE_NAME);
GridCacheAdapter<Integer, DbValue> internalCache = internalCache(cache);
X.println("Put start");
int cnt = 10_000;
for (int a = 0; a < cnt; a++) {
DbValue v0 = new DbValue(a, "test-value-" + a, a);
DbKey k0 = new DbKey(a);
cache.put(a, v0);
// NOTE(review): the cache is keyed by Integer but checkEmpty is passed a DbKey,
// which can never exist in this cache, so this check is trivially true —
// confirm whether the Integer key `a` was intended here.
checkEmpty(internalCache, k0);
}
info("Update start");
// Randomly overwrite batches of existing entries (plus one removal per round)
// to churn index updates.
for (int k = 0; k < 4000; k++) {
int batchSize = 20;
LinkedHashMap<Integer, DbValue> batch = new LinkedHashMap<>();
for (int i = 0; i < batchSize; i++) {
int a = ThreadLocalRandom.current().nextInt(cnt);
DbValue v0 = new DbValue(a, "test-value-" + a, a);
batch.put(a, v0);
}
cache.putAll(batch);
cache.remove(ThreadLocalRandom.current().nextInt(cnt));
}
}
/**
* @throws Exception if failed.
*/
@Test
public void testObjectKey() throws Exception {
IgniteEx ig = ig();
final IgniteCache<DbKey, DbValue> cache = cache("non-primitive");
GridCacheAdapter<DbKey, DbValue> internalCache = internalCache(cache);
int cnt = KEYS_COUNT;
Map<DbKey, DbValue> map = new HashMap<>();
X.println("Put start");
for (int a = 0; a < cnt; a++) {
DbValue v0 = new DbValue(a, "test-value", a);
// if (a % 100 == 0)
// X.println(" --> " + k + " = " + i);
DbKey k0 = new DbKey(a);
map.put(k0, v0);
cache.put(k0, v0);
checkEmpty(internalCache, k0);
// assertEquals(v0, cache.get(k0));
// for (Map.Entry<Integer,DbValue> entry : map.entrySet())
// assertEquals(entry.getValue(), cache.get(entry.getKey()));
}
X.println("Get start: " + map.size());
for (DbKey i : map.keySet()) {
// checkEmpty(internalCache, i);
// X.println(" <-- " + i);
assertEquals(map.get(i), cache.get(i));
}
}
/**
 * Populates a cache and cross-checks several iteration flavours (local
 * entries, local scan, global scan, per-partition scan) against expected
 * per-node and per-partition entry counts.
 *
 * @throws Exception If failed.
 */
@Test
public void testIterators() throws Exception {
    IgniteEx ignite = ig();
    IgniteCache<DbKey, DbValue> cache = ignite.cache("non-primitive");
    Affinity<Object> aff = ignite.affinity(cache.getName());
    Map<UUID, Integer> cntrs = new HashMap<>();
    Map<Integer, Integer> partCntrs = new HashMap<>();
    final int ENTRIES = 10_000;
    for (int i = 0; i < ENTRIES; i++) {
        DbKey k = new DbKey(i);
        DbValue v = new DbValue(i, "test-value", i);
        cache.put(k, v);
        // Map.merge replaces the manual get/null-check/put counting idiom.
        cntrs.merge(aff.mapKeyToNode(k).id(), 1, Integer::sum);
        partCntrs.merge(aff.partition(k), 1, Integer::sum);
    }
    checkLocalEntries(ENTRIES, cntrs);
    checkLocalScan(ENTRIES, cntrs);
    checkScan(ENTRIES);
    checkScanPartition(partCntrs);
}
/**
 * @param total Expected total entries.
 * @param cntrs Expected per-node entries count.
 */
private void checkLocalEntries(int total, Map<UUID, Integer> cntrs) {
    Set<DbKey> seen = new HashSet<>();
    for (int g = 0; g < gridCount(); g++) {
        Ignite node = grid(g);
        IgniteCache<DbKey, DbValue> cache0 = node.cache("non-primitive");
        int nodeCnt = 0;
        // Only primary copies are iterated, so the nodes partition the key set.
        for (Cache.Entry<DbKey, DbValue> e : cache0.localEntries(CachePeekMode.PRIMARY)) {
            nodeCnt++;
            seen.add(e.getKey());
            assertEquals(e.getKey().val, e.getValue().iVal);
        }
        assertEquals(cntrs.get(node.cluster().localNode().id()), (Integer)nodeCnt);
    }
    assertEquals(total, seen.size());
}
/**
 * Runs a local ScanQuery on every node, verifying per-node totals and, per
 * node, the per-partition counts via a local partition scan.
 *
 * @param total Expected total entries.
 * @param cntrs Expected per-node entries count.
 */
private void checkLocalScan(int total, Map<UUID, Integer> cntrs) {
    Set<DbKey> allKeys = new HashSet<>();
    for (int i = 0; i < gridCount(); i++) {
        Ignite ignite0 = grid(i);
        IgniteCache<DbKey, DbValue> cache0 = ignite0.cache("non-primitive");
        ScanQuery<DbKey, DbValue> qry = new ScanQuery<>();
        qry.setLocal(true);
        QueryCursor<Cache.Entry<DbKey, DbValue>> cur = cache0.query(qry);
        Map<Integer, Integer> partCntrs = new HashMap<>();
        Affinity<Object> aff = ignite0.affinity(cache0.getName());
        int cnt = 0;
        for (Cache.Entry<DbKey, DbValue> e : cur) {
            cnt++;
            allKeys.add(e.getKey());
            assertEquals(e.getKey().val, e.getValue().iVal);
            // Map.merge replaces the manual get/null-check/put counting idiom.
            partCntrs.merge(aff.partition(e.getKey()), 1, Integer::sum);
        }
        assertEquals(cntrs.get(ignite0.cluster().localNode().id()), (Integer)cnt);
        checkScanPartition(ignite0, cache0, partCntrs, true);
    }
    assertEquals(total, allKeys.size());
}
/**
 * @param total Expected total entries.
 */
private void checkScan(int total) {
    // A non-local scan issued from any node must observe the full data set.
    for (int g = 0; g < gridCount(); g++) {
        IgniteCache<DbKey, DbValue> cache0 = grid(g).cache("non-primitive");
        Set<DbKey> seen = new HashSet<>();
        for (Cache.Entry<DbKey, DbValue> e : cache0.query(new ScanQuery<DbKey, DbValue>())) {
            seen.add(e.getKey());
            assertEquals(e.getKey().val, e.getValue().iVal);
        }
        assertEquals(total, seen.size());
    }
}
/**
 * @param partCntrs Expected per-partition entries count.
 */
private void checkScanPartition(Map<Integer, Integer> partCntrs) {
    // Validate per-partition counts from every node's point of view.
    for (int g = 0; g < gridCount(); g++) {
        Ignite node = grid(g);
        checkScanPartition(node, node.cache("non-primitive"), partCntrs, false);
    }
}
/**
 * Scans every partition individually and compares the observed key count
 * with the expected one. For local scans, partitions whose primary node is
 * not {@code ignite} are skipped.
 *
 * @param ignite Node to run the query from.
 * @param cache Cache to query.
 * @param partCntrs Expected per-partition entries count.
 * @param loc Whether the scan should be local.
 */
private void checkScanPartition(Ignite ignite,
    IgniteCache<DbKey, DbValue> cache,
    Map<Integer, Integer> partCntrs,
    boolean loc) {
    Affinity<Object> aff = ignite.affinity(cache.getName());
    int parts = aff.partitions();
    for (int p = 0; p < parts; p++) {
        // Skip before building the query: a local scan only makes sense on the
        // partition's primary node.
        if (loc && !ignite.cluster().localNode().equals(aff.mapPartitionToNode(p)))
            continue;
        ScanQuery<DbKey, DbValue> qry = new ScanQuery<>();
        qry.setPartition(p);
        qry.setLocal(loc);
        Set<DbKey> allKeys = new HashSet<>();
        for (Cache.Entry<DbKey, DbValue> e : cache.query(qry)) {
            allKeys.add(e.getKey());
            assertEquals(e.getKey().val, e.getValue().iVal);
        }
        // getOrDefault replaces the explicit null check on the expected count.
        assertEquals(partCntrs.getOrDefault(p, 0), (Integer)allKeys.size());
    }
}
/**
 * Waits (up to 5 seconds) until the given key has no on-heap entry in
 * {@code internalCache}, then asserts it is absent. For near caches the
 * check is delegated to the underlying DHT cache.
 */
private void checkEmpty(final GridCacheAdapter internalCache, final Object key) throws Exception {
    if (internalCache.isNear()) {
        checkEmpty(((GridNearCacheAdapter)internalCache).dht(), key);
        return;
    }
    // Lambda instead of the anonymous PA class; the file already uses lambdas
    // (see getFailureHandler).
    GridTestUtils.waitForCondition(() -> internalCache.peekEx(key) == null, 5000);
    assertNull(internalCache.peekEx(key));
}
}
|
apache/flex-blazeds | 37,469 | core/src/main/java/flex/messaging/io/amfx/AmfxInput.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package flex.messaging.io.amfx;
import java.io.ByteArrayInputStream;
import java.io.Externalizable;
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Date;
import java.util.Dictionary;
import java.util.EmptyStackException;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import org.xml.sax.Attributes;
import flex.messaging.MessageException;
import flex.messaging.io.AbstractProxy;
import flex.messaging.io.ArrayCollection;
import flex.messaging.io.BeanProxy;
import flex.messaging.io.ClassAliasRegistry;
import flex.messaging.io.PropertyProxy;
import flex.messaging.io.PropertyProxyRegistry;
import flex.messaging.io.SerializationContext;
import flex.messaging.io.SerializationException;
import flex.messaging.io.TypeMarshallingContext;
import flex.messaging.io.amf.ASObject;
import flex.messaging.io.amf.ActionMessage;
import flex.messaging.io.amf.Amf3Input;
import flex.messaging.io.amf.AmfTrace;
import flex.messaging.io.amf.MessageBody;
import flex.messaging.io.amf.MessageHeader;
import flex.messaging.util.ClassUtil;
import flex.messaging.util.Hex;
import flex.messaging.util.XMLUtil;
/**
* Context for AMFX specific SAX handler.Contains start and end tag handlers for each of
* the XML elements that occur in an AMFX request. The AmfxMessageDeserializer enforces
* a naming convention for these handlers of xyz_start for the start handler and xyz_end
* for the end handler of element xyz.
* <p>
* Note that this context MUST be reset if reused between AMFX packet parsings.
*
* @see AmfxMessageDeserializer
* @see AmfxOutput
*/
public class AmfxInput {
/**
* This is the initial capacity that will be used for AMF arrays that have
* length greater than 1024.
*/
public static final int INITIAL_ARRAY_CAPACITY = 1024;
private SerializationContext context;
private BeanProxy beanproxy = new BeanProxy();
private final ArrayList objectTable;
private final ArrayList stringTable;
private final ArrayList traitsTable;
private StringBuffer text;
private ActionMessage message;
private MessageHeader currentHeader;
private MessageBody currentBody;
private Stack objectStack;
private Stack proxyStack;
private Stack arrayPropertyStack;
private Stack ecmaArrayIndexStack;
private Stack strictArrayIndexStack;
private Stack dictionaryStack;
private Stack traitsStack;
private boolean isStringReference;
private boolean isTraitProperty;
/*
* DEBUG LOGGING
*/
protected boolean isDebug;
protected AmfTrace trace;
/**
 * Constructor.
 * Construct an AmfxInput by passing in a <code>SerializationContext</code> object.
 * Initializes the AMF reference tables (strings, objects, traits) and the parser
 * stacks used while walking the AMFX XML structure.
 *
 * @param context the <code>SerializationContext</code> object
 */
public AmfxInput(SerializationContext context) {
    this.context = context;
    // Reference tables: AMF allows strings/objects/traits to be sent once and
    // referred to by index afterwards.
    stringTable = new ArrayList(64);
    objectTable = new ArrayList(64);
    traitsTable = new ArrayList(10);
    // Parser state stacks, pushed/popped by the start_*/end_* element handlers.
    objectStack = new Stack();
    proxyStack = new Stack();
    arrayPropertyStack = new Stack();
    dictionaryStack = new Stack();
    strictArrayIndexStack = new Stack();
    ecmaArrayIndexStack = new Stack();
    traitsStack = new Stack();
    // Accumulates character data between start and end tags.
    text = new StringBuffer(32);
}
/**
 * Reset the AmfxInput object so it can be reused for another AMFX packet.
 * Clears all reference tables, all parser stacks, any buffered character data,
 * the parser flags and the per-message state, and resets the type-marshalling
 * context. The class contract requires this between packet parsings.
 */
public void reset() {
    stringTable.clear();
    objectTable.clear();
    traitsTable.clear();
    objectStack.clear();
    proxyStack.clear();
    arrayPropertyStack.clear();
    dictionaryStack.clear();
    traitsStack.clear();
    // Fix: these two stacks were previously not cleared; after a malformed or
    // aborted parse they could carry stale array-index counters into the next packet.
    ecmaArrayIndexStack.clear();
    strictArrayIndexStack.clear();
    // Discard any partially accumulated character data and parser flags.
    text.setLength(0);
    isStringReference = false;
    isTraitProperty = false;
    currentBody = null;
    currentHeader = null;
    TypeMarshallingContext marshallingContext = TypeMarshallingContext.getTypeMarshallingContext();
    marshallingContext.reset();
}
/**
 * Install an {@link AmfTrace} sink for debug logging.
 * Passing {@code null} disables debug tracing.
 *
 * @param trace the trace sink, or {@code null} to turn tracing off
 */
public void setDebugTrace(AmfTrace trace) {
    isDebug = (trace != null);
    this.trace = trace;
}
/**
 * Set the {@link ActionMessage} that parsed headers and bodies will be added to.
 *
 * @param msg the target <code>ActionMessage</code>
 */
public void setActionMessage(ActionMessage msg) {
    this.message = msg;
}
/**
 * Read object from the AmfxInput object.
 * Not supported for the SAX-driven AMFX reader: deserialization happens through
 * the start_*/end_* element handlers instead, so this always returns {@code null}.
 *
 * @return always {@code null}; this entry point is not supported
 * @throws IOException declared for interface compatibility; never thrown here
 */
public Object readObject() throws IOException {
    return null;
}
/**
 * Accumulate character data reported by the SAX parser. The buffer is consumed
 * by the end-tag handlers (e.g. end_string, end_int) and cleared by the
 * corresponding start-tag handlers.
 *
 * @param s the character data to append
 */
public void text(String s) {
    this.text.append(s);
}
//
// AMFX Message Structure
//
/**
 * Handle the opening &lt;amfx&gt; element: parse the optional "ver" attribute
 * (defaulting to {@link ActionMessage#CURRENT_VERSION}) and record the version
 * on the target ActionMessage.
 *
 * @param attributes current Attributes
 */
public void start_amfx(Attributes attributes) {
    int version = ActionMessage.CURRENT_VERSION;
    String ver = attributes.getValue("ver");
    if (ver != null) {
        try {
            version = Integer.parseInt(ver);
        } catch (NumberFormatException ex) {
            throw new MessageException("Unknown version: " + ver);
        }
    }
    if (isDebug) {
        trace.version(version);
    }
    message.setVersion(version);
}
/**
 * End the &lt;amfx&gt; element. Nothing to do: headers and bodies have already
 * been attached to the ActionMessage by their own end handlers.
 */
public void end_amfx() {
}
/**
 * Begin a message header element. Headers may not nest inside other headers
 * or inside a body. Reads the "name" and optional "mustUnderstand" attributes.
 *
 * @param attributes current Attributes
 */
public void start_header(Attributes attributes) {
    if (currentHeader != null || currentBody != null) {
        throw new MessageException("Unexpected header tag.");
    }
    currentHeader = new MessageHeader();
    String headerName = attributes.getValue("name");
    currentHeader.setName(headerName);
    boolean mustUnderstand = false;
    String muAttr = attributes.getValue("mustUnderstand");
    if (muAttr != null) {
        mustUnderstand = Boolean.parseBoolean(muAttr);
        currentHeader.setMustUnderstand(mustUnderstand);
    }
    if (isDebug) {
        trace.startHeader(headerName, mustUnderstand, message.getHeaderCount());
    }
}
/**
 * Finish the current message header: attach it to the ActionMessage and clear
 * the in-progress header state.
 */
public void end_header() {
    message.addHeader(currentHeader);
    currentHeader = null;
    if (isDebug) {
        trace.endHeader();
    }
}
/**
 * Begin a message body element. Bodies may not nest inside other bodies or
 * inside a header.
 *
 * @param attributes current Attributes
 */
public void start_body(Attributes attributes) {
    if (currentBody != null || currentHeader != null) {
        throw new MessageException("Unexpected body tag.");
    }
    currentBody = new MessageBody();
    if (isDebug) {
        trace.startMessage("", "", message.getBodyCount());
    }
}
/**
 * Finish the current message body: attach it to the ActionMessage and clear
 * the in-progress body state.
 */
public void end_body() {
    message.addBody(currentBody);
    currentBody = null;
    if (isDebug) {
        trace.endMessage();
    }
}
//
// ActionScript Types
//
/**
 * Start process of the ActionScript type Array (&lt;array&gt; element).
 * <p>
 * Reads the optional "length" and "ecma" attributes. An ECMA (associative)
 * array deserializes to a HashMap; a strict array deserializes to Object[] —
 * unless the declared length exceeds INITIAL_ARRAY_CAPACITY, in which case a
 * temporary ArrayList is grown element-by-element so that a tampered length
 * attribute cannot force a huge up-front allocation (see ObjectPropertyValueTuple).
 *
 * @param attributes current Attributes
 */
public void start_array(Attributes attributes) {
    int length = 10;
    String len = attributes.getValue("length");
    if (len != null) {
        try {
            len = len.trim();
            length = Integer.parseInt(len);
            if (length < 0)
                throw new NumberFormatException();
        } catch (NumberFormatException ex) {
            throw new MessageException("Invalid array length: " + len);
        }
    }
    String ecma = attributes.getValue("ecma");
    boolean isECMA = "true".equalsIgnoreCase(ecma);
    Object array;
    boolean useListTemporarily = false;
    if (isECMA) {
        array = ClassUtil.createDefaultInstance(HashMap.class, null, true /*validate*/);
    } else {
        // Don't instantiate List/Array right away with the supplied size if it is more than
        // INITIAL_ARRAY_CAPACITY in case the supplied size has been tampered. This at least
        // requires the user to pass in the actual objects for the List/Array to grow beyond.
        if (context.legacyCollection || length > INITIAL_ARRAY_CAPACITY) {
            // useListTemporarily is only true for the tamper-guard case, not for
            // legacyCollection mode where an ArrayList is the final representation.
            useListTemporarily = !context.legacyCollection;
            ClassUtil.validateCreation(ArrayList.class);
            int initialCapacity = length < INITIAL_ARRAY_CAPACITY ? length : INITIAL_ARRAY_CAPACITY;
            array = new ArrayList(initialCapacity);
        } else {
            ClassUtil.validateCreation(Object[].class);
            array = new Object[length];
        }
    }
    // setValue attaches the array to its parent (or to the header/body if top level).
    array = setValue(array);
    // One int-in-an-array counter per nesting level for auto-generated indices.
    ecmaArrayIndexStack.push(new int[]{0});
    strictArrayIndexStack.push(new int[]{0});
    objectTable.add(array);
    // Don't add the array to the object stack if the List is being used temporarily
    // for the length tampering detection. In that case, setValue method will add
    // an ObjectPropertyValueTuple to the object stack instead.
    if (!useListTemporarily)
        objectStack.push(array);
    proxyStack.push(null);
    if (isECMA) {
        if (isDebug)
            trace.startECMAArray(objectTable.size() - 1);
    } else {
        if (isDebug)
            trace.startAMFArray(objectTable.size() - 1);
    }
}
/**
 * End process of the ActionScript type Array. Pops the per-array parser state;
 * if a temporary List was used for the length-tamper guard, converts it back to
 * Object[] and re-attaches it to the parent via the saved proxy/property.
 */
public void end_array() {
    try {
        Object obj = objectStack.pop();
        if (obj instanceof ObjectPropertyValueTuple) {
            // Means List was being used temporarily to guard against array length tampering.
            // Convert back to Object array and set it on the parent object using the proxy
            // and property saved in the tuple.
            ObjectPropertyValueTuple tuple = (ObjectPropertyValueTuple) obj;
            // NOTE(review): indexOf uses equals(); presumably adequate here because the
            // same List instance was stored — confirm no equal-but-distinct lists exist.
            int objectId = objectTable.indexOf(tuple.value);
            Object newValue = ((ArrayList) tuple.value).toArray();
            objectTable.set(objectId, newValue);
            tuple.proxy.setValue(tuple.obj, tuple.property, newValue);
        }
        proxyStack.pop();
        ecmaArrayIndexStack.pop();
        strictArrayIndexStack.pop();
    } catch (EmptyStackException ex) {
        throw new MessageException("Unexpected end of array");
    }
    if (isDebug)
        trace.endAMFArray();
}
/**
 * Start process of the ActionScript Dictionary type (&lt;dictionary&gt; element).
 * Deserializes to a Hashtable. Keys and values arrive as alternating child
 * elements; pairing is handled in setValue via dictionaryStack.
 * <p>
 * Note: the "length" attribute is validated but otherwise unused — the
 * Hashtable is created with its default capacity.
 *
 * @param attributes current Attributes
 */
public void start_dictionary(Attributes attributes) {
    int length = 10;
    String len = attributes.getValue("length");
    if (len != null) {
        try {
            len = len.trim();
            length = Integer.parseInt(len);
            if (length < 0)
                throw new NumberFormatException();
        } catch (NumberFormatException ex) {
            throw new MessageException("Invalid array length: " + len);
        }
    }
    Hashtable dictionary = (Hashtable) ClassUtil.createDefaultInstance(Hashtable.class, null, true /*validate*/);
    // Attach to parent, then make this dictionary the current container.
    setValue(dictionary);
    objectTable.add(dictionary);
    objectStack.push(dictionary);
    proxyStack.push(null);
    if (isDebug)
        trace.startAMFDictionary(objectTable.size() - 1);
}
/**
 * End process of the ActionScript Dictionary type: pop the dictionary and its
 * (null) proxy from the parser stacks.
 */
public void end_dictionary() {
    try {
        objectStack.pop();
        proxyStack.pop();
    } catch (EmptyStackException ex) {
        throw new MessageException("Unexpected end of dictionary");
    }
    if (isDebug) {
        trace.endAMFDictionary();
    }
}
// <bytearray>010F0A</bytearray>
/**
 * Start process of the ActionScript type ByteArray: clear the character buffer
 * so it can accumulate the hex-encoded payload.
 *
 * @param attributes current Attributes (unused)
 */
public void start_bytearray(Attributes attributes) {
    text.setLength(0);
}
/**
 * End process of the ActionScript type ByteArray: hex-decode the accumulated
 * character data into a byte[] and attach it to the parent.
 */
public void end_bytearray() {
    ClassUtil.validateCreation(byte[].class);
    String bs = text.toString().trim();
    Hex.Decoder decoder = new Hex.Decoder();
    decoder.decode(bs);
    byte[] value = decoder.drain();
    setValue(value);
    // NOTE(review): the trace uses objectTable.size() - 1 although the byte[]
    // itself is not added to objectTable here — verify against AmfxOutput.
    if (isDebug)
        trace.startByteArray(objectTable.size() - 1, bs.length());
}
/**
 * Start process of the ActionScript type Date: clear the character buffer so it
 * can accumulate the millisecond timestamp.
 *
 * @param attributes current Attributes (unused)
 */
public void start_date(Attributes attributes) {
    text.setLength(0);
}
/**
 * End process of the ActionScript type Date: parse the accumulated text as
 * milliseconds-since-epoch and attach the resulting Date to the parent.
 */
public void end_date() {
    ClassUtil.validateCreation(Date.class);
    String d = text.toString().trim();
    try {
        long l = Long.parseLong(d);
        Date date = new Date(l);
        setValue(date);
        objectTable.add(date); //Dates can be sent by reference
        if (isDebug)
            trace.write(date);
    } catch (NumberFormatException ex) {
        throw new MessageException("Invalid date: " + d);
    }
}
/**
 * Start process of the ActionScript type Double: clear the character buffer.
 *
 * @param attributes current Attributes (unused)
 */
public void start_double(Attributes attributes) {
    text.setLength(0);
}
/**
 * End process of the ActionScript type Double: parse the accumulated text and
 * attach the resulting Double to the parent.
 */
public void end_double() {
    ClassUtil.validateCreation(Double.class);
    String raw = text.toString().trim();
    final Double parsed;
    try {
        parsed = Double.valueOf(raw);
    } catch (NumberFormatException ex) {
        throw new MessageException("Invalid double: " + raw);
    }
    setValue(parsed);
    if (isDebug) {
        trace.write(parsed.doubleValue());
    }
}
/**
 * Handle &lt;false/&gt;: record Boolean.FALSE as the current value.
 *
 * @param attributes current Attributes (unused)
 */
public void start_false(Attributes attributes) {
    ClassUtil.validateCreation(Boolean.class);
    setValue(Boolean.FALSE);
    if (isDebug) {
        trace.write(false);
    }
}
/**
 * End of &lt;false/&gt;; the value was already recorded in start_false.
 */
public void end_false() {
}
/**
 * Begin an &lt;item name="..."&gt; element — a named entry of an ECMA array.
 * Validates that the name is present, non-empty and starts with a letter,
 * digit or underscore, and that the current container is in fact a Map
 * (i.e. the enclosing &lt;array&gt; declared ecma="true").
 *
 * @param attributes current Attributes
 */
public void start_item(Attributes attributes) {
    String name = attributes.getValue("name");
    if (name != null) {
        name = name.trim();
        if (name.length() <= 0)
            throw new MessageException("Array item names cannot be the empty string.");
        char c = name.charAt(0);
        if (!(Character.isLetterOrDigit(c) || c == '_'))
            throw new MessageException("Invalid item name: " + name +
                    ". Array item names must start with a letter, a digit or the underscore '_' character.");
    } else {
        throw new MessageException("Array item must have a name attribute.");
    }
    //Check that we're expecting an ECMA array
    Object o = objectStackPeek();
    if (!(o instanceof Map)) {
        throw new MessageException("Unexpected array item name: " + name +
                ". Please set the ecma attribute to 'true'.");
    }
    // The item's value element will pick this name up in setValue.
    arrayPropertyStack.push(name);
}
/**
 * End of an &lt;item&gt; element: discard the item name pushed by start_item.
 */
public void end_item() {
    arrayPropertyStack.pop();
}
/**
 * Start process of the ActionScript type Int: clear the character buffer.
 *
 * @param attributes current Attributes (unused)
 */
public void start_int(Attributes attributes) {
    text.setLength(0);
}
/**
 * End process of the ActionScript type Int: parse the accumulated text and
 * attach the resulting Integer to the parent.
 */
public void end_int() {
    ClassUtil.validateCreation(Integer.class);
    String raw = text.toString().trim();
    final Integer parsed;
    try {
        parsed = Integer.valueOf(raw);
    } catch (NumberFormatException ex) {
        throw new MessageException("Invalid int: " + raw);
    }
    setValue(parsed);
    if (isDebug) {
        trace.write(parsed.intValue());
    }
}
/**
 * Handle &lt;null/&gt;: record a null value on the parent.
 *
 * @param attributes current Attributes (unused)
 */
public void start_null(Attributes attributes) {
    setValue(null);
    if (isDebug) {
        trace.writeNull();
    }
}
/**
 * End of &lt;null/&gt;; the value was already recorded in start_null.
 */
public void end_null() {
}
// <object type="com.my.Class">
/**
 * Begin an &lt;object&gt; element. Resolves the optional "type" attribute through
 * the class-alias registry and instantiates either the aliased server class or
 * an ASObject (when no type is given, when the type starts with ">" — the
 * [RemoteClass]-without-server-alias marker — or when type instantiation is
 * disabled and the type is not a flex.* class).
 *
 * @param attributes current Attributes
 */
public void start_object(Attributes attributes) {
    // NOTE(review): proxy is passed to getInstantiatedObject below, but Java is
    // pass-by-value so assignments inside that method never reach this variable;
    // the registry lookup after the if-chain covers that path.
    PropertyProxy proxy = null;
    String type = attributes.getValue("type");
    if (type != null) {
        type = type.trim();
    }
    Object object;
    if (type != null && type.length() > 0) {
        // Check for any registered class aliases
        String aliasedClass = ClassAliasRegistry.getRegistry().getClassName(type);
        if (aliasedClass != null)
            type = aliasedClass;
        // NOTE(review): type cannot be null here; this guard only fires if an
        // alias maps to the empty string — confirm whether that is possible.
        if (type == null || type.length() == 0) {
            object = ClassUtil.createDefaultInstance(ASObject.class, null, true /*validate*/);
        } else if (type.startsWith(">")) // Handle [RemoteClass] (no server alias)
        {
            object = ClassUtil.createDefaultInstance(ASObject.class, null, true /*validate*/);
            ((ASObject) object).setType(type);
        } else if (context.instantiateTypes || type.startsWith("flex.")) {
            object = getInstantiatedObject(type, proxy);
        } else {
            // Just return type info with an ASObject...
            object = ClassUtil.createDefaultInstance(ASObject.class, null, true /*validate*/);
            ((ASObject) object).setType(type);
        }
    } else {
        // TODO: QUESTION: Pete, Investigate why setValue for ASObject is delayed to endObject
        ClassUtil.validateCreation(ASObject.class);
        object = new ASObject(type);
    }
    if (proxy == null)
        proxy = PropertyProxyRegistry.getProxyAndRegister(object);
    objectStack.push(object);
    proxyStack.push(proxy);
    objectTable.add(object);
    if (isDebug)
        trace.startAMFObject(type, objectTable.size() - 1);
}
// </object>
/**
 * End of an &lt;object&gt; element. Pops the traits and object/proxy stacks, gives
 * the proxy a chance to replace the instance (instanceComplete), patches the
 * reference table if the instance changed, and attaches the final object to
 * its parent.
 */
public void end_object() {
    if (!traitsStack.empty())
        traitsStack.pop();
    if (!objectStack.empty()) {
        Object obj = objectStack.pop();
        PropertyProxy proxy = (PropertyProxy) proxyStack.pop();
        Object newObj = proxy == null ? obj : proxy.instanceComplete(obj);
        if (newObj != obj) {
            int i;
            // Find the index in the list of the old objct and replace it with
            // the new one. Identity (==) comparison is deliberate: equals()
            // could match a different-but-equal object in the table.
            for (i = 0; i < objectTable.size(); i++)
                if (objectTable.get(i) == obj)
                    break;
            if (i != objectTable.size())
                objectTable.set(i, newObj);
            obj = newObj;
        }
        setValue(obj);
    } else {
        throw new MessageException("Unexpected end of object.");
    }
    if (isDebug)
        trace.endAMFObject();
}
/**
 * Handle &lt;ref id="..."/&gt;: look up a previously deserialized object in the
 * reference table by index and attach it to the parent.
 * Note: when the id attribute is missing the error message reads
 * "Unknown object reference: null".
 *
 * @param attributes current Attributes
 */
public void start_ref(Attributes attributes) {
    String id = attributes.getValue("id");
    if (id != null) {
        try {
            int i = Integer.parseInt(id);
            Object o = objectTable.get(i);
            setValue(o);
            if (isDebug)
                trace.writeRef(i);
        } catch (NumberFormatException ex) {
            throw new MessageException("Invalid object reference: " + id);
        } catch (IndexOutOfBoundsException ex) {
            throw new MessageException("Unknown object reference: " + id);
        }
    } else {
        throw new MessageException("Unknown object reference: " + id);
    }
}
/**
 * End of &lt;ref/&gt;; the referenced value was already attached in start_ref.
 */
public void end_ref() {
}
/**
 * Begin a &lt;string&gt; element. If an "id" attribute is present this is a
 * reference into the string table and is resolved immediately (either as a
 * trait name or as a value); otherwise the character buffer is cleared so that
 * end_string can consume the literal text.
 *
 * @param attributes current Attributes
 */
public void start_string(Attributes attributes) {
    String id = attributes.getValue("id");
    if (id != null) {
        isStringReference = true;
        try {
            int i = Integer.parseInt(id);
            String s = (String) stringTable.get(i);
            if (isTraitProperty) {
                // The string names an object property inside a <traits> block.
                TraitsContext traitsContext = (TraitsContext) traitsStack.peek();
                traitsContext.add(s);
            } else {
                ClassUtil.validateCreation(String.class);
                setValue(s);
            }
        } catch (NumberFormatException ex) {
            throw new MessageException("Invalid string reference: " + id);
        } catch (IndexOutOfBoundsException ex) {
            throw new MessageException("Unknown string reference: " + id);
        }
    } else {
        text.delete(0, text.length());
        isStringReference = false;
    }
}
/**
 * End of a &lt;string&gt; element holding literal text (references were handled
 * in start_string). Non-empty strings are entered into the string table so
 * later occurrences can be sent by reference; trait names skip CDATA unescaping.
 */
public void end_string() {
    if (!isStringReference) {
        String s = text.toString();
        // Special case the empty string as it isn't counted as in
        // the string reference table
        if (s.length() > 0) {
            // Traits won't contain CDATA
            if (!isTraitProperty)
                s = unescapeCloseCDATA(s);
            stringTable.add(s);
        }
        if (isTraitProperty) {
            TraitsContext traitsContext = (TraitsContext) traitsStack.peek();
            traitsContext.add(s);
        } else {
            ClassUtil.validateCreation(String.class);
            setValue(s);
            if (isDebug)
                trace.writeString(s);
        }
    }
}
/**
 * Begin a &lt;traits&gt; element, which lists the property names of the enclosing
 * object. Either resolves an "id" reference into the traits table, or starts a
 * fresh trait list (registered in the traits table for later reference). For
 * externalizable traits the enclosing object must implement
 * java.io.Externalizable. Sets isTraitProperty so nested strings are treated
 * as property names rather than values.
 *
 * @param attributes current Attributes
 */
public void start_traits(Attributes attributes) {
    if (!objectStack.empty()) {
        List traitsList = new ArrayList();
        TraitsContext traitsContext = new TraitsContext(traitsList);
        traitsStack.push(traitsContext);
        String id = attributes.getValue("id");
        if (id != null) {
            try {
                int i = Integer.parseInt(id);
                List l = (List) traitsTable.get(i);
                Iterator it = l.iterator();
                while (it.hasNext()) {
                    String prop = (String) it.next();
                    traitsList.add(prop);
                }
            } catch (NumberFormatException ex) {
                throw new MessageException("Invalid traits reference: " + id);
            } catch (IndexOutOfBoundsException ex) {
                throw new MessageException("Unknown traits reference: " + id);
            }
        } else {
            boolean externalizable = false;
            String ext = attributes.getValue("externalizable");
            if (ext != null) {
                externalizable = "true".equals(ext.trim());
            }
            Object obj = objectStackPeek();
            if (externalizable && !(obj instanceof Externalizable)) {
                //Class '{className}' must implement java.io.Externalizable to receive client IExternalizable instances.
                SerializationException ex = new SerializationException();
                ex.setMessage(10305, new Object[]{obj.getClass().getName()});
                throw ex;
            }
            traitsTable.add(traitsList);
        }
        isTraitProperty = true;
    } else {
        throw new MessageException("Unexpected traits");
    }
}
/**
 * End of a &lt;traits&gt; element: subsequent strings are values again, not
 * property names. (The TraitsContext stays on traitsStack until end_object.)
 */
public void end_traits() {
    isTraitProperty = false;
}
/**
 * Handle &lt;true/&gt;: record Boolean.TRUE as the current value.
 *
 * @param attributes current Attributes (unused)
 */
public void start_true(Attributes attributes) {
    ClassUtil.validateCreation(Boolean.class);
    setValue(Boolean.TRUE);
    if (isDebug) {
        trace.write(true);
    }
}
/**
 * End of &lt;true/&gt;; the value was already recorded in start_true.
 */
public void end_true() {
}
/**
 * Handle &lt;undefined/&gt;: the ActionScript undefined value maps to null on
 * the server side.
 *
 * @param attributes current Attributes (unused)
 */
public void start_undefined(Attributes attributes) {
    setValue(null);
    if (isDebug) {
        trace.writeUndefined();
    }
}
/**
 * End of &lt;undefined/&gt;; the value was already recorded in start_undefined.
 */
public void end_undefined() {
}
/**
 * Begin an &lt;xml&gt; element: clear the character buffer so it can accumulate
 * the embedded XML document text.
 *
 * @param attributes current Attributes (unused)
 */
public void start_xml(Attributes attributes) {
    text.setLength(0);
}
/**
 * End of an &lt;xml&gt; element: unescape any encoded CDATA close sequences and
 * parse the accumulated text into a DOM Document (validation, DOCTYPE and
 * external-entity policy are enforced by XMLUtil per the serialization context).
 */
public void end_xml() {
    String xml = text.toString();
    xml = unescapeCloseCDATA(xml);
    // Validation performed in XMLUtil#stringToDocument.
    Object value = XMLUtil.stringToDocument(xml, !(context.legacyXMLNamespaces),
            context.allowXmlDoctypeDeclaration, context.allowXmlExternalEntityExpansion);
    setValue(value);
}
/**
 * Reverses the escaping the AMFX serializer applies to a CDATA close sequence:
 * a literal "]]>" inside character data is written as "]]&gt;" so that the
 * enclosing CDATA section is not terminated prematurely; decode it back here.
 * The original code replaced "]]>" with itself — a no-op that left the
 * escaped entity in the deserialized string.
 *
 * @param s the raw character data
 * @return the string with any encoded CDATA close sequences restored
 */
private String unescapeCloseCDATA(String s) {
    // "]]&gt;" is six characters, so anything shorter cannot contain it.
    if (s.length() > 5 && s.indexOf("]]&gt;") != -1) {
        // String.replace performs a literal (non-regex) replacement of every occurrence.
        s = s.replace("]]&gt;", "]]>");
    }
    return s;
}
/**
 * Attach a freshly deserialized value to the current container.
 * <p>
 * Dispatches on the top of the object stack: header/body data when the stack is
 * empty; Externalizable objects consume a byte[] via AMF3 readExternal;
 * ASObjects and typed objects take the next trait name; ArrayLists and Object[]
 * append/index; Dictionaries alternate key/value; Maps take either the current
 * &lt;item&gt; name or an auto-generated ECMA index.
 *
 * @param value the deserialized value (may be null)
 * @return the value actually stored (a proxy may substitute a different instance)
 */
private Object setValue(Object value) {
    if (objectStack.empty()) {
        // Top level: the value is the header or body payload itself.
        if (currentHeader != null)
            currentHeader.setData(value);
        else if (currentBody != null)
            currentBody.setData(value);
        else
            throw new MessageException("Unexpected value: " + value);
        return value;
    }
    // ActionScript Data
    Object obj = objectStackPeek();
    // <object type="..."> <traits externalizable="true">
    if (obj instanceof Externalizable) {
        if (value != null && value.getClass().isArray() && Byte.TYPE.equals(value.getClass().getComponentType())) {
            // The externalizable payload arrives as an AMF3-encoded byte array.
            Externalizable extern = (Externalizable) obj;
            Amf3Input objIn = new Amf3Input(context);
            byte[] ba = (byte[]) value;
            ByteArrayInputStream baIn = new ByteArrayInputStream(ba);
            try {
                //objIn.setDebugTrace(trace);
                objIn.setInputStream(baIn);
                extern.readExternal(objIn);
            } catch (ClassNotFoundException ex) {
                throw new MessageException("Error while reading Externalizable class " + extern.getClass().getName(), ex);
            } catch (IOException ex) {
                throw new MessageException("Error while reading Externalizable class " + extern.getClass().getName(), ex);
            } finally {
                try {
                    objIn.close();
                } catch (IOException ex) {
                    // Best-effort close of an in-memory stream; nothing to do.
                }
            }
        } else {
            throw new MessageException("Error while reading Externalizable class. Value must be a byte array.");
        }
    }
    // <object>
    else if (obj instanceof ASObject) {
        String prop;
        TraitsContext traitsContext = (TraitsContext) traitsStack.peek();
        try {
            prop = traitsContext.next();
        } catch (IndexOutOfBoundsException ex) {
            throw new MessageException("Object has no trait info for value: " + value);
        }
        ASObject aso = (ASObject) obj;
        ClassUtil.validateAssignment(aso, prop, value);
        aso.put(prop, value);
        if (isDebug)
            trace.namedElement(prop);
    }
    // <array ecma="false"> in ArrayList form
    else if (obj instanceof ArrayList && !(obj instanceof ArrayCollection)) {
        ArrayList list = (ArrayList) obj;
        ClassUtil.validateAssignment(list, list.size(), value);
        list.add(value);
        if (isDebug)
            trace.arrayElement(list.size() - 1);
    }
    // <array ecma="false"> in Object[] form
    else if (obj.getClass().isArray()) {
        if (!strictArrayIndexStack.empty()) {
            // The counter lives in an int[1] so it can be mutated in place.
            int[] indexObj = (int[]) strictArrayIndexStack.peek();
            int index = indexObj[0];
            if (Array.getLength(obj) > index) {
                ClassUtil.validateAssignment(obj, index, value);
                Array.set(obj, index, value);
            } else {
                throw new MessageException("Index out of bounds at: " + index + " cannot set array value: " + value + "");
            }
            indexObj[0]++;
        }
    } else if (obj instanceof Map) {
        if (obj instanceof Dictionary) // <dictionary>
        {
            // Dictionary children alternate key, value, key, value...
            Dictionary dict = (Dictionary) obj;
            if (!dictionaryStack.empty()) {
                Object key = dictionaryStack.pop();
                if (isDebug) trace.addDictionaryEquals();
                ClassUtil.validateAssignment(dict, key.toString(), value);
                dict.put(key, value);
            } else {
                if (isDebug) trace.startDictionaryElement();
                dictionaryStack.push(value);
            }
            return value;
        }
        Map map = (Map) obj; // <array ecma="true">
        // <item name="prop">
        if (!arrayPropertyStack.empty()) {
            String prop = (String) arrayPropertyStack.peek();
            ClassUtil.validateAssignment(map, prop, value);
            map.put(prop, value);
            if (isDebug)
                trace.namedElement(prop);
            return value;
        }
        // Mixed content, auto-generate string for ECMA Array index
        if (!ecmaArrayIndexStack.empty()) {
            int[] index = (int[]) ecmaArrayIndexStack.peek();
            String prop = String.valueOf(index[0]);
            index[0]++;
            ClassUtil.validateAssignment(map, prop, value);
            map.put(prop, value);
            if (isDebug)
                trace.namedElement(prop);
        }
    }
    // <object type="...">
    else {
        value = setObjectValue(obj, value);
    }
    return value;
}
/**
 * Set the next trait property of a typed (non-ASObject) object through its
 * PropertyProxy, falling back to the default bean proxy. Re-reads the value
 * after assignment in case the proxy converted it, and — when the value is a
 * plain ArrayList in non-legacy mode — pushes an ObjectPropertyValueTuple so
 * end_array can convert the temporary List back to Object[] (see start_array).
 *
 * @param obj   the typed object being populated
 * @param value the deserialized property value
 * @return the value actually stored (possibly converted by the proxy)
 */
private Object setObjectValue(Object obj, Object value) {
    String prop;
    TraitsContext traitsContext = (TraitsContext) traitsStack.peek();
    try {
        prop = traitsContext.next();
    } catch (IndexOutOfBoundsException ex) {
        throw new MessageException("Object has no trait info for value: " + value, ex);
    }
    try {
        // Then check if there's a more suitable proxy now that we have an instance
        PropertyProxy proxy = (PropertyProxy) proxyStack.peek();
        if (proxy == null)
            proxy = beanproxy;
        proxy.setValue(obj, prop, value);
        // Reset value in case it was changed by the proxy except empty lists.
        // Proxy converts empty lists to empty arrays in remoting messages.
        // Emply arrays are useless as containers and cause errors.
        if (!(value instanceof ArrayList && ((ArrayList) value).size() == 0)) {
            Object newValue = proxy.getValue(obj, prop);
            if (value != newValue)
                value = newValue;
        }
        if (value instanceof ArrayList && !(value instanceof ArrayCollection)
                && !context.legacyCollection) {
            // Means List is being used temporarily, see start_array method for explanation.
            objectStack.push(new ObjectPropertyValueTuple(proxy, obj, prop, value));
        }
    } catch (Exception ex) {
        throw new MessageException("Failed to set property '" + prop + "' with value: " + value, ex);
    }
    if (isDebug)
        trace.namedElement(prop);
    return value;
}
/**
 * Peek at the logical top of the object stack, unwrapping an
 * <tt>ObjectPropertyValueTuple</tt> to its underlying value when one is present.
 *
 * @return the object at the top of the object stack
 */
private Object objectStackPeek() {
    Object top = objectStack.peek();
    if (top instanceof ObjectPropertyValueTuple) {
        return ((ObjectPropertyValueTuple) top).value;
    }
    return top;
}
/**
 * Instantiate the server-side class for an AMFX type name, or an ASObject
 * carrying the type name when the class is missing and the context allows it.
 * <p>
 * NOTE(review): the {@code proxy} parameter is ineffective — Java passes
 * references by value, so the assignment below never reaches the caller
 * (start_object re-resolves the proxy afterwards).
 *
 * @param className the resolved class name from the type attribute
 * @param proxy     unused on return; see note above
 * @return a new instance of the class, or an ASObject stand-in
 */
private Object getInstantiatedObject(String className, PropertyProxy proxy) {
    Class<?> desiredClass = null;
    try {
        desiredClass = AbstractProxy.getClassFromClassName(className);
    } catch (MessageException me) {
        // Type not found but don't mind using ASObject for the missing type.
        if (me.getCode().startsWith(MessageException.CODE_SERVER_RESOURCE_UNAVAILABLE)
                && context.createASObjectForMissingType) {
            ASObject object = (ASObject) ClassUtil.createDefaultInstance(ASObject.class, null, true /*validate*/);
            object.setType(className);
            return object;
        }
        throw me; // Rethrow.
    }
    // Type exists.
    proxy = PropertyProxyRegistry.getRegistry().getProxyAndRegister(desiredClass);
    return proxy == null ? ClassUtil.createDefaultInstance(desiredClass, null, true /*validate*/) :
            proxy.createInstance(className); // Validation is performed in the proxy.
}
/**
 * Helper class used in the case where the supplied array length is more than the
 * INITIAL_ARRAY_CAPACITY. In that case, the List/Object[] on the server is not
 * initialized with that length in case the supplied length has been tampered.
 * Instead, a temporary List of length INITIAL_ARRAY_CAPACITY is constructed and List
 * grows as array members are supplied from the client. This way the user is required to
 * pass in the actual array members for the List to grow. This helper class is needed to
 * convert the temporary List into Object[] if needed.
 */
private static class ObjectPropertyValueTuple {
    // The proxy through which the final Object[] is written back to the parent.
    private PropertyProxy proxy;
    // The parent object owning the array-valued property.
    private Object obj;
    // The name of the property on the parent.
    private String property;
    // The temporary ArrayList accumulating the array members.
    private Object value;
    private ObjectPropertyValueTuple(PropertyProxy proxy, Object obj, String property, Object value) {
        this.proxy = proxy;
        this.obj = obj;
        this.property = property;
        this.value = value;
    }
}
/**
 * Cursor over an object's trait (property-name) list: trait names are added as
 * the &lt;traits&gt; block is parsed, then consumed in order as property values
 * arrive.
 */
private class TraitsContext {
    private List traits;
    // Index of the next trait name to hand out from next().
    private int counter;
    private TraitsContext(List traits) {
        this.traits = traits;
    }
    /** Validates and appends a trait (property) name. */
    private void add(String trait) {
        trait = trait.trim();
        if (trait.length() <= 0)
            throw new MessageException("Traits cannot be the empty string.");
        char c = trait.charAt(0);
        if (!(Character.isLetterOrDigit(c) || c == '_'))
            throw new MessageException("Invalid trait name: " + trait +
                    ". Object property names must start with a letter, a digit or the underscore '_' character.");
        traits.add(trait);
    }
    /** Returns the next unconsumed trait name; throws IndexOutOfBoundsException when exhausted. */
    private String next() {
        String trait = (String) traits.get(counter);
        counter++;
        return trait;
    }
}
}
|
apache/manifoldcf | 37,726 | connectors/gridfs/connector/src/main/java/org/apache/manifoldcf/crawler/connectors/gridfs/GridFSRepositoryConnector.java | /**
* Copyright 2014 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.manifoldcf.crawler.connectors.gridfs;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.DBTCPConnector;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.InputStream;
import java.net.UnknownHostException;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.manifoldcf.agents.interfaces.RepositoryDocument;
import org.apache.manifoldcf.agents.interfaces.ServiceInterruption;
import org.apache.manifoldcf.core.interfaces.ConfigParams;
import org.apache.manifoldcf.core.interfaces.Specification;
import org.apache.manifoldcf.core.interfaces.IHTTPOutput;
import org.apache.manifoldcf.core.interfaces.IPasswordMapperActivity;
import org.apache.manifoldcf.core.interfaces.IPostParameters;
import org.apache.manifoldcf.core.interfaces.IThreadContext;
import org.apache.manifoldcf.core.interfaces.ManifoldCFException;
import org.apache.manifoldcf.crawler.connectors.BaseRepositoryConnector;
import org.apache.manifoldcf.crawler.interfaces.IProcessActivity;
import org.apache.manifoldcf.crawler.interfaces.ISeedingActivity;
import org.apache.manifoldcf.crawler.interfaces.IExistingVersions;
import org.apache.manifoldcf.crawler.system.Logging;
import org.bson.types.ObjectId;
/**
*
* @author molgun
*/
public class GridFSRepositoryConnector extends BaseRepositoryConnector {
/**
* Activity name for the activity record.
*/
protected static final String ACTIVITY_FETCH = "fetch";
/**
* Server name for declaring bin name.
*/
protected static final String SERVER = "MongoDB - GridFS";
/**
* Session expiration milliseconds.
*/
protected static final long SESSION_EXPIRATION_MILLISECONDS = 30000L;
/**
* Endpoint username.
*/
protected String username = null;
/**
* Endpoint password.
*/
protected String password = null;
/**
* Endpoint host.
*/
protected String host = null;
/**
* Endpoint port.
*/
protected String port = null;
/**
* Endpoint db.
*/
protected String db = null;
/**
* Endpoint bucket.
*/
protected String bucket = null;
/**
* Endpoint url.
*/
protected String url = null;
/**
* Endpoint acl.
*/
protected String acl = null;
/**
* Endpoint denyAcl.
*/
protected String denyAcl = null;
/**
* MongoDB session.
*/
protected DB session = null;
/**
* Last session fetch time.
*/
protected long lastSessionFetch = -1L;
/**
* Forward to the javascript to check the configuration parameters.
*/
private static final String EDIT_CONFIG_HEADER_FORWARD = "editConfiguration.js";
/**
* Forward to the HTML template to view the configuration parameters.
*/
private static final String VIEW_CONFIG_FORWARD = "viewConfiguration.html";
/**
* Forward to the HTML template to edit the configuration parameters.
*/
private static final String EDIT_CONFIG_FORWARD_SERVER = "editConfiguration_Server.html";
/**
* GridFS server tab name.
*/
private static final String GRIDFS_SERVER_TAB_RESOURCE = "GridFSConnector.Server";
/**
* Tab name parameter for managing the view of the Web UI.
*/
private static final String TAB_NAME_PARAM = "TabName";
/**
 * Constructor. No connection is made here; configuration arrives via
 * {@link #connect} and the MongoDB session is created lazily.
 */
public GridFSRepositoryConnector() {
    super();
}
/**
 * Return the throttling bin names for a document: all documents fetched from
 * the same MongoDB host share one bin. (The previous javadoc was a copy-paste
 * of the connector-model description.)
 *
 * @param documentIdentifier the document identifier (unused; binning is per host)
 * @return a one-element array containing the configured host
 */
@Override
public String[] getBinNames(String documentIdentifier) {
    return new String[]{host};
}
/**
 * Tell the world what model this connector uses for addSeedDocuments().
 * This override simply delegates to the base class default. The connector does
 * not have to be connected for this method to be called.
 *
 * @return the model type value.
 */
@Override
public int getConnectorModel() {
    return super.getConnectorModel();
}
/**
 * Return the list of activities that this connector supports (i.e. writes
 * into the log). The connector does not have to be connected for this
 * method to be called.
 *
 * @return the list; this connector only records fetch activity.
 */
@Override
public String[] getActivitiesList() {
return new String[]{ACTIVITY_FETCH};
}
/**
 * Connect.
 *
 * @param configParams is the set of configuration parameters, which in this
 * case describe the MongoDB endpoint, credentials, and metadata field names.
 */
@Override
public void connect(ConfigParams configParams) {
super.connect(configParams);
// Cache all configuration values locally; the MongoDB session itself is
// created lazily by getSession() on first use.
username = params.getParameter(GridFSConstants.USERNAME_PARAM);
// The password is stored obfuscated, hence the dedicated accessor.
password = params.getObfuscatedParameter(GridFSConstants.PASSWORD_PARAM);
host = params.getParameter(GridFSConstants.HOST_PARAM);
port = params.getParameter(GridFSConstants.PORT_PARAM);
db = params.getParameter(GridFSConstants.DB_PARAM);
bucket = params.getParameter(GridFSConstants.BUCKET_PARAM);
url = params.getParameter(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM);
acl = params.getParameter(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM);
denyAcl = params.getParameter(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM);
}
/**
 * Test the connection. Returns a string describing the connection
 * integrity.
 *
 * @return the connection's status as a displayable string.
 * @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
 */
@Override
public String check() throws ManifoldCFException {
try {
getSession();
if (session != null) {
// Probe the underlying TCP connection pool so we verify the server is
// actually reachable, not merely that a client object was constructed.
Mongo currentMongoSession = session.getMongo();
currentMongoSession.getConnector()
.getDBPortPool(currentMongoSession.getAddress())
.get()
.ensureOpen();
// Discard the probe session; normal operation re-creates one lazily
// through getSession().
session.getMongo().close();
session = null;
return super.check();
}
return "Not connected.";
} catch (ManifoldCFException e) {
// Report configuration/session problems as the status string rather than
// letting the check call fail.
return e.getMessage();
} catch (IOException ex) {
return ex.getMessage();
}
}
/**
 * Close the connection. Call this before discarding this instance of the
 * repository connector.
 *
 * Fix: state teardown now happens in a finally block, so the session
 * reference and cached credentials are cleared even when closing the Mongo
 * connection fails. Previously a close() failure left a stale session and
 * all configuration fields in place.
 *
 * @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
 */
@Override
public void disconnect() throws ManifoldCFException {
  if (session != null) {
    try {
      session.getMongo().close();
    } catch (Exception e) {
      Logging.connectors.error("GridFS: Error when trying to disconnect: " + e.getMessage());
      throw new ManifoldCFException("GridFS: Error when trying to disconnect: " + e.getMessage(), e);
    } finally {
      // Always reset local state so a subsequent connect() starts clean,
      // regardless of whether close() succeeded.
      session = null;
      lastSessionFetch = -1L;
      username = null;
      password = null;
      host = null;
      port = null;
      db = null;
      bucket = null;
      url = null;
      acl = null;
      denyAcl = null;
    }
  }
}
/**
 * Periodic housekeeping for connected-but-idle connector instances: closes
 * the MongoDB session once it has been idle past the expiration window.
 *
 * @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
 */
@Override
public void poll() throws ManifoldCFException {
  // Nothing to expire when no session has ever been fetched.
  if (lastSessionFetch != -1L) {
    boolean expired =
        System.currentTimeMillis() >= lastSessionFetch + SESSION_EXPIRATION_MILLISECONDS;
    if (expired) {
      if (session != null) {
        session.getMongo().close();
        session = null;
      }
      lastSessionFetch = -1L;
    }
  }
}
/**
 * Assess whether this connector instance should be counted as connected.
 *
 * @return true only when a session exists and its underlying TCP connector
 * is still open.
 */
@Override
public boolean isConnected() {
  return session != null && session.getMongo().getConnector().isOpen();
}
/**
 * Get the maximum number of documents to amalgamate together into one
 * batch, for this connector.
 *
 * @return the maximum number. 0 indicates "unlimited".
 */
@Override
public int getMaxDocumentRequest() {
// No GridFS-specific batching constraint; use the framework default.
return super.getMaxDocumentRequest();
}
/**
 * Return the list of relationship types that this connector recognizes.
 *
 * @return the list; GridFS files have no inter-document relationships, so
 * the base-class default is used.
 */
@Override
public String[] getRelationshipTypes() {
return super.getRelationshipTypes();
}
/** Queue "seed" documents. Seed documents are the starting places for crawling activity. Documents
 * are seeded when this method calls appropriate methods in the passed in ISeedingActivity object.
 *
 * This method can choose to find repository changes that happen only during the specified time interval.
 * The seeds recorded by this method will be viewed by the framework based on what the
 * getConnectorModel() method returns.
 *
 * It is not a big problem if the connector chooses to create more seeds than are
 * strictly necessary; it is merely a question of overall work required.
 *
 * Note that it is always ok to send MORE documents rather than less to this method.
 * The connector will be connected before this method can be called.
 *
 * Fix: the DBCursor is now closed in a finally block; previously it was
 * never closed, leaking a server-side cursor on every seeding pass.
 *
 *@param activities is the interface this method should use to perform whatever framework actions are desired.
 *@param spec is a document specification (that comes from the job).
 *@param seedTime is the end of the time range of documents to consider, exclusive.
 *@param lastSeedVersion is the last seeding version string for this job, or null if the job has no previous seeding version string.
 *@param jobMode is an integer describing how the job is being run, whether continuous or once-only.
 *@return an updated seeding version string, to be stored with the job.
 */
@Override
public String addSeedDocuments(ISeedingActivity activities, Specification spec,
    String lastSeedVersion, long seedTime, int jobMode)
    throws ManifoldCFException, ServiceInterruption {
  getSession();
  DBCollection fsFiles = session.getCollection(
      bucket + GridFSConstants.COLLECTION_SEPERATOR + GridFSConstants.FILES_COLLECTION_NAME
  );
  DBCursor dnc = fsFiles.find();
  try {
    // Seed every file in the bucket; incremental filtering is done later in
    // processDocuments() via version strings.
    while (dnc.hasNext()) {
      DBObject dbo = dnc.next();
      String _id = dbo.get("_id").toString();
      activities.addSeedDocument(_id);
      if (Logging.connectors.isDebugEnabled()) {
        Logging.connectors.debug("GridFS: Document _id = " + _id + " added to queue");
      }
    }
  } finally {
    // The cursor holds a server-side resource; release it even on failure.
    dnc.close();
  }
  return "";
}
/** Process a set of documents.
 * This is the method that should cause each document to be fetched, processed, and the results either added
 * to the queue of documents for the current job, and/or entered into the incremental ingestion manager.
 * The document specification allows this class to filter what is done based on the job.
 * The connector will be connected before this method can be called.
 *
 * Fixes in this revision:
 * 1. The version-string expression lacked parentheses around the conditional:
 *    string concatenation bound tighter than "!= null", so the condition
 *    compared the concatenated string (always non-null); the expression then
 *    evaluated to only the metadata hash and threw NullPointerException
 *    whenever metadata was null. The conditional is now parenthesized.
 * 2. (String[]) acls.toArray() always throws ClassCastException because a
 *    bare toArray() returns Object[]; replaced with toArray(new String[0]).
 *
 *@param documentIdentifiers is the set of document identifiers to process.
 *@param statuses are the currently-stored document versions for each document in the set of document identifiers
 * passed in above.
 *@param activities is the interface this method should use to queue up new document references
 * and ingest documents.
 *@param jobMode is an integer describing how the job is being run, whether continuous or once-only.
 *@param usesDefaultAuthority will be true only if the authority in use for these documents is the default one.
 */
@Override
public void processDocuments(String[] documentIdentifiers, IExistingVersions statuses, Specification spec,
    IProcessActivity activities, int jobMode, boolean usesDefaultAuthority)
    throws ManifoldCFException, ServiceInterruption {
  for (String documentIdentifier : documentIdentifiers) {
    String versionString;
    GridFS gfs;
    GridFSDBFile document;
    getSession();
    String _id = documentIdentifier;
    gfs = new GridFS(session, bucket);
    document = gfs.findOne(new ObjectId(_id));
    if (document == null) {
      // The file disappeared from GridFS; remove it from the index too.
      activities.deleteDocument(documentIdentifier);
      continue;
    } else {
      DBObject metadata = document.getMetaData();
      // Version string is MD5 plus (when present) the metadata hash, so a
      // content or metadata change triggers reindexing.
      versionString = document.getMD5() + "+" + (metadata != null
          ? Integer.toString(metadata.hashCode())
          : StringUtils.EMPTY);
    }
    if (versionString.length() == 0 || activities.checkDocumentNeedsReindexing(documentIdentifier, versionString)) {
      long startTime = System.currentTimeMillis();
      String errorCode = null;
      String errorDesc = null;
      String version = versionString;
      try {
        if (Logging.connectors.isDebugEnabled()) {
          Logging.connectors.debug("GridFS: Processing document _id = " + _id);
        }
        DBObject metadata = document.getMetaData();
        if (metadata == null) {
          errorCode = "NULLMETADATA";
          errorDesc = "Excluded because document had a null Metadata";
          Logging.connectors.warn("GridFS: Document " + _id + " has a null metadata - skipping.");
          activities.noDocument(_id, version);
          continue;
        }
        String urlValue = document.getMetaData().get(this.url) == null
            ? StringUtils.EMPTY
            : document.getMetaData().get(this.url).toString();
        if (!StringUtils.isEmpty(urlValue)) {
          boolean validURL;
          try {
            new java.net.URI(urlValue);
            validURL = true;
          } catch (java.net.URISyntaxException e) {
            validURL = false;
          }
          if (validURL) {
            long fileLength = document.getLength();
            Date createdDate = document.getUploadDate();
            String fileName = document.getFilename();
            String mimeType = document.getContentType();
            // Give the output connector a chance to veto by URL, length,
            // mime type, and date before fetching any content.
            if (!activities.checkURLIndexable(urlValue)) {
              Logging.connectors.warn("GridFS: Document " + _id + " has a URL excluded by the output connector ('" + urlValue + "') - skipping.");
              errorCode = activities.EXCLUDED_URL;
              errorDesc = "Excluded because of URL (" + urlValue + ")";
              activities.noDocument(_id, version);
              continue;
            }
            if (!activities.checkLengthIndexable(fileLength)) {
              Logging.connectors.warn("GridFS: Document " + _id + " has a length excluded by the output connector (" + fileLength + ") - skipping.");
              errorCode = activities.EXCLUDED_LENGTH;
              errorDesc = "Excluded because of length (" + fileLength + ")";
              activities.noDocument(_id, version);
              continue;
            }
            if (!activities.checkMimeTypeIndexable(mimeType)) {
              Logging.connectors.warn("GridFS: Document " + _id + " has a mime type excluded by the output connector ('" + mimeType + "') - skipping.");
              errorCode = activities.EXCLUDED_MIMETYPE;
              errorDesc = "Excluded because of mime type (" + mimeType + ")";
              activities.noDocument(_id, version);
              continue;
            }
            if (!activities.checkDateIndexable(createdDate)) {
              Logging.connectors.warn("GridFS: Document " + _id + " has a date excluded by the output connector (" + createdDate + ") - skipping.");
              errorCode = activities.EXCLUDED_DATE;
              errorDesc = "Excluded because of date (" + createdDate + ")";
              activities.noDocument(_id, version);
              continue;
            }
            RepositoryDocument rd = new RepositoryDocument();
            rd.setCreatedDate(createdDate);
            rd.setModifiedDate(createdDate);
            rd.setFileName(fileName);
            rd.setMimeType(mimeType);
            String[] aclsArray = null;
            String[] denyAclsArray = null;
            if (acl != null) {
              try {
                Object aclObject = document.getMetaData().get(acl);
                if (aclObject != null) {
                  List<String> acls = (List<String>) aclObject;
                  // toArray(new String[0]) is required: a bare toArray()
                  // returns Object[], and the old (String[]) cast threw
                  // ClassCastException unconditionally.
                  aclsArray = acls.toArray(new String[0]);
                }
              } catch (ClassCastException e) {
                // This is bad because security will fail
                Logging.connectors.warn("GridFS: Document " + _id + " metadata ACL field doesn't contain List<String> type.");
                errorCode = "ACLTYPE";
                errorDesc = "Allow ACL field doesn't contain List<String> type.";
                throw new ManifoldCFException("Security decoding error: " + e.getMessage(), e);
              }
            }
            if (denyAcl != null) {
              try {
                Object denyAclObject = document.getMetaData().get(denyAcl);
                if (denyAclObject != null) {
                  List<String> denyAcls = (List<String>) denyAclObject;
                  denyAcls.add(GLOBAL_DENY_TOKEN);
                  denyAclsArray = (String[]) denyAcls.toArray(new String[0]);
                }
              } catch (ClassCastException e) {
                // This is bad because security will fail
                Logging.connectors.warn("GridFS: Document " + _id + " metadata DenyACL field doesn't contain List<String> type.");
                errorCode = "ACLTYPE";
                errorDesc = "Deny ACL field doesn't contain List<String> type.";
                throw new ManifoldCFException("Security decoding error: " + e.getMessage(), e);
              }
            }
            rd.setSecurity(RepositoryDocument.SECURITY_TYPE_DOCUMENT, aclsArray, denyAclsArray);
            InputStream is = document.getInputStream();
            try {
              rd.setBinary(is, fileLength);
              try {
                activities.ingestDocumentWithException(_id, version, urlValue, rd);
              } catch (IOException e) {
                handleIOException(e);
              }
            } finally {
              try {
                is.close();
              } catch (IOException e) {
                handleIOException(e);
              }
            }
            // NOTE(review): closing the shared session after every ingested
            // document forces getSession() to reconnect per document;
            // preserved as-is because callers may rely on the fresh session.
            gfs.getDB().getMongo().close();
            session = null;
            errorCode = "OK";
          } else {
            Logging.connectors.warn("GridFS: Document " + _id + " has a invalid URL: " + urlValue + " - skipping.");
            errorCode = activities.BAD_URL;
            errorDesc = "Excluded because document had illegal URL ('" + urlValue + "')";
            activities.noDocument(_id, version);
          }
        } else {
          Logging.connectors.warn("GridFS: Document " + _id + " has a null URL - skipping.");
          errorCode = activities.NULL_URL;
          errorDesc = "Excluded because document had a null URL.";
          activities.noDocument(_id, version);
        }
      } finally {
        if (errorCode != null) {
          activities.recordActivity(startTime, ACTIVITY_FETCH, document.getLength(), _id, errorCode, errorDesc, null);
        }
      }
    }
  }
}
/**
 * Convert an IOException raised during document ingestion into the
 * appropriate ManifoldCF exception: interrupted I/O becomes an
 * INTERRUPTED-flavored exception so the framework can abort promptly,
 * anything else is rethrown as a plain ManifoldCFException.
 *
 * @param e the I/O failure to translate; never returns normally.
 */
protected static void handleIOException(IOException e) throws ManifoldCFException, ServiceInterruption {
  if (!(e instanceof InterruptedIOException)) {
    throw new ManifoldCFException(e.getMessage(), e);
  }
  throw new ManifoldCFException(e.getMessage(), e, ManifoldCFException.INTERRUPTED);
}
/**
 * Output the configuration header section. This method is called in the
 * head section of the connector's configuration page. Its purpose is to add
 * the required tabs to the list, and to output any javascript methods that
 * might be needed by the configuration editing HTML. The connector does not
 * need to be connected for this method to be called.
 *
 * @param threadContext is the local thread context.
 * @param out is the output to which any HTML should be sent.
 * @param parameters are the configuration parameters, as they currently
 * exist, for this connection being configured.
 * @param tabsArray is an array of tab names. Add to this array any tab
 * names that are specific to the connector.
 * @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
 * @throws java.io.IOException
 */
@Override
public void outputConfigurationHeader(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, List<String> tabsArray) throws ManifoldCFException, IOException {
// Register the single Server tab, then render the validation javascript
// with the current parameter values.
tabsArray.add(Messages.getString(locale, GRIDFS_SERVER_TAB_RESOURCE));
Map<String, String> paramMap = new HashMap<String, String>();
fillInServerParameters(paramMap, out, parameters);
Messages.outputResourceWithVelocity(out, locale, EDIT_CONFIG_HEADER_FORWARD, paramMap, true);
}
/**
 * Output the configuration body section. This method is called in the body
 * section of the connector's configuration page. Its purpose is to present
 * the required form elements for editing. The coder can presume that the
 * HTML that is output from this configuration will be within appropriate
 * <html>, <body>, and <form> tags. The name of the form is always
 * "editconnection". The connector does not need to be connected for this
 * method to be called.
 *
 * @param threadContext is the local thread context.
 * @param out is the output to which any HTML should be sent.
 * @param parameters are the configuration parameters, as they currently
 * exist, for this connection being configured.
 * @param tabName is the current tab name.
 */
@Override
public void outputConfigurationBody(IThreadContext threadContext,
IHTTPOutput out, Locale locale, ConfigParams parameters, String tabName) throws ManifoldCFException, IOException {
// The template uses TabName to decide whether to render the Server tab body.
Map<String, String> paramMap = new HashMap<String, String>();
paramMap.put(TAB_NAME_PARAM, tabName);
fillInServerParameters(paramMap, out, parameters);
Messages.outputResourceWithVelocity(out, locale, EDIT_CONFIG_FORWARD_SERVER, paramMap, true);
}
/**
 * Process a configuration post. This method is called at the start of the
 * connector's configuration page, whenever there is a possibility that form
 * data for a connection has been posted. Its purpose is to gather form
 * information and modify the configuration parameters accordingly. The name
 * of the posted form is always "editconnection". The connector does not
 * need to be connected for this method to be called.
 *
 * @param threadContext is the local thread context.
 * @param variableContext is the set of variables available from the post,
 * including binary file post information.
 * @param parameters are the configuration parameters, as they currently
 * exist, for this connection being configured.
 * @return null if all is well, or a string error message if there is an
 * error that should prevent saving of the connection (and cause a
 * redirection to an error page).
 */
@Override
public String processConfigurationPost(IThreadContext threadContext,
    IPostParameters variableContext, Locale locale, ConfigParams parameters)
    throws ManifoldCFException {
  copyPostedParameter(variableContext, parameters, GridFSConstants.USERNAME_PARAM);
  // The password is special-cased: it is stored obfuscated, and the posted
  // value is a one-time key that must be mapped back to the real password.
  String password = variableContext.getParameter(GridFSConstants.PASSWORD_PARAM);
  if (password != null) {
    parameters.setObfuscatedParameter(GridFSConstants.PASSWORD_PARAM, variableContext.mapKeyToPassword(password));
  }
  copyPostedParameter(variableContext, parameters, GridFSConstants.DB_PARAM);
  copyPostedParameter(variableContext, parameters, GridFSConstants.BUCKET_PARAM);
  copyPostedParameter(variableContext, parameters, GridFSConstants.PORT_PARAM);
  copyPostedParameter(variableContext, parameters, GridFSConstants.HOST_PARAM);
  copyPostedParameter(variableContext, parameters, GridFSConstants.URL_RETURN_FIELD_NAME_PARAM);
  copyPostedParameter(variableContext, parameters, GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM);
  copyPostedParameter(variableContext, parameters, GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM);
  return null;
}

/**
 * Copy a single posted plain-text parameter into the configuration, if it
 * was present in the posted form.
 */
private static void copyPostedParameter(IPostParameters variableContext, ConfigParams parameters, String paramName) {
  String value = variableContext.getParameter(paramName);
  if (value != null) {
    parameters.setParameter(paramName, value);
  }
}
/**
 * View configuration. This method is called in the body section of the
 * connector's view configuration page. Its purpose is to present the
 * connection information to the user. The coder can presume that the HTML
 * that is output from this configuration will be within appropriate <html>
 * and <body> tags. The connector does not need to be connected for this
 * method to be called.
 *
 * @param threadContext is the local thread context.
 * @param out is the output to which any HTML should be sent.
 * @param parameters are the configuration parameters, as they currently
 * exist, for this connection being configured.
 */
@Override
public void viewConfiguration(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters) throws ManifoldCFException, IOException {
// Render the read-only view template with the current parameter values.
Map<String, String> paramMap = new HashMap<String, String>();
fillInServerParameters(paramMap, out, parameters);
Messages.outputResourceWithVelocity(out, locale, VIEW_CONFIG_FORWARD, paramMap, true);
}
/**
 * Setup a session lazily: validate the configuration, build a MongoClient
 * appropriate for the supplied host/port combination, optionally
 * authenticate, and record the fetch time for expiration in poll().
 *
 * Fix: when a port was configured without a host, the original code passed
 * the empty/null host to new MongoClient(host, port), which could never
 * connect; that case now falls back to the driver's default host
 * ("localhost") with the configured port.
 *
 * @throws ManifoldCFException on invalid configuration, unreachable host,
 * unparsable port, or failed authentication.
 */
protected void getSession() throws ManifoldCFException {
  if (session == null) {
    if (StringUtils.isEmpty(db) || StringUtils.isEmpty(bucket)) {
      throw new ManifoldCFException("GridFS: Database or bucket name cannot be empty.");
    }
    if (StringUtils.isEmpty(url)) {
      throw new ManifoldCFException("GridFS: Metadata URL field cannot be empty.");
    }
    if (StringUtils.isEmpty(host) && StringUtils.isEmpty(port)) {
      try {
        session = new MongoClient().getDB(db);
      } catch (UnknownHostException ex) {
        throw new ManifoldCFException("GridFS: Default host is not found. Does mongod process run?" + ex.getMessage(), ex);
      }
    } else if (!StringUtils.isEmpty(host) && StringUtils.isEmpty(port)) {
      try {
        session = new MongoClient(host).getDB(db);
      } catch (UnknownHostException ex) {
        throw new ManifoldCFException("GridFS: Given host information is not valid or mongod process doesn't run" + ex.getMessage(), ex);
      }
    } else {
      // A port was supplied (with or without a host). Use the default host
      // when none was configured instead of handing the driver an empty one.
      String effectiveHost = StringUtils.isEmpty(host) ? "localhost" : host;
      try {
        int integerPort = Integer.parseInt(port);
        session = new MongoClient(effectiveHost, integerPort).getDB(db);
      } catch (UnknownHostException ex) {
        throw new ManifoldCFException("GridFS: Given information is not valid or mongod process doesn't run" + ex.getMessage(), ex);
      } catch (NumberFormatException ex) {
        throw new ManifoldCFException("GridFS: Given port is not valid number. " + ex.getMessage(), ex);
      }
    }
    if (!StringUtils.isEmpty(username) && !StringUtils.isEmpty(password)) {
      boolean auth = session.authenticate(username, password.toCharArray());
      if (!auth) {
        throw new ManifoldCFException("GridFS: Given database username and password doesn't match.");
      }
    }
    lastSessionFetch = System.currentTimeMillis();
  }
}
/**
 * Fill in a Server tab configuration parameter map for calling a Velocity
 * template.
 *
 * @param paramMap is the map to fill in
 * @param mapper maps the stored obfuscated password to a one-time form key
 * @param parameters is the current set of configuration parameters
 */
public void fillInServerParameters(Map<String, String> paramMap, IPasswordMapperActivity mapper, ConfigParams parameters) {
  paramMap.put(GridFSConstants.USERNAME_PARAM, parameters.getParameter(GridFSConstants.USERNAME_PARAM));
  // Never expose the stored password to the web UI; hand out a mapped key
  // (or the empty string when no password is configured).
  String obfuscatedPassword = parameters.getObfuscatedParameter(GridFSConstants.PASSWORD_PARAM);
  paramMap.put(GridFSConstants.PASSWORD_PARAM,
      obfuscatedPassword == null ? StringUtils.EMPTY : mapper.mapPasswordToKey(obfuscatedPassword));
  // Database and bucket names fall back to their defaults when unset.
  String dbValue = parameters.getParameter(GridFSConstants.DB_PARAM);
  paramMap.put(GridFSConstants.DB_PARAM,
      StringUtils.isEmpty(dbValue) ? GridFSConstants.DEFAULT_DB_NAME : dbValue);
  String bucketValue = parameters.getParameter(GridFSConstants.BUCKET_PARAM);
  paramMap.put(GridFSConstants.BUCKET_PARAM,
      StringUtils.isEmpty(bucketValue) ? GridFSConstants.DEFAULT_BUCKET_NAME : bucketValue);
  // Remaining values are passed through verbatim (possibly null).
  paramMap.put(GridFSConstants.HOST_PARAM, parameters.getParameter(GridFSConstants.HOST_PARAM));
  paramMap.put(GridFSConstants.PORT_PARAM, parameters.getParameter(GridFSConstants.PORT_PARAM));
  paramMap.put(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM, parameters.getParameter(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM));
  paramMap.put(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM, parameters.getParameter(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM));
  paramMap.put(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM, parameters.getParameter(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM));
}
/**
 * Special column names, as far as document queries are concerned: metadata
 * fields that are handled explicitly by the connector and must therefore be
 * skipped by applyMetadata(). Parameterized with generics; the original raw
 * HashMap defeated compile-time type checking.
 */
protected static HashMap<String, String> documentKnownColumns;
static {
  documentKnownColumns = new HashMap<String, String>();
  documentKnownColumns.put(GridFSConstants.DEFAULT_ID_FIELD_NAME, "");
  documentKnownColumns.put(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM, "");
}
/**
 * Apply metadata to a repository document.
 *
 * Replaces the raw Iterator walk (with its unchecked cast) by an enhanced
 * for loop and a containsKey() lookup; behavior is unchanged since the
 * known-columns map never stores null values.
 *
 * @param rd is the repository document to apply the metadata to.
 * @param metadataMap is the resultset row to use to get the metadata. All
 * non-special columns from this row will be considered to be metadata.
 * @throws ManifoldCFException if a metadata value is not a String.
 */
protected void applyMetadata(RepositoryDocument rd, DBObject metadataMap)
    throws ManifoldCFException {
  // Cycle through the document's fields
  for (String fieldName : metadataMap.keySet()) {
    if (!documentKnownColumns.containsKey(fieldName)) {
      // Consider this field to contain metadata.
      // We can only accept non-binary metadata at this time.
      Object metadata = metadataMap.get(fieldName);
      if (!(metadata instanceof String)) {
        throw new ManifoldCFException("Metadata field '" + fieldName + "' must be convertible to a string.");
      }
      rd.addField(fieldName, metadata.toString());
    }
  }
}
}
|
apache/polaris | 37,475 | integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisPolicyServiceIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.polaris.service.it.test;
import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
import static org.apache.polaris.service.it.env.PolarisClient.polarisClient;
import static org.assertj.core.api.Assertions.assertThat;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.Response;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URI;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.stream.Stream;
import org.apache.iceberg.Schema;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.rest.RESTCatalog;
import org.apache.iceberg.rest.RESTUtil;
import org.apache.iceberg.types.Types;
import org.apache.polaris.core.admin.model.AwsStorageConfigInfo;
import org.apache.polaris.core.admin.model.Catalog;
import org.apache.polaris.core.admin.model.CatalogGrant;
import org.apache.polaris.core.admin.model.CatalogPrivilege;
import org.apache.polaris.core.admin.model.CatalogProperties;
import org.apache.polaris.core.admin.model.CatalogRole;
import org.apache.polaris.core.admin.model.FileStorageConfigInfo;
import org.apache.polaris.core.admin.model.GrantResource;
import org.apache.polaris.core.admin.model.GrantResources;
import org.apache.polaris.core.admin.model.NamespaceGrant;
import org.apache.polaris.core.admin.model.NamespacePrivilege;
import org.apache.polaris.core.admin.model.PolarisCatalog;
import org.apache.polaris.core.admin.model.PolicyGrant;
import org.apache.polaris.core.admin.model.PolicyPrivilege;
import org.apache.polaris.core.admin.model.PrincipalWithCredentials;
import org.apache.polaris.core.admin.model.StorageConfigInfo;
import org.apache.polaris.core.admin.model.TableGrant;
import org.apache.polaris.core.admin.model.TablePrivilege;
import org.apache.polaris.core.catalog.PolarisCatalogHelpers;
import org.apache.polaris.core.entity.CatalogEntity;
import org.apache.polaris.core.policy.PredefinedPolicyTypes;
import org.apache.polaris.core.policy.exceptions.PolicyInUseException;
import org.apache.polaris.service.it.env.CatalogConfig;
import org.apache.polaris.service.it.env.ClientCredentials;
import org.apache.polaris.service.it.env.IcebergHelper;
import org.apache.polaris.service.it.env.IntegrationTestsHelper;
import org.apache.polaris.service.it.env.ManagementApi;
import org.apache.polaris.service.it.env.PolarisApiEndpoints;
import org.apache.polaris.service.it.env.PolarisClient;
import org.apache.polaris.service.it.env.PolicyApi;
import org.apache.polaris.service.it.env.RestCatalogConfig;
import org.apache.polaris.service.it.ext.PolarisIntegrationTestExtension;
import org.apache.polaris.service.types.ApplicablePolicy;
import org.apache.polaris.service.types.AttachPolicyRequest;
import org.apache.polaris.service.types.CreatePolicyRequest;
import org.apache.polaris.service.types.DetachPolicyRequest;
import org.apache.polaris.service.types.Policy;
import org.apache.polaris.service.types.PolicyAttachmentTarget;
import org.apache.polaris.service.types.PolicyIdentifier;
import org.apache.polaris.service.types.UpdatePolicyRequest;
import org.assertj.core.api.Assertions;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
@ExtendWith(PolarisIntegrationTestExtension.class)
public class PolarisPolicyServiceIntegrationTest {
// AWS role ARN used for S3 storage configs; overridable for real-AWS runs.
private static final String TEST_ROLE_ARN =
Optional.ofNullable(System.getenv("INTEGRATION_TEST_ROLE_ARN"))
.orElse("arn:aws:iam::123456789012:role/my-role");
private static final String CATALOG_ROLE_1 = "catalogrole1";
private static final String CATALOG_ROLE_2 = "catalogrole2";
// Minimal valid policy content used when creating maintenance policies.
private static final String EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT = "{\"enable\":true}";
// Fixture namespaces, policies, and table identifiers shared across tests.
private static final Namespace NS1 = Namespace.of("NS1");
private static final Namespace NS2 = Namespace.of("NS2");
private static final PolicyIdentifier NS1_P1 = new PolicyIdentifier(NS1, "P1");
private static final PolicyIdentifier NS1_P2 = new PolicyIdentifier(NS1, "P2");
private static final PolicyIdentifier NS1_P3 = new PolicyIdentifier(NS1, "P3");
private static final TableIdentifier NS2_T1 = TableIdentifier.of(NS2, "T1");
private static final String NS1_NAME = RESTUtil.encodeNamespace(NS1);
// Names used to exercise not-found error paths, with the expected message.
private static final String INVALID_NAMESPACE = "INVALID_NAMESPACE";
private static final String INVALID_POLICY = "INVALID_POLICY";
private static final String INVALID_TABLE = "INVALID_TABLE";
private static final String INVALID_NAMESPACE_MSG =
"Namespace does not exist: " + INVALID_NAMESPACE;
// Shared suite state, populated once in the @BeforeAll setup() method.
private static URI s3BucketBase;
private static String principalRoleName;
private static String adminToken;
private static PolarisApiEndpoints endpoints;
private static PolarisClient client;
private static ManagementApi managementApi;
private static PolicyApi policyApi;
// Per-test state, re-created in the @BeforeEach before() method.
private RESTCatalog restCatalog;
private String currentCatalogName;
// NOTE(review): instance initializer reads the static s3BucketBase; this
// relies on JUnit constructing the test instance after @BeforeAll has run.
private final String catalogBaseLocation =
s3BucketBase + "/" + System.getenv("USER") + "/path/to/data";
// Defaults applied to every catalog unless a test overrides them.
private static final Map<String, String> DEFAULT_CATALOG_PROPERTIES =
Map.of(
"polaris.config.allow.unstructured.table.location", "true",
"polaris.config.allow.external.table.location", "true");
/**
 * One-time suite setup: obtains an admin token, creates a principal with an
 * admin role, resolves a temporary storage root for the test bucket, and
 * builds the policy API client used by all tests.
 */
@BeforeAll
public static void setup(
PolarisApiEndpoints apiEndpoints, ClientCredentials credentials, @TempDir Path tempDir) {
endpoints = apiEndpoints;
client = polarisClient(endpoints);
adminToken = client.obtainToken(credentials);
managementApi = client.managementApi(adminToken);
String principalName = client.newEntityName("snowman-rest");
principalRoleName = client.newEntityName("rest-admin");
PrincipalWithCredentials principalCredentials =
managementApi.createPrincipalWithRole(principalName, principalRoleName);
URI testRootUri = IntegrationTestsHelper.getTemporaryDirectory(tempDir);
s3BucketBase = testRootUri.resolve("my-bucket");
// The policy API acts as the (non-admin) principal, not as the admin.
String principalToken = client.obtainToken(principalCredentials);
policyApi = client.policyApi(principalToken);
}
  @AfterAll
  public static void close() throws Exception {
    // Release the shared HTTP client once the whole test class is done.
    client.close();
  }
  // Per-test setup: creates a fresh principal, a dedicated catalog named after the test
  // method, a REST catalog client bound to that principal, and a catalog role with
  // CATALOG_MANAGE_CONTENT so the principal can manipulate catalog content and policies.
  @BeforeEach
  public void before(TestInfo testInfo) {
    // Fresh principal/role per test so grants from one test cannot leak into another.
    String principalName = "snowman-rest-" + UUID.randomUUID();
    principalRoleName = "rest-admin-" + UUID.randomUUID();
    PrincipalWithCredentials principalCredentials =
        managementApi.createPrincipalWithRole(principalName, principalRoleName);
    // Catalog name is derived from the running test method for easy log correlation.
    Method method = testInfo.getTestMethod().orElseThrow();
    currentCatalogName = client.newEntityName(method.getName());
    AwsStorageConfigInfo awsConfigModel =
        AwsStorageConfigInfo.builder()
            .setStorageType(StorageConfigInfo.StorageTypeEnum.S3)
            .setAllowedLocations(List.of("s3://my-old-bucket/path/to/data"))
            .build();
    CatalogProperties.Builder catalogPropsBuilder = CatalogProperties.builder(catalogBaseLocation);
    // Merge defaults with any @CatalogConfig overrides declared on the test.
    Map<String, String> catalogProperties =
        IntegrationTestsHelper.mergeFromAnnotatedElements(
            testInfo, CatalogConfig.class, CatalogConfig::properties, DEFAULT_CATALOG_PROPERTIES);
    catalogPropsBuilder.putAll(catalogProperties);
    if (!s3BucketBase.getScheme().equals("file")) {
      catalogPropsBuilder.addProperty(
          CatalogEntity.REPLACE_NEW_LOCATION_PREFIX_WITH_CATALOG_DEFAULT_KEY, "file:");
    }
    // Catalog type may be overridden per test via @CatalogConfig; INTERNAL otherwise.
    Catalog.TypeEnum catalogType =
        IntegrationTestsHelper.extractFromAnnotatedElements(
            testInfo, CatalogConfig.class, CatalogConfig::value, Catalog.TypeEnum.INTERNAL);
    Catalog catalog =
        PolarisCatalog.builder()
            .setType(catalogType)
            .setName(currentCatalogName)
            .setProperties(catalogPropsBuilder.build())
            .setStorageConfigInfo(
                // Local runs use file storage; otherwise fall back to the S3 config above.
                s3BucketBase.getScheme().equals("file")
                    ? new FileStorageConfigInfo(
                        StorageConfigInfo.StorageTypeEnum.FILE, List.of("file://"))
                    : awsConfigModel)
            .build();
    managementApi.createCatalog(principalRoleName, catalog);
    Map<String, String> restCatalogProperties =
        IntegrationTestsHelper.mergeFromAnnotatedElements(
            testInfo, RestCatalogConfig.class, RestCatalogConfig::value, Map.of());
    String principalToken = client.obtainToken(principalCredentials);
    restCatalog =
        IcebergHelper.restCatalog(
            endpoints, currentCatalogName, restCatalogProperties, principalToken);
    // Grant CATALOG_MANAGE_CONTENT to CATALOG_ROLE_1 and attach it to the principal role.
    CatalogGrant catalogGrant =
        new CatalogGrant(CatalogPrivilege.CATALOG_MANAGE_CONTENT, GrantResource.TypeEnum.CATALOG);
    managementApi.createCatalogRole(currentCatalogName, CATALOG_ROLE_1);
    managementApi.addGrant(currentCatalogName, CATALOG_ROLE_1, catalogGrant);
    CatalogRole catalogRole = managementApi.getCatalogRole(currentCatalogName, CATALOG_ROLE_1);
    managementApi.grantCatalogRoleToPrincipalRole(
        principalRoleName, currentCatalogName, catalogRole);
    // Rebind the policy API to this test's principal token.
    policyApi = client.policyApi(principalToken);
  }
@AfterEach
public void cleanUp() throws IOException {
try {
if (restCatalog != null) {
restCatalog.close();
}
} finally {
client.cleanUp(adminToken);
}
}
@Test
public void testCreatePolicy() {
restCatalog.createNamespace(NS1);
Policy policy =
policyApi.createPolicy(
currentCatalogName,
NS1_P1,
PredefinedPolicyTypes.DATA_COMPACTION,
EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
"test policy");
Assertions.assertThat(policy).isNotNull();
Assertions.assertThat(policy.getName()).isEqualTo("P1");
Assertions.assertThat(policy.getDescription()).isEqualTo("test policy");
Assertions.assertThat(policy.getPolicyType())
.isEqualTo(PredefinedPolicyTypes.DATA_COMPACTION.getName());
Assertions.assertThat(policy.getContent()).isEqualTo(EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT);
Assertions.assertThat(policy.getInheritable())
.isEqualTo(PredefinedPolicyTypes.DATA_COMPACTION.isInheritable());
Assertions.assertThat(policy.getVersion()).isEqualTo(0);
Policy loadedPolicy = policyApi.loadPolicy(currentCatalogName, NS1_P1);
Assertions.assertThat(loadedPolicy).isEqualTo(policy);
policyApi.dropPolicy(currentCatalogName, NS1_P1);
}
  // Each value below violates the server-side policy-name pattern ^[A-Za-z0-9\-_]+$
  // (whitespace, empty string, punctuation, and other special characters).
  @ParameterizedTest
  @ValueSource(
      strings = {
        " invalid",
        "invalid ",
        " invalid ",
        "",
        "policy name",
        "policy@name",
        "policy#name",
        "policy$name",
        "policy!name",
        "policy name with space",
        "policy.name",
        "policy,name",
        "policy~name",
        "policy`name",
        "policy;name",
        "policy:name",
        "policy<>name",
        "policy[]name",
        "policy{}name",
        "policy|name",
        "policy\\name",
        "policy/name",
        "policy*name",
        "policy^name",
        "policy%name",
      })
  public void testCreatePolicyWithInvalidName(String policyName) {
    restCatalog.createNamespace(NS1);
    PolicyIdentifier policyIdentifier = new PolicyIdentifier(NS1, policyName);
    String ns = RESTUtil.encodeNamespace(policyIdentifier.getNamespace());
    CreatePolicyRequest request =
        CreatePolicyRequest.builder()
            .setType(PredefinedPolicyTypes.DATA_COMPACTION.getName())
            .setName(policyIdentifier.getName())
            .setDescription("test policy")
            .setContent(EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT)
            .build();
    // Hit the raw REST endpoint (bypassing the typed client) so the invalid name
    // reaches the server's bean-validation layer.
    try (Response res =
        policyApi
            .request(
                "polaris/v1/{cat}/namespaces/{ns}/policies",
                Map.of("cat", currentCatalogName, "ns", ns))
            .post(Entity.json(request))) {
      Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.BAD_REQUEST.getStatusCode());
      // The 400 body is the bean-validation error produced by RESTEasy Reactive.
      Assertions.assertThat(res.readEntity(String.class))
          .contains(
              "{\"error\":{\"message\":\"Invalid value: createPolicy.arg2.name: must match \\\"^[A-Za-z0-9\\\\-_]+$\\\"\",\"type\":\"ResteasyReactiveViolationException\",\"code\":400}}");
    }
  }
@Test
public void testCreatePolicyWithNonExistingNamespace() {
CreatePolicyRequest request =
CreatePolicyRequest.builder()
.setType(PredefinedPolicyTypes.DATA_COMPACTION.getName())
.setName(currentCatalogName)
.setDescription("test policy")
.setContent(EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT)
.build();
try (Response res =
policyApi
.request(
"polaris/v1/{cat}/namespaces/{ns}/policies",
Map.of("cat", currentCatalogName, "ns", INVALID_NAMESPACE))
.post(Entity.json(request))) {
Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.NOT_FOUND.getStatusCode());
Assertions.assertThat(res.readEntity(String.class)).contains(INVALID_NAMESPACE_MSG);
}
}
@Test
public void testAttachPolicyToNonExistingNamespace() {
restCatalog.createNamespace(NS1);
policyApi.createPolicy(
currentCatalogName,
NS1_P1,
PredefinedPolicyTypes.DATA_COMPACTION,
EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
"test policy");
Namespace invalidNamespace = Namespace.of(INVALID_NAMESPACE);
var invalidTarget =
new PolicyAttachmentTarget(
PolicyAttachmentTarget.TypeEnum.NAMESPACE, List.of(invalidNamespace.levels()));
AttachPolicyRequest request =
AttachPolicyRequest.builder().setTarget(invalidTarget).setParameters(Map.of()).build();
try (Response res =
policyApi
.request(
"polaris/v1/{cat}/namespaces/{ns}/policies/{policy-name}/mappings",
Map.of("cat", currentCatalogName, "ns", NS1_NAME, "policy-name", NS1_P1.getName()))
.put(Entity.json(request))) {
Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.NOT_FOUND.getStatusCode());
Assertions.assertThat(res.readEntity(String.class)).contains(INVALID_NAMESPACE_MSG);
}
policyApi.dropPolicy(currentCatalogName, NS1_P1);
}
@Test
public void testAttachPolicyToNonExistingTable() {
restCatalog.createNamespace(NS1);
policyApi.createPolicy(
currentCatalogName,
NS1_P1,
PredefinedPolicyTypes.DATA_COMPACTION,
EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
"test policy");
TableIdentifier invalidTable = TableIdentifier.of(NS1, INVALID_TABLE);
var invalidTarget =
new PolicyAttachmentTarget(
PolicyAttachmentTarget.TypeEnum.TABLE_LIKE,
List.of(invalidTable.toString().split("\\.")));
AttachPolicyRequest request =
AttachPolicyRequest.builder().setTarget(invalidTarget).setParameters(Map.of()).build();
try (Response res =
policyApi
.request(
"polaris/v1/{cat}/namespaces/{ns}/policies/{policy-name}/mappings",
Map.of("cat", currentCatalogName, "ns", NS1_NAME, "policy-name", NS1_P1.getName()))
.put(Entity.json(request))) {
Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.NOT_FOUND.getStatusCode());
Assertions.assertThat(res.readEntity(String.class))
.contains("Table or view does not exist: " + NS1_NAME + "." + INVALID_TABLE);
}
policyApi.dropPolicy(currentCatalogName, NS1_P1);
}
@Test
public void testDetachPolicyFromNonExistingNamespace() {
restCatalog.createNamespace(NS1);
policyApi.createPolicy(
currentCatalogName,
NS1_P1,
PredefinedPolicyTypes.DATA_COMPACTION,
EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
"test policy");
Namespace invalidNamespace = Namespace.of(INVALID_NAMESPACE);
var invalidTarget =
new PolicyAttachmentTarget(
PolicyAttachmentTarget.TypeEnum.NAMESPACE, List.of(invalidNamespace.levels()));
DetachPolicyRequest request = DetachPolicyRequest.builder().setTarget(invalidTarget).build();
try (Response res =
policyApi
.request(
"polaris/v1/{cat}/namespaces/{ns}/policies/{policy-name}/mappings",
Map.of("cat", currentCatalogName, "ns", NS1_NAME, "policy-name", NS1_P1.getName()))
.post(Entity.json(request))) {
Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.NOT_FOUND.getStatusCode());
Assertions.assertThat(res.readEntity(String.class)).contains(INVALID_NAMESPACE_MSG);
}
policyApi.dropPolicy(currentCatalogName, NS1_P1);
}
@Test
public void testDetachPolicyFromNonExistingTable() {
restCatalog.createNamespace(NS1);
policyApi.createPolicy(
currentCatalogName,
NS1_P1,
PredefinedPolicyTypes.DATA_COMPACTION,
EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
"test policy");
TableIdentifier invalidTable = TableIdentifier.of(NS1, INVALID_TABLE);
var invalidTarget =
new PolicyAttachmentTarget(
PolicyAttachmentTarget.TypeEnum.TABLE_LIKE,
List.of(invalidTable.toString().split("\\.")));
DetachPolicyRequest request = DetachPolicyRequest.builder().setTarget(invalidTarget).build();
try (Response res =
policyApi
.request(
"polaris/v1/{cat}/namespaces/{ns}/policies/{policy-name}/mappings",
Map.of("cat", currentCatalogName, "ns", NS1_NAME, "policy-name", NS1_P1.getName()))
.post(Entity.json(request))) {
Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.NOT_FOUND.getStatusCode());
Assertions.assertThat(res.readEntity(String.class))
.contains("Table or view does not exist: " + NS1_NAME + "." + INVALID_TABLE);
}
policyApi.dropPolicy(currentCatalogName, NS1_P1);
}
@Test
public void testDropPolicy() {
restCatalog.createNamespace(NS1);
policyApi.createPolicy(
currentCatalogName,
NS1_P1,
PredefinedPolicyTypes.DATA_COMPACTION,
EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
"test policy");
PolicyAttachmentTarget catalogTarget =
PolicyAttachmentTarget.builder().setType(PolicyAttachmentTarget.TypeEnum.CATALOG).build();
policyApi.attachPolicy(currentCatalogName, NS1_P1, catalogTarget, Map.of());
// dropPolicy should fail because the policy is attached to the catalog
Assertions.assertThatThrownBy(() -> policyApi.dropPolicy(currentCatalogName, NS1_P1))
.isInstanceOf(PolicyInUseException.class);
// with detach-all=true, the policy and the attachment should be dropped
policyApi.dropPolicy(currentCatalogName, NS1_P1, true);
Assertions.assertThat(policyApi.listPolicies(currentCatalogName, NS1)).hasSize(0);
// The policy mapping record should be dropped
Assertions.assertThat(policyApi.getApplicablePolicies(currentCatalogName, null, null, null))
.hasSize(0);
}
@Test
public void testDropNonExistingPolicy() {
restCatalog.createNamespace(NS1);
try (Response res =
policyApi
.request(
"polaris/v1/{cat}/namespaces/{ns}/policies/{policy}",
Map.of("cat", currentCatalogName, "ns", NS1_NAME, "policy", INVALID_POLICY))
.delete()) {
Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.NOT_FOUND.getStatusCode());
Assertions.assertThat(res.readEntity(String.class))
.contains(
"Policy does not exist: class PolicyIdentifier",
"namespace: " + NS1_NAME,
"name: " + INVALID_POLICY);
}
}
@Test
public void testUpdatePolicy() {
restCatalog.createNamespace(NS1);
policyApi.createPolicy(
currentCatalogName,
NS1_P1,
PredefinedPolicyTypes.DATA_COMPACTION,
EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
"test policy");
String updatedContent = "{\"enable\":false}";
String updatedDescription = "updated test policy";
Policy updatedPolicy =
policyApi.updatePolicy(currentCatalogName, NS1_P1, updatedContent, updatedDescription, 0);
Assertions.assertThat(updatedPolicy).isNotNull();
Assertions.assertThat(updatedPolicy.getName()).isEqualTo("P1");
Assertions.assertThat(updatedPolicy.getDescription()).isEqualTo(updatedDescription);
Assertions.assertThat(updatedPolicy.getPolicyType())
.isEqualTo(PredefinedPolicyTypes.DATA_COMPACTION.getName());
Assertions.assertThat(updatedPolicy.getContent()).isEqualTo(updatedContent);
Assertions.assertThat(updatedPolicy.getInheritable())
.isEqualTo(PredefinedPolicyTypes.DATA_COMPACTION.isInheritable());
Assertions.assertThat(updatedPolicy.getVersion()).isEqualTo(1);
policyApi.dropPolicy(currentCatalogName, NS1_P1);
}
@Test
public void testUpdateNonExistingPolicy() {
restCatalog.createNamespace(NS1);
UpdatePolicyRequest request =
UpdatePolicyRequest.builder()
.setContent("{\"enable\":false}")
.setDescription("updated test policy")
.build();
try (Response res =
policyApi
.request(
"polaris/v1/{cat}/namespaces/{ns}/policies/{policy}",
Map.of("cat", currentCatalogName, "ns", NS1_NAME, "policy", INVALID_POLICY))
.put(Entity.json(request))) {
Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.NOT_FOUND.getStatusCode());
Assertions.assertThat(res.readEntity(String.class))
.contains(
"Policy does not exist: class PolicyIdentifier",
"namespace: " + NS1_NAME,
"name: " + INVALID_POLICY);
}
}
@Test
public void testListPolicies() {
restCatalog.createNamespace(NS1);
policyApi.createPolicy(
currentCatalogName,
NS1_P1,
PredefinedPolicyTypes.DATA_COMPACTION,
EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
"test policy");
policyApi.createPolicy(
currentCatalogName,
NS1_P2,
PredefinedPolicyTypes.METADATA_COMPACTION,
EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
"test policy");
Assertions.assertThat(policyApi.listPolicies(currentCatalogName, NS1))
.containsExactlyInAnyOrder(NS1_P1, NS1_P2);
Assertions.assertThat(
policyApi.listPolicies(currentCatalogName, NS1, PredefinedPolicyTypes.DATA_COMPACTION))
.containsExactly(NS1_P1);
Assertions.assertThat(
policyApi.listPolicies(
currentCatalogName, NS1, PredefinedPolicyTypes.METADATA_COMPACTION))
.containsExactly(NS1_P2);
policyApi.dropPolicy(currentCatalogName, NS1_P1);
policyApi.dropPolicy(currentCatalogName, NS1_P2);
}
@Test
public void testListPoliciesOnNonExistingNamespace() {
try (Response res =
policyApi
.request(
"polaris/v1/{cat}/namespaces/{ns}/policies",
Map.of("cat", currentCatalogName, "ns", INVALID_NAMESPACE))
.get()) {
Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.NOT_FOUND.getStatusCode());
Assertions.assertThat(res.readEntity(String.class)).contains(INVALID_NAMESPACE_MSG);
}
}
@Test
public void testGetApplicablePoliciesOnNonExistingNamespace() {
try (Response res =
policyApi
.request(
"polaris/v1/{cat}/applicable-policies",
Map.of("cat", currentCatalogName),
Map.of("namespace", INVALID_NAMESPACE))
.get()) {
Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.NOT_FOUND.getStatusCode());
Assertions.assertThat(res.readEntity(String.class)).contains(INVALID_NAMESPACE_MSG);
}
}
@Test
public void testGetApplicablePoliciesOnNonExistingTable() {
restCatalog.createNamespace(NS1);
policyApi.createPolicy(
currentCatalogName,
NS1_P1,
PredefinedPolicyTypes.DATA_COMPACTION,
EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
"test policy");
try (Response res =
policyApi
.request(
"polaris/v1/{cat}/applicable-policies",
Map.of("cat", currentCatalogName),
Map.of("namespace", NS1_NAME, "target-name", INVALID_TABLE))
.get()) {
Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.NOT_FOUND.getStatusCode());
Assertions.assertThat(res.readEntity(String.class))
.contains("Table does not exist: " + NS1_NAME + "." + INVALID_TABLE);
}
policyApi.dropPolicy(currentCatalogName, NS1_P1);
}
@Test
public void testLoadNonExistingPolicy() {
restCatalog.createNamespace(NS1);
try (Response res =
policyApi
.request(
"polaris/v1/{cat}/namespaces/{ns}/policies/{policy}",
Map.of("cat", currentCatalogName, "ns", NS1_NAME, "policy", INVALID_POLICY))
.get()) {
Assertions.assertThat(res.getStatus()).isEqualTo(Response.Status.NOT_FOUND.getStatusCode());
Assertions.assertThat(res.readEntity(String.class))
.contains(
"Policy does not exist: class PolicyIdentifier",
"namespace: " + NS1_NAME,
"name: " + INVALID_POLICY);
}
}
  // End-to-end check of policy attachment and inheritance: attach three policies at the
  // catalog, namespace, and table levels, then verify what getApplicablePolicies reports
  // at each level (inherited vs. directly attached).
  @Test
  public void testPolicyMapping() {
    restCatalog.createNamespace(NS1);
    restCatalog.createNamespace(NS2);
    // Three policies of distinct types, all residing in NS1.
    Policy p1 =
        policyApi.createPolicy(
            currentCatalogName,
            NS1_P1,
            PredefinedPolicyTypes.DATA_COMPACTION,
            EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
            "test policy");
    Policy p2 =
        policyApi.createPolicy(
            currentCatalogName,
            NS1_P2,
            PredefinedPolicyTypes.METADATA_COMPACTION,
            EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
            "test policy");
    Policy p3 =
        policyApi.createPolicy(
            currentCatalogName,
            NS1_P3,
            PredefinedPolicyTypes.ORPHAN_FILE_REMOVAL,
            EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
            "test policy");
    restCatalog
        .buildTable(
            NS2_T1, new Schema(Types.NestedField.optional(1, "string", Types.StringType.get())))
        .create();
    // One attachment target per level: catalog, namespace NS2, and table NS2.T1.
    PolicyAttachmentTarget catalogTarget =
        PolicyAttachmentTarget.builder().setType(PolicyAttachmentTarget.TypeEnum.CATALOG).build();
    PolicyAttachmentTarget namespaceTarget =
        PolicyAttachmentTarget.builder()
            .setType(PolicyAttachmentTarget.TypeEnum.NAMESPACE)
            .setPath(Arrays.asList(NS2.levels()))
            .build();
    PolicyAttachmentTarget tableTarget =
        PolicyAttachmentTarget.builder()
            .setType(PolicyAttachmentTarget.TypeEnum.TABLE_LIKE)
            .setPath(PolarisCatalogHelpers.tableIdentifierToList(NS2_T1))
            .build();
    policyApi.attachPolicy(currentCatalogName, NS1_P1, catalogTarget, Map.of());
    policyApi.attachPolicy(currentCatalogName, NS1_P2, namespaceTarget, Map.of());
    policyApi.attachPolicy(currentCatalogName, NS1_P3, tableTarget, Map.of());
    // At catalog level: only p1, directly attached (inherited = false).
    List<ApplicablePolicy> applicablePoliciesOnCatalog =
        policyApi.getApplicablePolicies(currentCatalogName, null, null, null);
    Assertions.assertThat(applicablePoliciesOnCatalog)
        .containsExactly(policyToApplicablePolicy(p1, false, NS1));
    // At namespace level: p1 inherited from the catalog, p2 directly attached.
    List<ApplicablePolicy> applicablePoliciesOnNamespace =
        policyApi.getApplicablePolicies(currentCatalogName, NS2, null, null);
    Assertions.assertThat(applicablePoliciesOnNamespace)
        .containsExactlyInAnyOrder(
            policyToApplicablePolicy(p1, true, NS1), policyToApplicablePolicy(p2, false, NS1));
    // At table level: p1 and p2 inherited, p3 directly attached.
    List<ApplicablePolicy> applicablePoliciesOnTable =
        policyApi.getApplicablePolicies(currentCatalogName, NS2, NS2_T1.name(), null);
    Assertions.assertThat(applicablePoliciesOnTable)
        .containsExactlyInAnyOrder(
            policyToApplicablePolicy(p1, true, NS1),
            policyToApplicablePolicy(p2, true, NS1),
            policyToApplicablePolicy(p3, false, NS1));
    // Type filter narrows the table-level result to the METADATA_COMPACTION policy.
    Assertions.assertThat(
            policyApi.getApplicablePolicies(
                currentCatalogName, NS2, NS2_T1.name(), PredefinedPolicyTypes.METADATA_COMPACTION))
        .containsExactlyInAnyOrder(policyToApplicablePolicy(p2, true, NS1));
    // Tear down: detach everything before dropping policies and the table.
    policyApi.detachPolicy(currentCatalogName, NS1_P1, catalogTarget);
    policyApi.detachPolicy(currentCatalogName, NS1_P2, namespaceTarget);
    policyApi.detachPolicy(currentCatalogName, NS1_P3, tableTarget);
    policyApi.dropPolicy(currentCatalogName, NS1_P1);
    policyApi.dropPolicy(currentCatalogName, NS1_P2);
    policyApi.dropPolicy(currentCatalogName, NS1_P3);
    restCatalog.dropTable(NS2_T1);
  }
@Test
public void testGrantsOnPolicy() {
restCatalog.createNamespace(NS1);
try {
policyApi.createPolicy(
currentCatalogName,
NS1_P1,
PredefinedPolicyTypes.DATA_COMPACTION,
EXAMPLE_TABLE_MAINTENANCE_POLICY_CONTENT,
"test policy");
managementApi.createCatalogRole(currentCatalogName, CATALOG_ROLE_2);
Stream<PolicyGrant> policyGrants =
Arrays.stream(PolicyPrivilege.values())
.map(
p ->
new PolicyGrant(
Arrays.asList(NS1.levels()),
NS1_P1.getName(),
p,
GrantResource.TypeEnum.POLICY));
policyGrants.forEach(g -> managementApi.addGrant(currentCatalogName, CATALOG_ROLE_2, g));
Assertions.assertThat(managementApi.listGrants(currentCatalogName, CATALOG_ROLE_2))
.extracting(GrantResources::getGrants)
.asInstanceOf(InstanceOfAssertFactories.list(GrantResource.class))
.map(gr -> ((PolicyGrant) gr).getPrivilege())
.containsExactlyInAnyOrder(PolicyPrivilege.values());
PolicyGrant policyReadGrant =
new PolicyGrant(
Arrays.asList(NS1.levels()),
NS1_P1.getName(),
PolicyPrivilege.POLICY_READ,
GrantResource.TypeEnum.POLICY);
managementApi.revokeGrant(currentCatalogName, CATALOG_ROLE_2, policyReadGrant);
Assertions.assertThat(managementApi.listGrants(currentCatalogName, CATALOG_ROLE_2))
.extracting(GrantResources::getGrants)
.asInstanceOf(InstanceOfAssertFactories.list(GrantResource.class))
.map(gr -> ((PolicyGrant) gr).getPrivilege())
.doesNotContain(PolicyPrivilege.POLICY_READ);
} finally {
policyApi.purge(currentCatalogName, NS1);
}
}
@Test
public void testGrantsOnNonExistingPolicy() {
restCatalog.createNamespace(NS1);
try {
managementApi.createCatalogRole(currentCatalogName, CATALOG_ROLE_2);
Stream<PolicyGrant> policyGrants =
Arrays.stream(PolicyPrivilege.values())
.map(
p ->
new PolicyGrant(
Arrays.asList(NS1.levels()),
NS1_P1.getName(),
p,
GrantResource.TypeEnum.POLICY));
policyGrants.forEach(
g -> {
try (Response response =
managementApi
.request(
"v1/catalogs/{cat}/catalog-roles/{role}/grants",
Map.of("cat", currentCatalogName, "role", "catalogrole2"))
.put(Entity.json(g))) {
assertThat(response.getStatus()).isEqualTo(NOT_FOUND.getStatusCode());
}
});
} finally {
policyApi.purge(currentCatalogName, NS1);
}
}
@Test
public void testGrantsOnNamespace() {
restCatalog.createNamespace(NS1);
try {
managementApi.createCatalogRole(currentCatalogName, CATALOG_ROLE_2);
List<NamespacePrivilege> policyPrivilegesOnNamespace =
List.of(
NamespacePrivilege.POLICY_LIST,
NamespacePrivilege.POLICY_CREATE,
NamespacePrivilege.POLICY_DROP,
NamespacePrivilege.POLICY_WRITE,
NamespacePrivilege.POLICY_READ,
NamespacePrivilege.POLICY_FULL_METADATA,
NamespacePrivilege.NAMESPACE_ATTACH_POLICY,
NamespacePrivilege.NAMESPACE_DETACH_POLICY);
Stream<NamespaceGrant> namespaceGrants =
policyPrivilegesOnNamespace.stream()
.map(
p ->
new NamespaceGrant(
Arrays.asList(NS1.levels()), p, GrantResource.TypeEnum.NAMESPACE));
namespaceGrants.forEach(g -> managementApi.addGrant(currentCatalogName, CATALOG_ROLE_2, g));
Assertions.assertThat(managementApi.listGrants(currentCatalogName, CATALOG_ROLE_2))
.extracting(GrantResources::getGrants)
.asInstanceOf(InstanceOfAssertFactories.list(GrantResource.class))
.map(gr -> ((NamespaceGrant) gr).getPrivilege())
.containsExactlyInAnyOrderElementsOf(policyPrivilegesOnNamespace);
} finally {
policyApi.purge(currentCatalogName, NS1);
}
}
@Test
public void testGrantsOnCatalog() {
managementApi.createCatalogRole(currentCatalogName, CATALOG_ROLE_2);
List<CatalogPrivilege> policyPrivilegesOnCatalog =
List.of(
CatalogPrivilege.POLICY_LIST,
CatalogPrivilege.POLICY_CREATE,
CatalogPrivilege.POLICY_DROP,
CatalogPrivilege.POLICY_WRITE,
CatalogPrivilege.POLICY_READ,
CatalogPrivilege.POLICY_FULL_METADATA,
CatalogPrivilege.CATALOG_ATTACH_POLICY,
CatalogPrivilege.CATALOG_DETACH_POLICY);
Stream<CatalogGrant> catalogGrants =
policyPrivilegesOnCatalog.stream()
.map(p -> new CatalogGrant(p, GrantResource.TypeEnum.CATALOG));
catalogGrants.forEach(g -> managementApi.addGrant(currentCatalogName, CATALOG_ROLE_2, g));
Assertions.assertThat(managementApi.listGrants(currentCatalogName, CATALOG_ROLE_2))
.extracting(GrantResources::getGrants)
.asInstanceOf(InstanceOfAssertFactories.list(GrantResource.class))
.map(gr -> ((CatalogGrant) gr).getPrivilege())
.containsExactlyInAnyOrderElementsOf(policyPrivilegesOnCatalog);
}
@Test
public void testGrantsOnTable() {
restCatalog.createNamespace(NS2);
try {
managementApi.createCatalogRole(currentCatalogName, CATALOG_ROLE_2);
restCatalog
.buildTable(
NS2_T1, new Schema(Types.NestedField.optional(1, "string", Types.StringType.get())))
.create();
List<TablePrivilege> policyPrivilegesOnTable =
List.of(TablePrivilege.TABLE_ATTACH_POLICY, TablePrivilege.TABLE_DETACH_POLICY);
Stream<TableGrant> tableGrants =
policyPrivilegesOnTable.stream()
.map(
p ->
new TableGrant(
Arrays.asList(NS2.levels()),
NS2_T1.name(),
p,
GrantResource.TypeEnum.TABLE));
tableGrants.forEach(g -> managementApi.addGrant(currentCatalogName, CATALOG_ROLE_2, g));
Assertions.assertThat(managementApi.listGrants(currentCatalogName, CATALOG_ROLE_2))
.extracting(GrantResources::getGrants)
.asInstanceOf(InstanceOfAssertFactories.list(GrantResource.class))
.map(gr -> ((TableGrant) gr).getPrivilege())
.containsExactlyInAnyOrderElementsOf(policyPrivilegesOnTable);
} finally {
policyApi.purge(currentCatalogName, NS2);
}
}
  /**
   * Converts a {@code Policy} into the {@code ApplicablePolicy} shape returned by the
   * applicable-policies endpoint, for comparison in assertions.
   *
   * @param policy the policy to convert
   * @param inherited whether the policy applies via inheritance rather than direct attachment
   * @param parent the namespace in which the policy itself resides
   */
  private static ApplicablePolicy policyToApplicablePolicy(
      Policy policy, boolean inherited, Namespace parent) {
    return new ApplicablePolicy(
        policy.getPolicyType(),
        policy.getInheritable(),
        policy.getName(),
        policy.getDescription(),
        policy.getContent(),
        policy.getVersion(),
        inherited,
        Arrays.asList(parent.levels()));
  }
}
|
googleapis/google-cloud-java | 37,448 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MetricxSpec.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Spec for MetricX metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.MetricxSpec}
*/
public final class MetricxSpec extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.MetricxSpec)
MetricxSpecOrBuilder {
private static final long serialVersionUID = 0L;
// Use MetricxSpec.newBuilder() to construct.
private MetricxSpec(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MetricxSpec() {
version_ = 0;
sourceLanguage_ = "";
targetLanguage_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new MetricxSpec();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_MetricxSpec_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_MetricxSpec_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.MetricxSpec.class,
com.google.cloud.aiplatform.v1beta1.MetricxSpec.Builder.class);
}
/**
*
*
* <pre>
* MetricX Version options.
* </pre>
*
* Protobuf enum {@code google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion}
*/
public enum MetricxVersion implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* MetricX version unspecified.
* </pre>
*
* <code>METRICX_VERSION_UNSPECIFIED = 0;</code>
*/
METRICX_VERSION_UNSPECIFIED(0),
/**
*
*
* <pre>
* MetricX 2024 (2.6) for translation + reference (reference-based).
* </pre>
*
* <code>METRICX_24_REF = 1;</code>
*/
METRICX_24_REF(1),
/**
*
*
* <pre>
* MetricX 2024 (2.6) for translation + source (QE).
* </pre>
*
* <code>METRICX_24_SRC = 2;</code>
*/
METRICX_24_SRC(2),
/**
*
*
* <pre>
* MetricX 2024 (2.6) for translation + source + reference
* (source-reference-combined).
* </pre>
*
* <code>METRICX_24_SRC_REF = 3;</code>
*/
METRICX_24_SRC_REF(3),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* MetricX version unspecified.
* </pre>
*
* <code>METRICX_VERSION_UNSPECIFIED = 0;</code>
*/
public static final int METRICX_VERSION_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* MetricX 2024 (2.6) for translation + reference (reference-based).
* </pre>
*
* <code>METRICX_24_REF = 1;</code>
*/
public static final int METRICX_24_REF_VALUE = 1;
/**
*
*
* <pre>
* MetricX 2024 (2.6) for translation + source (QE).
* </pre>
*
* <code>METRICX_24_SRC = 2;</code>
*/
public static final int METRICX_24_SRC_VALUE = 2;
/**
*
*
* <pre>
* MetricX 2024 (2.6) for translation + source + reference
* (source-reference-combined).
* </pre>
*
* <code>METRICX_24_SRC_REF = 3;</code>
*/
public static final int METRICX_24_SRC_REF_VALUE = 3;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static MetricxVersion valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static MetricxVersion forNumber(int value) {
switch (value) {
case 0:
return METRICX_VERSION_UNSPECIFIED;
case 1:
return METRICX_24_REF;
case 2:
return METRICX_24_SRC;
case 3:
return METRICX_24_SRC_REF;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<MetricxVersion> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<MetricxVersion> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<MetricxVersion>() {
public MetricxVersion findValueByNumber(int number) {
return MetricxVersion.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.MetricxSpec.getDescriptor().getEnumTypes().get(0);
}
private static final MetricxVersion[] VALUES = values();
public static MetricxVersion valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private MetricxVersion(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion)
}
private int bitField0_;
public static final int VERSION_FIELD_NUMBER = 1;
private int version_ = 0;
/**
*
*
* <pre>
* Required. Which version to use for evaluation.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Which version to use for evaluation.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The enum numeric value on the wire for version.
*/
@java.lang.Override
public int getVersionValue() {
return version_;
}
/**
*
*
* <pre>
* Required. Which version to use for evaluation.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The version.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion getVersion() {
com.google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion result =
com.google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion.forNumber(version_);
return result == null
? com.google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion.UNRECOGNIZED
: result;
}
  public static final int SOURCE_LANGUAGE_FIELD_NUMBER = 2;
  // Holds either a String (decoded) or a ByteString (raw UTF-8); decoded lazily.
  @SuppressWarnings("serial")
  private volatile java.lang.Object sourceLanguage_ = "";
/**
*
*
* <pre>
* Optional. Source language in BCP-47 format.
* </pre>
*
* <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The sourceLanguage.
*/
@java.lang.Override
public java.lang.String getSourceLanguage() {
java.lang.Object ref = sourceLanguage_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
sourceLanguage_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Source language in BCP-47 format.
* </pre>
*
* <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for sourceLanguage.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSourceLanguageBytes() {
java.lang.Object ref = sourceLanguage_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
sourceLanguage_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  public static final int TARGET_LANGUAGE_FIELD_NUMBER = 3;
  // Holds either a String (decoded) or a ByteString (raw UTF-8); decoded lazily.
  @SuppressWarnings("serial")
  private volatile java.lang.Object targetLanguage_ = "";
/**
*
*
* <pre>
* Optional. Target language in BCP-47 format. Covers both prediction and
* reference.
* </pre>
*
* <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The targetLanguage.
*/
@java.lang.Override
public java.lang.String getTargetLanguage() {
java.lang.Object ref = targetLanguage_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
targetLanguage_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Target language in BCP-47 format. Covers both prediction and
* reference.
* </pre>
*
* <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for targetLanguage.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTargetLanguageBytes() {
java.lang.Object ref = targetLanguage_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
targetLanguage_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  /** Serializes set fields (in field-number order) followed by unknown fields. */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1 (version) has explicit presence: emitted only when its bit is set.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeEnum(1, version_);
    }
    // String fields use implicit presence: emitted only when non-empty.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceLanguage_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, sourceLanguage_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetLanguage_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, targetLanguage_);
    }
    getUnknownFields().writeTo(output);
  }
  /** Computes (and memoizes) the serialized byte size, mirroring writeTo(). */
  @java.lang.Override
  public int getSerializedSize() {
    // -1 marks "not yet computed"; the result is cached after the first call.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, version_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceLanguage_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, sourceLanguage_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetLanguage_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, targetLanguage_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.MetricxSpec)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.MetricxSpec other =
(com.google.cloud.aiplatform.v1beta1.MetricxSpec) obj;
if (hasVersion() != other.hasVersion()) return false;
if (hasVersion()) {
if (version_ != other.version_) return false;
}
if (!getSourceLanguage().equals(other.getSourceLanguage())) return false;
if (!getTargetLanguage().equals(other.getTargetLanguage())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  /** Memoized hash consistent with equals(); 0 doubles as the "unset" sentinel. */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // version contributes only when explicitly set, mirroring equals().
    if (hasVersion()) {
      hash = (37 * hash) + VERSION_FIELD_NUMBER;
      hash = (53 * hash) + version_;
    }
    hash = (37 * hash) + SOURCE_LANGUAGE_FIELD_NUMBER;
    hash = (53 * hash) + getSourceLanguage().hashCode();
    hash = (37 * hash) + TARGET_LANGUAGE_FIELD_NUMBER;
    hash = (53 * hash) + getTargetLanguage().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---- Static parsing entry points; all delegate to the shared PARSER. ----
  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  // Stream variants wrap IOExceptions via the GeneratedMessageV3 helpers.
  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // ---- Builder factory methods. ----
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.aiplatform.v1beta1.MetricxSpec prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom when this is the all-default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Spec for MetricX metric.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1beta1.MetricxSpec}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.MetricxSpec)
      com.google.cloud.aiplatform.v1beta1.MetricxSpecOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_MetricxSpec_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_MetricxSpec_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.MetricxSpec.class,
              com.google.cloud.aiplatform.v1beta1.MetricxSpec.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1beta1.MetricxSpec.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets all fields to their defaults and clears every presence bit.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      version_ = 0;
      sourceLanguage_ = "";
      targetLanguage_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_MetricxSpec_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.MetricxSpec getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.MetricxSpec.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.MetricxSpec build() {
      com.google.cloud.aiplatform.v1beta1.MetricxSpec result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.MetricxSpec buildPartial() {
      com.google.cloud.aiplatform.v1beta1.MetricxSpec result =
          new com.google.cloud.aiplatform.v1beta1.MetricxSpec(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields into the message. Builder bits: 0x1=version,
    // 0x2=sourceLanguage, 0x4=targetLanguage; only version (explicit
    // presence) propagates a bit into the message's bitField0_.
    private void buildPartial0(com.google.cloud.aiplatform.v1beta1.MetricxSpec result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.version_ = version_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.sourceLanguage_ = sourceLanguage_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.targetLanguage_ = targetLanguage_;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1beta1.MetricxSpec) {
        return mergeFrom((com.google.cloud.aiplatform.v1beta1.MetricxSpec) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges set/non-empty fields from another MetricxSpec into this builder.
    public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.MetricxSpec other) {
      if (other == com.google.cloud.aiplatform.v1beta1.MetricxSpec.getDefaultInstance())
        return this;
      if (other.hasVersion()) {
        setVersion(other.getVersion());
      }
      if (!other.getSourceLanguage().isEmpty()) {
        sourceLanguage_ = other.sourceLanguage_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getTargetLanguage().isEmpty()) {
        targetLanguage_ = other.targetLanguage_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming merge: dispatches on wire tags (8=version varint,
    // 18=source_language bytes, 26=target_language bytes).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                version_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 18:
              {
                sourceLanguage_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                targetLanguage_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private int version_ = 0;
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the version field is set.
     */
    @java.lang.Override
    public boolean hasVersion() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The enum numeric value on the wire for version.
     */
    @java.lang.Override
    public int getVersionValue() {
      return version_;
    }
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param value The enum numeric value on the wire for version to set.
     * @return This builder for chaining.
     */
    public Builder setVersionValue(int value) {
      version_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The version.
     */
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion getVersion() {
      com.google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion result =
          com.google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion.forNumber(version_);
      return result == null
          ? com.google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param value The version to set.
     * @return This builder for chaining.
     */
    public Builder setVersion(
        com.google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      version_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearVersion() {
      bitField0_ = (bitField0_ & ~0x00000001);
      version_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object sourceLanguage_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Source language in BCP-47 format.
     * </pre>
     *
     * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The sourceLanguage.
     */
    public java.lang.String getSourceLanguage() {
      java.lang.Object ref = sourceLanguage_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        sourceLanguage_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Source language in BCP-47 format.
     * </pre>
     *
     * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for sourceLanguage.
     */
    public com.google.protobuf.ByteString getSourceLanguageBytes() {
      java.lang.Object ref = sourceLanguage_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        sourceLanguage_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Source language in BCP-47 format.
     * </pre>
     *
     * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The sourceLanguage to set.
     * @return This builder for chaining.
     */
    public Builder setSourceLanguage(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      sourceLanguage_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Source language in BCP-47 format.
     * </pre>
     *
     * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSourceLanguage() {
      sourceLanguage_ = getDefaultInstance().getSourceLanguage();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Source language in BCP-47 format.
     * </pre>
     *
     * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for sourceLanguage to set.
     * @return This builder for chaining.
     */
    public Builder setSourceLanguageBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      sourceLanguage_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private java.lang.Object targetLanguage_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Target language in BCP-47 format. Covers both prediction and
     * reference.
     * </pre>
     *
     * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The targetLanguage.
     */
    public java.lang.String getTargetLanguage() {
      java.lang.Object ref = targetLanguage_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        targetLanguage_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Target language in BCP-47 format. Covers both prediction and
     * reference.
     * </pre>
     *
     * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for targetLanguage.
     */
    public com.google.protobuf.ByteString getTargetLanguageBytes() {
      java.lang.Object ref = targetLanguage_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        targetLanguage_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Target language in BCP-47 format. Covers both prediction and
     * reference.
     * </pre>
     *
     * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The targetLanguage to set.
     * @return This builder for chaining.
     */
    public Builder setTargetLanguage(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      targetLanguage_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Target language in BCP-47 format. Covers both prediction and
     * reference.
     * </pre>
     *
     * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTargetLanguage() {
      targetLanguage_ = getDefaultInstance().getTargetLanguage();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Target language in BCP-47 format. Covers both prediction and
     * reference.
     * </pre>
     *
     * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for targetLanguage to set.
     * @return This builder for chaining.
     */
    public Builder setTargetLanguageBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      targetLanguage_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.MetricxSpec)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.MetricxSpec)
  // Singleton all-default instance shared by the protobuf runtime.
  private static final com.google.cloud.aiplatform.v1beta1.MetricxSpec DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.MetricxSpec();
  }

  public static com.google.cloud.aiplatform.v1beta1.MetricxSpec getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads; attaches the partially-built
  // message to any parse failure so callers can inspect what was read.
  private static final com.google.protobuf.Parser<MetricxSpec> PARSER =
      new com.google.protobuf.AbstractParser<MetricxSpec>() {
        @java.lang.Override
        public MetricxSpec parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<MetricxSpec> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<MetricxSpec> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.MetricxSpec getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hadoop | 37,552 | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemListStatus.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.azurebfs;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.mockito.stubbing.Stubber;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.azurebfs.constants.FSOperationType;
import org.apache.hadoop.fs.azurebfs.constants.HttpHeaderConfigurations;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AbfsDriverException;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AbfsRestOperationException;
import org.apache.hadoop.fs.azurebfs.services.AbfsBlobClient;
import org.apache.hadoop.fs.azurebfs.services.AbfsClientHandler;
import org.apache.hadoop.fs.azurebfs.services.AbfsHttpHeader;
import org.apache.hadoop.fs.azurebfs.services.AbfsHttpOperation;
import org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation;
import org.apache.hadoop.fs.azurebfs.services.AbfsRestOperationType;
import org.apache.hadoop.fs.azurebfs.services.ListResponseData;
import org.apache.hadoop.fs.azurebfs.services.AbfsClient;
import org.apache.hadoop.fs.azurebfs.services.AbfsClientTestUtil;
import org.apache.hadoop.fs.azurebfs.services.VersionedFileStatus;
import org.apache.hadoop.fs.azurebfs.utils.DirectoryStateHelper;
import org.apache.hadoop.fs.azurebfs.utils.TracingContext;
import org.apache.hadoop.fs.azurebfs.utils.TracingHeaderFormat;
import org.apache.hadoop.fs.azurebfs.utils.TracingHeaderValidator;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import static java.net.HttpURLConnection.HTTP_OK;
import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.EMPTY_STRING;
import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.HTTP_METHOD_PUT;
import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.ROOT_PATH;
import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.TRUE;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.AZURE_LIST_MAX_RESULTS;
import static org.apache.hadoop.fs.azurebfs.constants.HttpHeaderConfigurations.X_MS_METADATA_PREFIX;
import static org.apache.hadoop.fs.azurebfs.services.AbfsErrors.ERR_BLOB_LIST_PARSING;
import static org.apache.hadoop.fs.azurebfs.services.RenameAtomicity.SUFFIX;
import static org.apache.hadoop.fs.azurebfs.services.RetryReasonConstants.CONNECTION_RESET_MESSAGE;
import static org.apache.hadoop.fs.azurebfs.services.RetryReasonConstants.CONNECTION_TIMEOUT_ABBREVIATION;
import static org.apache.hadoop.fs.azurebfs.services.RetryReasonConstants.CONNECTION_TIMEOUT_JDK_MESSAGE;
import static org.apache.hadoop.fs.contract.ContractTestUtils.assertMkdirs;
import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
import static org.apache.hadoop.fs.contract.ContractTestUtils.assertPathExists;
import static org.apache.hadoop.fs.contract.ContractTestUtils.rename;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.nullable;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.when;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.jupiter.api.Assertions;
/**
* Test listStatus operation.
*/
public class ITestAzureBlobFileSystemListStatus extends
AbstractAbfsIntegrationTest {
  // Number of files created by testListPath; large enough to force pagination.
  private static final int TEST_FILES_NUMBER = 6000;
  public static final String TEST_CONTINUATION_TOKEN = "continuation";
  // NOTE(review): referenced by tests outside this excerpt — verify against
  // the rest of the class before changing.
  private static final int TOTAL_NUMBER_OF_PATHS = 11;
  private static final int NUMBER_OF_UNIQUE_PATHS = 7;

  public ITestAzureBlobFileSystemListStatus() throws Exception {
    super();
  }
@Test
public void testListPath() throws Exception {
Configuration config = new Configuration(this.getRawConfiguration());
config.set(AZURE_LIST_MAX_RESULTS, "5000");
final AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem
.newInstance(getFileSystem().getUri(), config);
final List<Future<Void>> tasks = new ArrayList<>();
ExecutorService es = Executors.newFixedThreadPool(10);
for (int i = 0; i < TEST_FILES_NUMBER; i++) {
final Path fileName = new Path("/test" + i);
Callable<Void> callable = new Callable<Void>() {
@Override
public Void call() throws Exception {
touch(fileName);
return null;
}
};
tasks.add(es.submit(callable));
}
for (Future<Void> task : tasks) {
task.get();
}
es.shutdownNow();
fs.registerListener(
new TracingHeaderValidator(getConfiguration().getClientCorrelationId(),
fs.getFileSystemId(), FSOperationType.LISTSTATUS, true, 0));
FileStatus[] files = fs.listStatus(new Path("/"));
assertEquals(TEST_FILES_NUMBER, files.length /* user directory */);
fs.registerListener(
new TracingHeaderValidator(getConfiguration().getClientCorrelationId(),
fs.getFileSystemId(), FSOperationType.GET_ATTR, true, 0));
fs.close();
}
  /**
   * Test to verify that each paginated call to ListBlobs uses a new tracing context.
   * Test also verifies that the retry policy is consulted when a
   * SocketTimeoutException occurs on the first paginated call.
   * Test also verifies that an empty list with a valid continuation token is handled.
   * @throws Exception if there is an error or test assertions fails.
   */
  @Test
  public void testListPathTracingContext() throws Exception {
    final AzureBlobFileSystem spiedFs = Mockito.spy(getFileSystem());
    final AzureBlobFileSystemStore spiedStore = Mockito.spy(spiedFs.getAbfsStore());
    final AbfsClient spiedClient = Mockito.spy(spiedFs.getAbfsClient());
    final TracingContext spiedTracingContext = Mockito.spy(
        new TracingContext(
            spiedFs.getClientCorrelationId(), spiedFs.getFileSystemId(),
            FSOperationType.LISTSTATUS, true, TracingHeaderFormat.ALL_ID_FORMAT, null));
    Mockito.doReturn(spiedStore).when(spiedFs).getAbfsStore();
    Mockito.doReturn(spiedClient).when(spiedStore).getClient();
    spiedFs.setWorkingDirectory(new Path("/"));
    // First processResponse call throws a connection timeout, then succeeds;
    // status codes follow the same failure-then-success sequence.
    AbfsClientTestUtil.setMockAbfsRestOperationForListOperation(spiedClient,
        (httpOperation) -> {
          Stubber stubber = Mockito.doThrow(
              new SocketTimeoutException(CONNECTION_TIMEOUT_JDK_MESSAGE));
          stubber.doNothing().when(httpOperation).processResponse(
              nullable(byte[].class), nullable(int.class), nullable(int.class));
          when(httpOperation.getStatusCode()).thenReturn(-1).thenReturn(HTTP_OK);
          return httpOperation;
        });
    List<FileStatus> fileStatuses = new ArrayList<>();
    spiedStore.listStatus(new Path("/"), "", fileStatuses, true, null, spiedTracingContext);
    // Assert that there were retries due to SocketTimeoutException
    Mockito.verify(spiedClient, Mockito.times(1))
        .getRetryPolicy(CONNECTION_TIMEOUT_ABBREVIATION);
    // Assert that there were 2 paginated ListPath calls were made 1 and 2.
    // 1. Without continuation token
    Mockito.verify(spiedClient, times(1)).listPath(
        "/", false,
        spiedFs.getAbfsStore().getAbfsConfiguration().getListMaxResults(),
        null, spiedTracingContext, spiedFs.getAbfsStore().getUri());
    // 2. With continuation token
    Mockito.verify(spiedClient, times(1)).listPath(
        "/", false,
        spiedFs.getAbfsStore().getAbfsConfiguration().getListMaxResults(),
        TEST_CONTINUATION_TOKEN, spiedTracingContext, spiedFs.getAbfsStore().getUri());
    // Assert that none of the API calls used the same tracing header.
    Mockito.verify(spiedTracingContext, times(0)).constructHeader(any(), any(), any());
  }
/**
 * Verifies that a network failure while the Blob endpoint client is parsing
 * the listing response surfaces as an {@link AbfsDriverException} carrying a
 * network-error status code (-1) and the blob-list parsing error message.
 * @throws Exception if there is an error or test assertions fails.
 */
@Test
public void testListPathParsingFailure() throws Exception {
  assumeBlobServiceType();
  AzureBlobFileSystem spiedFs = Mockito.spy(getFileSystem());
  AzureBlobFileSystemStore spiedStore = Mockito.spy(spiedFs.getAbfsStore());
  AbfsBlobClient spiedClient = Mockito.spy(spiedStore.getClientHandler()
      .getBlobClient());
  Mockito.doReturn(spiedStore).when(spiedFs).getAbfsStore();
  Mockito.doReturn(spiedClient).when(spiedStore).getClient();
  // Simulate a connection reset while rename-pending files are filtered
  // out of the listing result.
  Mockito.doThrow(new SocketException(CONNECTION_RESET_MESSAGE)).when(spiedClient).filterRenamePendingFiles(any(), any());
  List<FileStatus> fileStatuses = new ArrayList<>();
  AbfsDriverException ex = intercept(AbfsDriverException.class,
      () -> {
        spiedStore.listStatus(new Path("/"), "", fileStatuses,
            true, null, getTestTracingContext(spiedFs, true));
      });
  assertThat(ex.getStatusCode())
      .describedAs("Expecting Network Error status code")
      .isEqualTo(-1);
  // Fixed assertion description: the expectation here is the blob list
  // parsing error, not COPY_ABORTED (stale copy-paste).
  assertThat(ex.getErrorMessage())
      .describedAs("Expecting BLOB_LIST_PARSING error message")
      .contains(ERR_BLOB_LIST_PARSING);
}
/**
 * Creates a file and verifies that listStatus on the file path returns the
 * single file entry, even while the file is still open for writing.
 */
@Test
public void testListFileVsListDir() throws Exception {
  final AzureBlobFileSystem fs = getFileSystem();
  Path filePath = path("/testFile");
  try (FSDataOutputStream ignored = fs.create(filePath)) {
    FileStatus[] listing = fs.listStatus(filePath);
    assertEquals(1, listing.length, "length of test files");
    assertIsFileReference(listing[0]);
  }
}
/**
 * Lists a file nested several directories deep and verifies that the single
 * returned entry carries the fully qualified path of that file.
 */
@Test
public void testListFileVsListDir2() throws Exception {
  final AzureBlobFileSystem fs = getFileSystem();
  Path testFolder = path("/testFolder");
  fs.mkdirs(testFolder);
  fs.mkdirs(new Path(testFolder + "/testFolder2"));
  fs.mkdirs(new Path(testFolder + "/testFolder2/testFolder3"));
  Path nestedFile = new Path(
      testFolder + "/testFolder2/testFolder3/testFile");
  ContractTestUtils.touch(fs, nestedFile);
  FileStatus[] listing = fs.listStatus(nestedFile);
  assertEquals(1, listing.length, "Wrong listing size of file " + nestedFile);
  FileStatus fileEntry = listing[0];
  assertEquals(new Path(getTestUrl(), testFolder + "/testFolder2/testFolder3/testFile"),
      fileEntry.getPath(), "Wrong path for " + fileEntry);
  assertIsFileReference(fileEntry);
}
/**
 * Listing a path that was never created must fail with
 * {@link FileNotFoundException}.
 */
@Test
public void testListNonExistentDir() throws Exception {
  final AzureBlobFileSystem fs = getFileSystem();
  Assertions.assertThrows(FileNotFoundException.class,
      () -> fs.listStatus(new Path("/testFile/")));
}
/**
 * Exercises listStatus/listFiles over a small tree: a lone sub-directory,
 * alphabetical ordering of a file next to a directory, listFiles on a single
 * file, and FileNotFoundException once the tree is deleted.
 */
@Test
public void testListFiles() throws Exception {
  final AzureBlobFileSystem fs = getFileSystem();
  Path testDir = path("/test");
  fs.mkdirs(testDir);
  FileStatus[] entries = fs.listStatus(new Path("/"));
  assertEquals(1, entries.length);
  fs.mkdirs(new Path(testDir + "/sub"));
  entries = fs.listStatus(testDir);
  assertEquals(1, entries.length);
  assertEquals("sub", entries[0].getPath().getName());
  assertIsDirectoryReference(entries[0]);
  Path childF = fs.makeQualified(new Path(testDir + "/f"));
  touch(childF);
  entries = fs.listStatus(testDir);
  assertEquals(2, entries.length);
  // Listing is alphabetical: "f" before "sub".
  final FileStatus childStatus = entries[0];
  assertEquals(childF, childStatus.getPath());
  assertEquals("f", childStatus.getPath().getName());
  assertIsFileReference(childStatus);
  assertEquals(0, childStatus.getLen());
  final FileStatus subStatus = entries[1];
  assertEquals("sub", subStatus.getPath().getName());
  assertIsDirectoryReference(subStatus);
  // Look at the child through listFiles before the tree is removed.
  LocatedFileStatus locatedChildStatus = fs.listFiles(childF, false).next();
  assertIsFileReference(locatedChildStatus);
  fs.delete(testDir, true);
  intercept(FileNotFoundException.class,
      () -> fs.listFiles(childF, false).next());
  // do some final checks on the status (failing due to version checks)
  assertEquals(childF, locatedChildStatus.getPath(), "Path mismatch of " + locatedChildStatus);
  assertEquals(locatedChildStatus, childStatus, "locatedstatus.equals(status)");
  assertEquals(childStatus, locatedChildStatus, "status.equals(locatedstatus)");
}
/** Asserts that the given status represents a zero-length directory. */
private void assertIsDirectoryReference(FileStatus status) {
  String message = "Not a directory: " + status;
  assertTrue(status.isDirectory(), message);
  assertFalse(status.isFile(), message);
  assertEquals(0, status.getLen());
}
/** Asserts that the given status represents a plain file, not a directory. */
private void assertIsFileReference(FileStatus status) {
  String message = "Not a file: " + status;
  assertFalse(status.isDirectory(), message);
  assertTrue(status.isFile(), message);
}
/**
 * Verifies that mkdirs on a directory name ending with a period is rejected
 * with {@link IllegalArgumentException}, while a name without a trailing
 * period is created successfully.
 */
@Test
public void testMkdirTrailingPeriodDirName() throws IOException {
  boolean exceptionThrown = false;
  final AzureBlobFileSystem fs = getFileSystem();
  Path nontrailingPeriodDir = path("testTrailingDir/dir");
  Path trailingPeriodDir = new Path("testMkdirTrailingDir/dir.");
  assertMkdirs(fs, nontrailingPeriodDir);
  try {
    fs.mkdirs(trailingPeriodDir);
  } catch (IllegalArgumentException e) {
    // Expected: names ending in '.' are invalid.
    exceptionThrown = true;
  }
  // Fixed message: this test creates a directory, not a file.
  assertTrue(exceptionThrown, "Attempt to create directory that ended with a dot should"
      + " throw IllegalArgumentException");
}
/**
 * Verifies that creating a file whose name ends with a period is rejected
 * with {@link IllegalArgumentException}, while a name without a trailing
 * period is created successfully.
 */
@Test
public void testCreateTrailingPeriodFileName() throws IOException {
  boolean exceptionThrown = false;
  final AzureBlobFileSystem fs = getFileSystem();
  Path trailingPeriodFile = new Path("testTrailingDir/file.");
  Path nontrailingPeriodFile = path("testCreateTrailingDir/file");
  createFile(fs, nontrailingPeriodFile, false, new byte[0]);
  // Fixed message: the existence check is on the NON-trailing-period file.
  assertPathExists(fs, "Non-trailing period file does not exist",
      nontrailingPeriodFile);
  try {
    createFile(fs, trailingPeriodFile, false, new byte[0]);
  } catch (IllegalArgumentException e) {
    // Expected: names ending in '.' are invalid.
    exceptionThrown = true;
  }
  assertTrue(exceptionThrown, "Attempt to create file that ended with a dot should"
      + " throw IllegalArgumentException");
}
/**
 * Verifies that renaming a file to a name ending with a period is rejected
 * with {@link IllegalArgumentException}.
 */
@Test
public void testRenameTrailingPeriodFile() throws IOException {
  boolean exceptionThrown = false;
  final AzureBlobFileSystem fs = getFileSystem();
  Path nonTrailingPeriodFile = path("testTrailingDir/file");
  Path trailingPeriodFile = new Path("testRenameTrailingDir/file.");
  createFile(fs, nonTrailingPeriodFile, false, new byte[0]);
  try {
    rename(fs, nonTrailingPeriodFile, trailingPeriodFile);
  } catch (IllegalArgumentException e) {
    // Expected: destination names ending in '.' are invalid.
    exceptionThrown = true;
  }
  // Fixed message: this test renames a file, it does not create one.
  assertTrue(exceptionThrown, "Attempt to rename file to a name that ended with a dot should"
      + " throw IllegalArgumentException");
}
/**
 * Matrix driver for {@link #testEmptyListingInSubsequentCallInternal}.
 * Argument order per call: first-page continuation token, whether the first
 * page is empty, second-page continuation token, whether the second page is
 * empty, expected number of listPath invocations, expected final listing size.
 */
@Test
public void testEmptyListingInSubsequentCall() throws IOException {
  // First page empty: listing ends once no further continuation token follows.
  testEmptyListingInSubsequentCallInternal(EMPTY_STRING, true, EMPTY_STRING,
      true, 1, 0);
  testEmptyListingInSubsequentCallInternal(EMPTY_STRING, true, EMPTY_STRING,
      false, 1, 0);
  testEmptyListingInSubsequentCallInternal(EMPTY_STRING, true, TEST_CONTINUATION_TOKEN,
      true, 1, 0);
  testEmptyListingInSubsequentCallInternal(EMPTY_STRING, true, TEST_CONTINUATION_TOKEN,
      false, 1, 0);
  // First page empty but with a continuation token: a second call must follow.
  testEmptyListingInSubsequentCallInternal(TEST_CONTINUATION_TOKEN, true, EMPTY_STRING,
      true, 2, 0);
  testEmptyListingInSubsequentCallInternal(TEST_CONTINUATION_TOKEN, true, EMPTY_STRING,
      false, 2, 1);
  testEmptyListingInSubsequentCallInternal(TEST_CONTINUATION_TOKEN + 1, true, TEST_CONTINUATION_TOKEN + 2,
      true, 3, 0);
  testEmptyListingInSubsequentCallInternal(TEST_CONTINUATION_TOKEN + 1, true, TEST_CONTINUATION_TOKEN + 2,
      false, 3, 1);
  // First page non-empty variants.
  testEmptyListingInSubsequentCallInternal(EMPTY_STRING, false, EMPTY_STRING,
      true, 1, 1);
  testEmptyListingInSubsequentCallInternal(EMPTY_STRING, false, EMPTY_STRING,
      false, 1, 1);
  testEmptyListingInSubsequentCallInternal(EMPTY_STRING, false, TEST_CONTINUATION_TOKEN,
      true, 1, 1);
  testEmptyListingInSubsequentCallInternal(EMPTY_STRING, false, TEST_CONTINUATION_TOKEN,
      false, 1, 1);
  testEmptyListingInSubsequentCallInternal(TEST_CONTINUATION_TOKEN, false, EMPTY_STRING,
      true, 2, 1);
  testEmptyListingInSubsequentCallInternal(TEST_CONTINUATION_TOKEN, false, EMPTY_STRING,
      false, 2, 2);
  testEmptyListingInSubsequentCallInternal(TEST_CONTINUATION_TOKEN + 1, false, TEST_CONTINUATION_TOKEN + 2,
      true, 3, 1);
  testEmptyListingInSubsequentCallInternal(TEST_CONTINUATION_TOKEN + 1, false, TEST_CONTINUATION_TOKEN + 2,
      false, 3, 2);
}
/**
 * Drives a scripted three-page listing where pages may be empty and may or
 * may not carry continuation tokens, then verifies the number of listPath
 * calls, the tokens propagated between pages, and the final listing size.
 *
 * @param firstCT continuation token returned with the first page
 * @param isfirstEmpty whether the first page carries no entries
 * @param secondCT continuation token returned with the second page
 * @param isSecondEmpty whether the second page carries no entries
 * @param expectedInvocations expected number of listPath invocations
 * @param expectedSize expected number of entries in the final listing
 * @throws IOException on filesystem failure
 */
private void testEmptyListingInSubsequentCallInternal(String firstCT,
    boolean isfirstEmpty, String secondCT, boolean isSecondEmpty,
    int expectedInvocations, int expectedSize) throws IOException {
  assumeBlobServiceType();
  AzureBlobFileSystem spiedFs = Mockito.spy(getFileSystem());
  AzureBlobFileSystemStore spiedStore = Mockito.spy(spiedFs.getAbfsStore());
  // One entry per page so that continuation tokens are actually exercised.
  spiedStore.getAbfsConfiguration().setListMaxResults(1);
  AbfsBlobClient spiedClient = Mockito.spy(spiedStore.getClientHandler().getBlobClient());
  Mockito.doReturn(spiedStore).when(spiedFs).getAbfsStore();
  Mockito.doReturn(spiedClient).when(spiedStore).getClient();
  spiedFs.mkdirs(new Path("/testPath"));
  VersionedFileStatus status1 = new VersionedFileStatus(
      "owner", "group", null, false, 0, false, 0, 0, 0,
      new Path("/testPath/file1"), "version", "encryptionContext");
  VersionedFileStatus status2 = new VersionedFileStatus(
      "owner", "group", null, false, 0, false, 0, 0, 0,
      new Path("/testPath/file2"), "version", "encryptionContext");
  List<VersionedFileStatus> mockedList1 = new ArrayList<>();
  mockedList1.add(status1);
  List<VersionedFileStatus> mockedList2 = new ArrayList<>();
  mockedList2.add(status2);
  // Page 1: configurable token and emptiness.
  ListResponseData listResponseData1 = new ListResponseData();
  listResponseData1.setContinuationToken(firstCT);
  listResponseData1.setFileStatusList(isfirstEmpty ? new ArrayList<>() : mockedList1);
  listResponseData1.setOp(Mockito.mock(AbfsRestOperation.class));
  // Page 2: configurable token and emptiness.
  ListResponseData listResponseData2 = new ListResponseData();
  listResponseData2.setContinuationToken(secondCT);
  listResponseData2.setFileStatusList(isSecondEmpty ? new ArrayList<>() : mockedList2);
  listResponseData2.setOp(Mockito.mock(AbfsRestOperation.class));
  // Page 3: always empty with no token, terminating the pagination.
  ListResponseData listResponseData3 = new ListResponseData();
  listResponseData3.setContinuationToken(EMPTY_STRING);
  listResponseData3.setFileStatusList(new ArrayList<>());
  listResponseData3.setOp(Mockito.mock(AbfsRestOperation.class));
  // Serve the scripted pages in order, recording the continuation token
  // (argument index 3) passed on each invocation.
  final int[] itr = new int[1];
  final String[] continuationTokenUsed = new String[3];
  Mockito.doAnswer(invocationOnMock -> {
    if (itr[0] == 0) {
      itr[0]++;
      continuationTokenUsed[0] = invocationOnMock.getArgument(3);
      return listResponseData1;
    } else if (itr[0] == 1) {
      itr[0]++;
      continuationTokenUsed[1] = invocationOnMock.getArgument(3);
      return listResponseData2;
    }
    continuationTokenUsed[2] = invocationOnMock.getArgument(3);
    return listResponseData3;
  }).when(spiedClient).listPath(eq("/testPath"), eq(false), eq(1),
      any(), any(TracingContext.class), any());
  FileStatus[] list = spiedFs.listStatus(new Path("/testPath"));
  Mockito.verify(spiedClient, times(expectedInvocations))
      .listPath(eq("/testPath"), eq(false), eq(1),
          any(), any(TracingContext.class), any());
  Mockito.verify(spiedClient, times(1))
      .postListProcessing(eq("/testPath"), any(), any(), any());
  assertThat(list).hasSize(expectedSize);
  // An empty overall listing triggers a getPathStatus existence probe.
  if (expectedSize == 0) {
    Mockito.verify(spiedClient, times(1))
        .getPathStatus(eq("/testPath"), any(), eq(null), eq(false));
  } else {
    Mockito.verify(spiedClient, times(0))
        .getPathStatus(eq("/testPath"), any(), eq(null), eq(false));
  }
  assertThat(continuationTokenUsed[0])
      .describedAs("First continuation token used is not as expected")
      .isNull();
  if (expectedInvocations > 1) {
    assertThat(continuationTokenUsed[1])
        .describedAs("Second continuation token used is not as expected")
        .isEqualTo(firstCT);
  }
  if (expectedInvocations > 2) {
    assertThat(continuationTokenUsed[2])
        .describedAs("Third continuation token used is not as expected")
        .isEqualTo(secondCT);
  }
}
/**
 * Test to verify that listStatus returns the correct file status for all
 * types of paths viz. implicit directory, explicit directory, file.
 * @throws Exception if there is an error or test assertions fails.
 */
@Test
public void testListStatusWithImplicitExplicitChildren() throws Exception {
  final AzureBlobFileSystem fs = getFileSystem();
  fs.setWorkingDirectory(new Path(ROOT_PATH));
  Path root = new Path(ROOT_PATH);
  // Create an implicit directory under root
  Path dir = new Path("a");
  Path fileInsideDir = new Path("a/file");
  createAzCopyFolder(dir);
  // Assert that implicit directory is returned
  FileStatus[] fileStatuses = fs.listStatus(root);
  assertThat(fileStatuses.length)
      .describedAs("List size is not expected").isEqualTo(1);
  assertImplicitDirectoryFileStatus(fileStatuses[0], fs.makeQualified(dir));
  // Creating a child file also creates a marker blob for the directory,
  // converting it from implicit to explicit.
  fs.create(fileInsideDir);
  // Assert that only one entry of explicit directory is returned
  fileStatuses = fs.listStatus(root);
  assertThat(fileStatuses.length)
      .describedAs("List size is not expected").isEqualTo(1);
  assertExplicitDirectoryFileStatus(fileStatuses[0], fs.makeQualified(dir));
  // Create a file under root
  Path file1 = new Path("b");
  fs.create(file1);
  // Assert that two entries are returned in alphabetic order.
  fileStatuses = fs.listStatus(root);
  assertThat(fileStatuses.length)
      .describedAs("List size is not expected").isEqualTo(2);
  assertExplicitDirectoryFileStatus(fileStatuses[0], fs.makeQualified(dir));
  assertFilePathFileStatus(fileStatuses[1], fs.makeQualified(file1));
  // Create another implicit directory under root.
  Path dir2 = new Path("c");
  createAzCopyFolder(dir2);
  // Assert that three entries are returned in alphabetic order.
  fileStatuses = fs.listStatus(root);
  assertThat(fileStatuses.length)
      .describedAs("List size is not expected").isEqualTo(3);
  assertExplicitDirectoryFileStatus(fileStatuses[0], fs.makeQualified(dir));
  assertFilePathFileStatus(fileStatuses[1], fs.makeQualified(file1));
  assertImplicitDirectoryFileStatus(fileStatuses[2], fs.makeQualified(dir2));
}
/**
 * Verifies that listStatus called directly on an implicit directory path
 * returns its children, and that a returned child path can be listed too.
 * @throws Exception if there is an error or test assertions fails.
 */
@Test
public void testListStatusOnImplicitDirectoryPath() throws Exception {
  final AzureBlobFileSystem fs = getFileSystem();
  Path implicitPath = new Path("/implicitDir");
  createAzCopyFolder(implicitPath);
  FileStatus[] firstListing = fs.listStatus(implicitPath);
  assertThat(firstListing.length)
      .describedAs("List size is not expected").isGreaterThanOrEqualTo(1);
  assertImplicitDirectoryFileStatus(firstListing[0],
      fs.makeQualified(firstListing[0].getPath()));
  FileStatus[] secondListing = fs.listStatus(
      new Path(firstListing[0].getPath().toString()));
  assertThat(secondListing.length)
      .describedAs("List size is not expected").isGreaterThanOrEqualTo(1);
  assertFilePathFileStatus(secondListing[0],
      fs.makeQualified(secondListing[0].getPath()));
}
/** Listing an empty explicit directory must return no entries. */
@Test
public void testListStatusOnEmptyDirectory() throws Exception {
  final AzureBlobFileSystem fs = getFileSystem();
  Path emptyDir = new Path("/emptyDir");
  fs.mkdirs(emptyDir);
  FileStatus[] listing = fs.listStatus(emptyDir);
  assertThat(listing.length)
      .describedAs("List size is not expected").isEqualTo(0);
}
/** A rename-pending JSON file itself must still be listable as a file. */
@Test
public void testListStatusOnRenamePendingJsonFile() throws Exception {
  final AzureBlobFileSystem fs = getFileSystem();
  Path pendingJsonFile = new Path("/hbase/A/A-" + SUFFIX);
  fs.create(pendingJsonFile);
  FileStatus[] listing = fs.listStatus(pendingJsonFile);
  assertThat(listing.length)
      .describedAs("List size is not expected").isEqualTo(1);
  assertFilePathFileStatus(listing[0], fs.makeQualified(listing[0].getPath()));
}
/**
 * Verifies manual pagination: with maxResults=1 the first listPath call must
 * return one entry plus a continuation token, and the second call (using
 * that token) must return the remaining entry with no further token.
 */
@Test
public void testContinuationTokenAcrossListStatus() throws Exception {
  final AzureBlobFileSystem fs = getFileSystem();
  Path path = new Path("/testContinuationToken");
  fs.mkdirs(path);
  fs.create(new Path(path + "/file1"));
  fs.create(new Path(path + "/file2"));
  fs.listStatus(path);
  ListResponseData firstPage = fs.getAbfsStore().getClient().listPath(
      "/testContinuationToken", false, 1, null, getTestTracingContext(fs, true),
      fs.getAbfsStore().getUri());
  assertThat(firstPage.getContinuationToken())
      .describedAs("Continuation Token Should not be null").isNotNull();
  assertThat(firstPage.getFileStatusList())
      .describedAs("Listing Size Not as expected").hasSize(1);
  ListResponseData secondPage = fs.getAbfsStore().getClient().listPath(
      "/testContinuationToken", false, 1, firstPage.getContinuationToken(), getTestTracingContext(fs, true),
      fs.getAbfsStore().getUri());
  assertThat(secondPage.getContinuationToken())
      .describedAs("Continuation Token Should be null").isNull();
  assertThat(secondPage.getFileStatusList())
      .describedAs("Listing Size Not as expected").hasSize(1);
}
/**
 * A malformed continuation token must be rejected by the service with an
 * {@link AbfsRestOperationException}.
 */
@Test
public void testInvalidContinuationToken() throws Exception {
  assumeHnsDisabled();
  final AzureBlobFileSystem fs = getFileSystem();
  Path path = new Path("/testInvalidContinuationToken");
  fs.mkdirs(path);
  fs.create(new Path(path + "/file1"));
  fs.create(new Path(path + "/file2"));
  intercept(AbfsRestOperationException.class, () ->
      fs.getAbfsStore().getClient().listPath(
          "/testInvalidContinuationToken", false, 1, "invalidToken",
          getTestTracingContext(fs, true), fs.getAbfsStore().getUri()));
}
/**
 * An empty-string continuation token must be treated like "no token": the
 * first page (maxResults=1) is returned along with a non-null token for the
 * next page.
 */
@Test
public void testEmptyContinuationToken() throws Exception {
  final AzureBlobFileSystem fs = getFileSystem();
  // Fixed copy-paste: this test previously reused the sibling test's path
  // "/testInvalidContinuationToken", which mislabeled its data and risked
  // interference when both tests share a container.
  Path path = new Path("/testEmptyContinuationToken");
  fs.mkdirs(path);
  fs.create(new Path(path + "/file1"));
  fs.create(new Path(path + "/file2"));
  ListResponseData listResponseData = fs.getAbfsStore().getClient().listPath(
      "/testEmptyContinuationToken", false, 1, "",
      getTestTracingContext(fs, true), fs.getAbfsStore().getUri());
  assertThat(listResponseData.getContinuationToken())
      .describedAs("Continuation Token Should Not be null").isNotNull();
  assertThat(listResponseData.getFileStatusList())
      .describedAs("Listing Size Not as expected").hasSize(1);
}
/**
 * Test to verify that listStatus returns the correct file status
 * after removing duplicates across multiple iterations of list blobs.
 * Also verifies that in case of non-empty explicit dir,
 * entry corresponding to marker blob is returned.
 * @throws Exception if test fails.
 */
@Test
public void testDuplicateEntriesAcrossListBlobIterations() throws Exception {
  AzureBlobFileSystem fs = Mockito.spy(getFileSystem());
  AzureBlobFileSystemStore store = Mockito.spy(fs.getAbfsStore());
  // Page size 1 forces one server round-trip per returned blob, so duplicate
  // entries (marker blob + virtual directory) span separate pages.
  store.getAbfsConfiguration().setListMaxResults(1);
  AbfsClient client = Mockito.spy(store.getClient());
  Mockito.doReturn(store).when(fs).getAbfsStore();
  Mockito.doReturn(client).when(store).getClient();
  /*
   * Following entries will be created inside the root path.
   * 0. /A - implicit directory without any marker blob
   * 1. /a - marker file for explicit directory
   * 2. /a/file1 - normal file inside explicit directory
   * 3. /b - normal file inside root
   * 4. /c - marker file for explicit directory
   * 5. /c.bak - marker file for explicit directory
   * 6. /c.bak/file2 - normal file inside explicit directory
   * 7. /c/file3 - normal file inside explicit directory
   * 8. /d - implicit directory
   * 9. /e - marker file for explicit directory
   * 10. /e/file4 - normal file inside explicit directory
   */
  // Create Path 0
  createAzCopyFolder(new Path("/A"));
  // Create Path 1 and 2.
  fs.create(new Path("/a/file1"));
  // Create Path 3
  fs.create(new Path("/b"));
  // Create Path 4 and 7
  fs.create(new Path("/c/file3"));
  // Create Path 5 and 6
  fs.create(new Path("/c.bak/file2"));
  // Create Path 8
  createAzCopyFolder(new Path("/d"));
  // Create Path 9 and 10
  fs.create(new Path("/e/file4"));
  FileStatus[] fileStatuses = fs.listStatus(new Path(ROOT_PATH));
  // Assert that client.listPath was called 11 times.
  // This will assert server returned 11 entries in total.
  Mockito.verify(client, Mockito.times(TOTAL_NUMBER_OF_PATHS))
      .listPath(eq(ROOT_PATH), eq(false), eq(1), any(), any(), any());
  // Assert that after duplicate removal, only 7 unique entries are returned.
  assertThat(fileStatuses.length)
      .describedAs("List size is not expected").isEqualTo(NUMBER_OF_UNIQUE_PATHS);
  // Assert that for duplicates, entry corresponding to marker blob is returned.
  assertImplicitDirectoryFileStatus(fileStatuses[0], fs.makeQualified(new Path("/A")));
  assertExplicitDirectoryFileStatus(fileStatuses[1], fs.makeQualified(new Path("/a")));
  assertFilePathFileStatus(fileStatuses[2], fs.makeQualified(new Path("/b")));
  assertExplicitDirectoryFileStatus(fileStatuses[3], fs.makeQualified(new Path("/c")));
  assertExplicitDirectoryFileStatus(fileStatuses[4], fs.makeQualified(new Path("/c.bak")));
  assertImplicitDirectoryFileStatus(fileStatuses[5], fs.makeQualified(new Path("/d")));
  assertExplicitDirectoryFileStatus(fileStatuses[6], fs.makeQualified(new Path("/e")));
  // Assert that there are no duplicates in the returned file statuses.
  Set<Path> uniquePaths = new HashSet<>();
  for (FileStatus fileStatus : fileStatuses) {
    assertThat(uniquePaths.add(fileStatus.getPath()))
        .describedAs("Duplicate Entries found")
        .isTrue();
  }
}
/** Asserts that the status is a file at the given qualified path with a set mtime. */
private void assertFilePathFileStatus(final FileStatus fileStatus,
    final Path qualifiedPath) {
  assertThat(fileStatus.getPath())
      .describedAs("Path Not as expected").isEqualTo(qualifiedPath);
  assertThat(fileStatus.isFile())
      .describedAs("Expecting a File Path").isTrue();
  assertThat(fileStatus.isDirectory())
      .describedAs("Expecting a File Path").isFalse();
  assertThat(fileStatus.getModificationTime()).isNotEqualTo(0);
}
/**
 * Asserts that the status is a directory whose backing-store state is
 * implicit (no marker blob) and whose modification time is unset (0).
 */
private void assertImplicitDirectoryFileStatus(final FileStatus fileStatus,
    final Path qualifiedPath) throws Exception {
  assertDirectoryFileStatus(fileStatus, qualifiedPath);
  // Bug fix: the helper's verdict was previously computed but never
  // asserted, so a non-implicit directory could pass silently.
  assertThat(DirectoryStateHelper.isImplicitDirectory(qualifiedPath, getFileSystem(),
      getTestTracingContext(getFileSystem(), true)))
      .describedAs("Expecting an implicit directory state for " + qualifiedPath)
      .isTrue();
  assertThat(fileStatus.getModificationTime())
      .describedAs("Last Modified Time Not as Expected").isEqualTo(0);
}
/**
 * Asserts that the status is a directory whose backing-store state is
 * explicit (marker blob present) and whose modification time is set.
 */
private void assertExplicitDirectoryFileStatus(final FileStatus fileStatus,
    final Path qualifiedPath) throws Exception {
  assertDirectoryFileStatus(fileStatus, qualifiedPath);
  // Bug fix: the helper's verdict was previously computed but never
  // asserted, so a non-explicit directory could pass silently.
  assertThat(DirectoryStateHelper.isExplicitDirectory(qualifiedPath, getFileSystem(),
      getTestTracingContext(getFileSystem(), true)))
      .describedAs("Expecting an explicit directory state for " + qualifiedPath)
      .isTrue();
  assertThat(fileStatus.getModificationTime())
      .describedAs("Last Modified Time Not as Expected").isNotEqualTo(0);
}
/** Asserts that the status is a zero-length directory at the given path. */
private void assertDirectoryFileStatus(final FileStatus fileStatus,
    final Path qualifiedPath) {
  assertThat(fileStatus.getPath())
      .describedAs("Path Not as Expected").isEqualTo(qualifiedPath);
  assertThat(fileStatus.isDirectory())
      .describedAs("Expecting a Directory Path").isTrue();
  assertThat(fileStatus.isFile())
      .describedAs("Expecting a Directory Path").isFalse();
  assertThat(fileStatus.getLen())
      .describedAs("Content Length Not as Expected").isEqualTo(0);
}
/**
 * Helper method to mock the AbfsRestOperation and modify the request headers
 * of PutBlob calls: the real HDI-isfolder header is stripped and replaced by
 * the supplied metadata header name(s), each with value TRUE.
 *
 * @param abfsBlobClient the mocked AbfsBlobClient
 * @param newHeader the header name(s) to add in place of the old one
 */
public static void mockAbfsRestOperation(AbfsBlobClient abfsBlobClient, String... newHeader) {
  Mockito.doAnswer(invocation -> {
    // Argument index 3 of getAbfsRestOperation is the request-header list.
    List<AbfsHttpHeader> requestHeaders = invocation.getArgument(3);
    // Remove the actual HDI config header and add the new one
    requestHeaders.removeIf(header ->
        HttpHeaderConfigurations.X_MS_META_HDI_ISFOLDER.equals(header.getName()));
    for (String header : newHeader) {
      requestHeaders.add(new AbfsHttpHeader(X_MS_METADATA_PREFIX + header, TRUE));
    }
    // Call the real method so the PutBlob request still executes with the
    // mutated header list.
    return invocation.callRealMethod();
  }).when(abfsBlobClient).getAbfsRestOperation(eq(AbfsRestOperationType.PutBlob),
      eq(HTTP_METHOD_PUT), any(URL.class), anyList());
}
/**
 * Helper method to mock the AbfsBlobClient and set up the client handler.
 *
 * @param fs the AzureBlobFileSystem instance
 * @return the mocked AbfsBlobClient
 */
public static AbfsBlobClient mockIngressClientHandler(AzureBlobFileSystem fs) {
  AzureBlobFileSystemStore spiedStore = Mockito.spy(fs.getAbfsStore());
  AbfsClientHandler spiedHandler = Mockito.spy(spiedStore.getClientHandler());
  AbfsBlobClient spiedBlobClient =
      (AbfsBlobClient) Mockito.spy(spiedHandler.getClient());
  fs.getAbfsStore().setClient(spiedBlobClient);
  fs.getAbfsStore().setClientHandler(spiedHandler);
  Mockito.doReturn(spiedBlobClient).when(spiedHandler).getIngressClient();
  return spiedBlobClient;
}
/**
 * Test directory status with different HDI folder configuration,
 * verifying the correct header and directory state.
 *
 * @param expected whether the path should be recognized as a directory
 * @param configName metadata header name(s) substituted for the real
 *        HDI-isfolder header on the PutBlob request
 */
private void testIsDirectory(boolean expected, String... configName) throws Exception {
  try (AzureBlobFileSystem fs = Mockito.spy(
      (AzureBlobFileSystem) FileSystem.newInstance(getFileSystem().getConf()))) {
    assumeBlobServiceType();
    AbfsBlobClient abfsBlobClient = mockIngressClientHandler(fs);
    // Mock the operation to modify the headers
    mockAbfsRestOperation(abfsBlobClient, configName);
    // Create the path and invoke mkdirs method
    Path path = new Path("/testPath");
    fs.mkdirs(path);
    // Assert that the response header has the updated value
    FileStatus[] fileStatus = fs.listStatus(path.getParent());
    AbfsHttpOperation op = abfsBlobClient.getPathStatus(
        path.toUri().getPath(),
        true, getTestTracingContext(fs, true),
        null).getResult();
    assertThat(abfsBlobClient.checkIsDir(op))
        .describedAs("Directory should be marked as " + expected)
        .isEqualTo(expected);
    // Exactly one child is expected under the parent.
    assertThat(fileStatus.length)
        .describedAs("Expected directory state: " + expected)
        .isEqualTo(1);
    // Verify the listing reflects the expected directory state.
    assertThat(fileStatus[0].isDirectory())
        .describedAs("Expected directory state: " + expected)
        .isEqualTo(expected);
    fs.delete(path, true);
  }
}
/**
 * Test to verify the directory status with different HDI folder configurations.
 * Verifying the correct header and directory state.
 */
@Test
public void testIsDirectoryWithDifferentCases() throws Exception {
  // Any casing of the hdi_isfolder metadata key must mark a directory.
  for (String casing : new String[] {
      "HDI_ISFOLDER", "Hdi_ISFOLDER", "Hdi_isfolder", "hdi_isfolder"}) {
    testIsDirectory(true, casing);
  }
  // A different key name must NOT mark a directory.
  testIsDirectory(false, "Hdi_isfolder1");
  // Multiple keys: a directory as long as one valid casing is present.
  testIsDirectory(true, "HDI_ISFOLDER", "Hdi_ISFOLDER", "Hdi_isfolder");
  testIsDirectory(true, "HDI_ISFOLDER", "Hdi_ISFOLDER1", "Test");
}
}
|
googleapis/google-cloud-java | 37,442 | java-telcoautomation/proto-google-cloud-telcoautomation-v1alpha1/src/main/java/com/google/cloud/telcoautomation/v1alpha1/ListDeploymentsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/telcoautomation/v1alpha1/telcoautomation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.telcoautomation.v1alpha1;
/**
*
*
* <pre>
* Request object for `ListDeployments`.
* </pre>
*
* Protobuf type {@code google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest}
*/
public final class ListDeploymentsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest)
ListDeploymentsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDeploymentsRequest.newBuilder() to construct.
private ListDeploymentsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default-instance constructor: string fields start as empty strings so the
// accessors below never observe null.
private ListDeploymentsRequest() {
  parent_ = "";
  filter_ = "";
  pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Invoked reflectively by the protobuf runtime to create fresh instances.
  return new ListDeploymentsRequest();
}
// Returns the generated descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
      .internal_static_google_cloud_telcoautomation_v1alpha1_ListDeploymentsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Wires the generated field-accessor table to this message class and its Builder.
  return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
      .internal_static_google_cloud_telcoautomation_v1alpha1_ListDeploymentsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest.class,
          com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;

// Holds either a String or a ByteString; decoded lazily by getParent().
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 *
 *
 * <pre>
 * Required. The name of parent orchestration cluster resource.
 * Format should be -
 * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Field was parsed as a ByteString: decode once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The name of parent orchestration cluster resource.
 * Format should be -
 * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString for future byte accesses.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int FILTER_FIELD_NUMBER = 4;

// Holds either a String or a ByteString; decoded lazily by getFilter().
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";

/**
 *
 *
 * <pre>
 * Optional. Filtering only supports equality on deployment state.
 * It should be in the form: "state = DRAFT". `OR` operator can be used to
 * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED".
 * </pre>
 *
 * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The filter.
 */
@java.lang.Override
public java.lang.String getFilter() {
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Field was parsed as a ByteString: decode once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    filter_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Optional. Filtering only supports equality on deployment state.
 * It should be in the form: "state = DRAFT". `OR` operator can be used to
 * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED".
 * </pre>
 *
 * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for filter.
 */
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString for future byte accesses.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    filter_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;

// Proto3 int32: defaults to 0, meaning the server chooses the page size.
private int pageSize_ = 0;

/**
 *
 *
 * <pre>
 * Optional. The maximum number of deployments to return per page.
 * </pre>
 *
 * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  // pageToken_ holds either a java.lang.String or a ByteString; the getters
  // below lazily convert between the two representations and cache the result.
  private volatile java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * Optional. The page token, received from a previous ListDeployments call.
   * It can be provided to retrieve the subsequent page.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field was parsed as raw bytes; decode as UTF-8 and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. The page token, received from a previous ListDeployments call.
   * It can be provided to retrieve the subsequent page.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    // Mirror of getPageToken(): convert a cached String to UTF-8 bytes on
    // first request and cache the ByteString representation in pageToken_.
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  /**
   * Reports whether all required fields are set. This message has no
   * proto2-style required fields, so the answer is always {@code true};
   * the result is memoized after the first call.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output} in ascending field-number order
   * (1 parent, 2 page_size, 3 page_token, 4 filter). Fields at their proto3
   * defaults (empty string, zero int) are omitted; unknown fields retained
   * from parsing are appended last.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes the serialized byte size of this message, mirroring the field
   * set written by {@link #writeTo}. The result is memoized in
   * {@code memoizedSize} (-1 means "not yet computed").
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Value equality: two ListDeploymentsRequest messages are equal when all
   * four fields (parent, filter, pageSize, pageToken) and their unknown
   * field sets match. Non-message types defer to {@code super.equals}.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest other =
        (com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash code consistent with {@link #equals}: mixes the descriptor, each
   * field number with its value, and the unknown fields. Memoized in
   * {@code memoizedHashCode} (0 means "not yet computed").
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Static parse entry points. Each overload delegates to the shared PARSER
  // (ByteBuffer/ByteString/byte[] variants) or to the GeneratedMessageV3
  // stream helpers (InputStream/CodedInputStream variants). The *delimited*
  // variants read a varint length prefix before the message payload.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: newBuilder() starts from the default instance;
  // newBuilder(prototype) pre-populates from an existing message; toBuilder()
  // avoids a mergeFrom copy when called on the default instance itself.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  /** Framework hook: creates a Builder attached to a parent for change notification. */
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request object for `ListDeployments`.
   * </pre>
   *
   * Protobuf type {@code google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest)
      com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
          .internal_static_google_cloud_telcoautomation_v1alpha1_ListDeploymentsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
          .internal_static_google_cloud_telcoautomation_v1alpha1_ListDeploymentsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest.class,
              com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest.Builder.class);
    }

    // Construct using com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    /** Resets every field to its proto3 default and clears all has-bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      filter_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
          .internal_static_google_cloud_telcoautomation_v1alpha1_ListDeploymentsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest
        getDefaultInstanceForType() {
      return com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest.getDefaultInstance();
    }

    /** Builds the message, throwing if any required field is unset (none here). */
    @java.lang.Override
    public com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest build() {
      com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest buildPartial() {
      com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest result =
          new com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies into the result only those fields whose has-bit is set in
    // bitField0_; untouched fields keep the message's defaults.
    private void buildPartial0(
        com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest) {
        return mergeFrom((com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: copies only fields that are non-default in `other`,
    // setting the corresponding has-bit for each.
    public Builder mergeFrom(
        com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest other) {
      if (other
          == com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Tags are (field_number << 3 | wire_type):
    // 10 = parent (1, length-delimited), 16 = page_size (2, varint),
    // 26 = page_token (3, length-delimited), 34 = filter (4, length-delimited).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Has-bit mask: 0x1 = parent, 0x2 = filter, 0x4 = pageSize, 0x8 = pageToken.
    private int bitField0_;

    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The name of parent orchestration cluster resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      // Lazily decode a ByteString representation to String and cache it.
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of parent orchestration cluster resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of parent orchestration cluster resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of parent orchestration cluster resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of parent orchestration cluster resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Reject bytes that are not valid UTF-8 before accepting them.
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Filtering only supports equality on deployment state.
     * It should be in the form: "state = DRAFT". `OR` operator can be used to
     * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Filtering only supports equality on deployment state.
     * It should be in the form: "state = DRAFT". `OR` operator can be used to
     * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Filtering only supports equality on deployment state.
     * It should be in the form: "state = DRAFT". `OR` operator can be used to
     * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Filtering only supports equality on deployment state.
     * It should be in the form: "state = DRAFT". `OR` operator can be used to
     * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Filtering only supports equality on deployment state.
     * It should be in the form: "state = DRAFT". `OR` operator can be used to
     * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private int pageSize_;
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of deployments to return per page.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of deployments to return per page.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of deployments to return per page.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * Optional. The page token, received from a previous ListDeployments call.
     * It can be provided to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The page token, received from a previous ListDeployments call.
     * It can be provided to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The page token, received from a previous ListDeployments call.
     * It can be provided to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The page token, received from a previous ListDeployments call.
     * It can be provided to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The page token, received from a previous ListDeployments call.
     * It can be provided to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest)
  // Singleton default (all-fields-default) instance, shared by getDefaultInstance()
  // and used as the identity check in toBuilder()/mergeFrom().
  private static final com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest();
  }

  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser: delegates to a fresh Builder's mergeFrom and returns a
  // partially-built message on failure via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<ListDeploymentsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListDeploymentsRequest>() {
        @java.lang.Override
        public ListDeploymentsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListDeploymentsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListDeploymentsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// NOTE(review): removed stray non-source text ("Subsets and Splits" /
// "No community queries yet" web-page residue) that had been accidentally
// appended after the file's closing brace; it was not Java and broke compilation.