text
stringlengths
7
1.01M
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.util; /** * Holder for a list of constants describing which bugs which have not been * fixed. * * <p>You can use these constants to control the flow of your code. For example, * suppose that bug CALCITE-123 causes the "INSERT" statement to return an * incorrect row-count, and you want to disable unit tests. You might use the * constant in your code as follows: * * <blockquote> * <pre>Statement stmt = connection.createStatement(); * int rowCount = stmt.execute( * "INSERT INTO FemaleEmps SELECT * FROM Emps WHERE gender = 'F'"); * if (Bug.CALCITE_123_FIXED) { * assertEquals(rowCount, 5); * }</pre> * </blockquote> * * <p>The usage of the constant is a convenient way to identify the impact of * the bug. When someone fixes the bug, they will remove the constant and all * usages of it. 
Also, the constant helps track the propagation of the fix: as * the fix is integrated into other branches, the constant will be removed from * those branches.</p> * */ public abstract class Bug { //~ Static fields/initializers --------------------------------------------- // ----------------------------------------------------------------------- // Developers should create new fields here, in their own section. This // will make merge conflicts much less likely than if everyone is // appending. public static final boolean DT239_FIXED = false; public static final boolean DT785_FIXED = false; // jhyde /** * Whether <a href="http://issues.eigenbase.org/browse/FNL-3">issue * Fnl-3</a> is fixed. */ public static final boolean FNL3_FIXED = false; /** * Whether <a href="http://issues.eigenbase.org/browse/FRG-327">issue * FRG-327: AssertionError while translating IN list that contains null</a> * is fixed. */ public static final boolean FRG327_FIXED = false; /** * Whether <a href="http://issues.eigenbase.org/browse/FRG-377">issue * FRG-377: Regular character set identifiers defined in SQL:2008 spec like * :ALPHA:, * :UPPER:, :LOWER:, ... etc. are not yet implemented in * SIMILAR TO expressions.</a> is fixed. */ public static final boolean FRG377_FIXED = false; /** * Whether dtbug1684 "CURRENT_DATE not implemented in fennel calc" is fixed. */ public static final boolean DT1684_FIXED = false; /** * Whether <a href="http://issues.eigenbase.org/browse/FNL-25">issue * FNL-25</a> is fixed. (also filed as dtbug 153) */ public static final boolean FNL25_FIXED = false; /** * Whether <a href="http://issues.eigenbase.org/browse/FRG-73">issue FRG-73: * miscellaneous bugs with nested comments</a> is fixed. */ public static final boolean FRG73_FIXED = false; /** * Whether <a href="http://issues.eigenbase.org/browse/FRG-78">issue FRG-78: * collation clause should be on expression instead of identifier</a> is * fixed. 
*/ public static final boolean FRG78_FIXED = false; /** * Whether <a href="http://issues.eigenbase.org/browse/FRG-189">issue * FRG-189: FarragoAutoVmOperatorTest.testSelect fails</a> is fixed. */ public static final boolean FRG189_FIXED = false; /** * Whether <a href="http://issues.eigenbase.org/browse/FRG-254">issue * FRG-254: environment-dependent failure for * SqlOperatorTest.testPrefixPlusOperator</a> is fixed. */ public static final boolean FRG254_FIXED = false; /** * Whether <a href="http://issues.eigenbase.org/browse/FRG-282">issue * FRG-282: Support precision in TIME and TIMESTAMP data types</a> is fixed. */ public static final boolean FRG282_FIXED = false; /** * Whether <a href="http://issues.eigenbase.org/browse/FRG-296">issue * FRG-296: SUBSTRING(string FROM regexp FOR regexp)</a> is fixed. */ public static final boolean FRG296_FIXED = false; /** * Whether <a href="http://issues.eigenbase.org/browse/FRG-375">issue * FRG-375: The expression VALUES ('cd' SIMILAR TO '[a-e^c]d') returns TRUE. * It should return FALSE.</a> is fixed. */ public static final boolean FRG375_FIXED = false; /** Whether * <a href="https://issues.apache.org/jira/browse/CALCITE-194">[CALCITE-194] * Array items in MongoDB adapter</a> is fixed. */ public static final boolean CALCITE_194_FIXED = false; /** Whether the remaining issues raised in * <a href="https://issues.apache.org/jira/browse/CALCITE-461">[CALCITE-461] * Convert more planner rules to handle grouping sets</a> are fixed. * * <p>Now that [CALCITE-461] is fixed, the tracking bug is * <a href="https://issues.apache.org/jira/browse/CALCITE-574">[CALCITE-574] * Remove org.apache.calcite.util.Bug.CALCITE_461_FIXED</a>. */ public static final boolean CALCITE_461_FIXED = false; /** Whether * <a href="https://issues.apache.org/jira/browse/CALCITE-673">[CALCITE-673] * Timeout executing joins against MySQL</a> is fixed. 
*/ public static final boolean CALCITE_673_FIXED = false; /** Whether * <a href="https://issues.apache.org/jira/browse/CALCITE-1048">[CALCITE-1048] * Make metadata more robust</a> is fixed. */ public static final boolean CALCITE_1048_FIXED = false; /** Whether * <a href="https://issues.apache.org/jira/browse/CALCITE-1045">[CALCITE-1045] * Decorrelate sub-queries in Project and Join</a> is fixed. */ public static final boolean CALCITE_1045_FIXED = false; /** Whether * <a href="https://issues.apache.org/jira/browse/CALCITE-2400">[CALCITE-2400] * Allow standards-compliant column ordering for NATURAL JOIN and JOIN USING * when dynamic tables are used</a> is fixed. */ public static final boolean CALCITE_2400_FIXED = false; /** Whether * <a href="https://issues.apache.org/jira/browse/CALCITE-2401">[CALCITE-2401] * Improve RelMdPredicates performance</a> */ public static final boolean CALCITE_2401_FIXED = false; /** Whether * <a href="https://issues.apache.org/jira/browse/CALCITE-2539">[CALCITE-2539] * Several test case not passed in CalciteSqlOperatorTest.java</a> is fixed. */ public static final boolean CALCITE_2539_FIXED = false; /** * Use this to flag temporary code. */ public static final boolean TODO_FIXED = false; /** * Use this method to flag temporary code. * * <p>Example #1: * <blockquote><pre> * if (Bug.remark("baz fixed") == null) { * baz(); * }</pre></blockquote> * * <p>Example #2: * <blockquote><pre> * /&#42;&#42; &#64;see Bug#remark Remove before checking in &#42;/ * void uselessMethod() {} * </pre></blockquote> */ public static <T> T remark(T remark) { return remark; } /** * Use this method to flag code that should be re-visited after upgrading * a component. * * <p>If the intended change is that a class or member be removed, flag * instead using a {@link Deprecated} annotation followed by a comment such as * "to be removed before 2.0". */ public static boolean upgrade(String remark) { Util.discard(remark); return false; } } // End Bug.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2018 the original author or authors. */ package org.assertj.swing.fixture; /** * Hexadecimal values that represent colors. * * @author Alex Ruiz */ final class ColorHexCodes { static final String BLUE_HEX_CODE = "0000FF"; static final String BLACK_HEX_CODE = "000000"; private ColorHexCodes() { } }
package com.example.threeseasons.maingame; import androidx.appcompat.app.AppCompatActivity; import android.content.Intent; import android.os.Bundle; import android.view.View; import com.example.threeseasons.R; import com.example.threeseasons.data.User; public class NoEndingActivity extends AppCompatActivity implements View.OnClickListener { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_no_ending); } @Override public void onClick(View view) { switch (view.getId()) { case R.id.try_again: Intent BackIntent = new Intent(getApplicationContext(), NewGameActivity.class); BackIntent.putExtra("user", (User) getIntent().getSerializableExtra("user")); startActivity(BackIntent); break; } } }
package com.upseil.game.client;

import static com.upseil.game.Constants.GameInit.*;

import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.ApplicationLogger;
import com.badlogic.gdx.backends.gwt.GwtApplication;
import com.badlogic.gdx.backends.gwt.GwtApplicationConfiguration;
import com.badlogic.gdx.backends.gwt.preloader.Preloader.PreloaderCallback;
import com.badlogic.gdx.backends.gwt.preloader.Preloader.PreloaderState;
import com.github.nmorel.gwtjackson.client.JsonDeserializationContext;
import com.github.nmorel.gwtjackson.client.JsonSerializationContext;
import com.github.nmorel.gwtjackson.client.ObjectMapper;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Style;
import com.google.gwt.event.logical.shared.ResizeEvent;
import com.google.gwt.event.logical.shared.ResizeHandler;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.upseil.game.Constants.GameInit;
import com.upseil.game.GameApplication;
import com.upseil.game.Savegame;
import com.upseil.game.SerializationContext;
import com.upseil.game.domain.Color;
import com.upseil.gdx.gwt.serialization.HtmlCompressingMapper;
import com.upseil.gdx.gwt.util.BrowserConsoleLogger;
import com.upseil.gdx.util.format.DoubleFormatter;
import com.upseil.gdx.util.format.DoubleFormatter.Format;
import com.upseil.gdx.util.properties.Properties;

/**
 * GWT/HTML entry point for the game. Responsibilities visible here:
 * sizing the canvas from "GameInit" properties (fixed size, or
 * min/preferred size with an aspect ratio), keeping the root panel in
 * sync on browser resize, wiring up gwt-jackson based savegame
 * serialization, and rendering a custom preloader animation.
 */
public class HtmlLauncher extends GwtApplication {

    /** gwt-jackson mapper interface; implementation generated via GWT.create. */
    public interface SavegameMapper extends ObjectMapper<Savegame> { }

    // CSS class names toggled on the root panel to center the game
    // horizontally/vertically when it is smaller than the viewport.
    private static final String AutoHorizontalMargin = "auto-horizontal-margin";
    private static final String AutoVerticalMargin = "auto-vertical-margin";

    // NOTE(review): this field deliberately(?) shadows the GameInit enum type;
    // Resources.Instance presumably is a ClientBundle in this package — confirm.
    private static final Properties<GameInit> GameInit =
            Properties.fromPropertiesText(Resources.Instance.gameInitText().getText(), GameInit.class);

    // Aspect ratio (width / height); <= 0 means "no ratio — fill the viewport".
    private float widthHeightRatio = -1;
    private int minWidth;
    private int minHeight;
    private int prefWidth;
    private int prefHeight;

    // Current computed canvas size, updated by calculateSize(...).
    private int width;
    private int height;

    @Override
    public void onModuleLoad() {
        // Install the console logger before super.onModuleLoad(); the
        // setApplicationLogger override below keeps it from being replaced.
        super.setApplicationLogger(new BrowserConsoleLogger());
        setLogLevel(LOG_INFO);

        if (GameInit.contains(Width) && GameInit.contains(Height)) {
            // Fixed-size configuration: min == preferred, ratio derived from it.
            minWidth = GameInit.getInt(Width);
            minHeight = GameInit.getInt(Height);
            prefWidth = minWidth;
            prefHeight = minHeight;
            widthHeightRatio = (float) minWidth / minHeight;
        } else {
            minWidth = GameInit.getInt(MinWidth, 0);
            minHeight = GameInit.getInt(MinHeight, 0);
            if (minWidth > 0 && minHeight > 0) {
                widthHeightRatio = (float) minWidth / minHeight;
            }
            prefWidth = GameInit.getInt(PrefWidth, Integer.MAX_VALUE);
            prefHeight = GameInit.getInt(PrefHeight, Integer.MAX_VALUE);
            if (prefWidth == Integer.MAX_VALUE && prefHeight == Integer.MAX_VALUE) {
                // Neither preferred dimension given: derive the missing one
                // from the min-size ratio, keyed off the larger min dimension.
                if (minWidth > minHeight) {
                    prefHeight = toHeight(prefWidth);
                } else {
                    prefWidth = toWidth(prefHeight);
                }
            } else {
                // Preferred size wins over the min-size ratio.
                widthHeightRatio = (float) prefWidth / prefHeight;
            }
        }
        super.onModuleLoad();
    }

    @Override
    public void setApplicationLogger(ApplicationLogger applicationLogger) {
        // No-op to prevent that the browser console logger is overwritten by super.onModuleLoad()
    }

    @Override
    public ApplicationListener createApplicationListener() {
        // Resize handling can only be wired up after the GWT setup is done.
        setLoadingListener(new LoadingListener() {
            @Override
            public void beforeSetup() { }

            @Override
            public void afterSetup() {
                setupResizing();
            }
        });

        // Savegames: compact JSON (no indent), lenient reads, compressed HTML wrapper.
        JsonSerializationContext serializationContext = JsonSerializationContext.builder().indent(false).build();
        JsonDeserializationContext deserializationContext =
                JsonDeserializationContext.builder().failOnUnknownProperties(false).build();
        SavegameMapper savegameMapper = GWT.create(SavegameMapper.class);
        HtmlCompressingMapper<Savegame> htmlSavegameMapper =
                new HtmlCompressingMapper<>(savegameMapper, serializationContext, deserializationContext);
        htmlSavegameMapper.setCompressing(true);
        SerializationContext context = new SerializationContext(htmlSavegameMapper);
        return new GameApplication(context);
    }

    /** Keeps canvas size, scrollbars and centering classes in sync with the window. */
    private void setupResizing() {
        Window.setMargin("0");
        Window.addResizeHandler(new ResizeHandler() {
            @Override
            public void onResize(ResizeEvent event) {
                int clientWidth = event.getWidth();
                int clientHeight = event.getHeight();
                calculateSize(clientWidth, clientHeight);
                // Scrollbars only when the game is larger than the viewport.
                Window.enableScrolling(width > clientWidth || height > clientHeight);
                getGraphics().setWindowedMode(width, height);

                Panel rootPanel = getRootPanel();
                rootPanel.setSize(width + "px", height + "px");
                if (width > clientWidth) {
                    rootPanel.removeStyleName(AutoHorizontalMargin);
                } else {
                    rootPanel.addStyleName(AutoHorizontalMargin);
                }
                if (height > clientHeight) {
                    rootPanel.removeStyleName(AutoVerticalMargin);
                } else {
                    rootPanel.addStyleName(AutoVerticalMargin);
                }
            }
        });
    }

    @Override
    public GwtApplicationConfiguration getConfig() {
        int clientWidth = Window.getClientWidth();
        int clientHeight = Window.getClientHeight();
        calculateSize(clientWidth, clientHeight);
        Window.enableScrolling(width > clientWidth || height > clientHeight);

        Panel rootPanel = createGamePanel(width, height);
        if (width <= clientWidth) {
            rootPanel.addStyleName(AutoHorizontalMargin);
        }
        if (height <= clientHeight) {
            rootPanel.addStyleName(AutoVerticalMargin);
        }

        GwtApplicationConfiguration configuration = new GwtApplicationConfiguration(width, height);
        configuration.preferFlash = false;
        configuration.rootPanel = rootPanel;
        // Anti-aliasing on iff a positive MSAA sample count is configured.
        configuration.antialiasing = GameInit.getInt(MsaaSamples) > 0;
        return configuration;
    }

    /**
     * Computes this.width/this.height for the given viewport. With no ratio the
     * game fills the viewport; otherwise it starts at the preferred size and is
     * shrunk (never below min) along the ratio until it fits each dimension.
     */
    private void calculateSize(int clientWidth, int clientHeight) {
        if (widthHeightRatio <= 0) {
            width = clientWidth;
            height = clientHeight;
        } else {
            width = prefWidth;
            height = prefHeight;
            if (width > clientWidth) {
                width = Math.max(clientWidth, minWidth);
                height = toHeight(width);
            }
            if (height > clientHeight) {
                height = Math.max(clientHeight, minHeight);
                width = toWidth(height);
            }
        }
    }

    /** Creates and attaches the root game panel; suppresses default mouse-press behavior. */
    private Panel createGamePanel(int width, int height) {
        Panel gamePanel = new FlowPanel() {
            @Override
            public void onBrowserEvent(Event event) {
                int eventType = DOM.eventGetType(event);
                if (eventType == Event.ONMOUSEDOWN || eventType == Event.ONMOUSEUP) {
                    // Prevent e.g. text selection/drag; force the default cursor
                    // while a button is held.
                    event.preventDefault();
                    event.stopPropagation();
                    Element target = event.getEventTarget().cast();
                    target.getStyle().setProperty("cursor", eventType == Event.ONMOUSEDOWN ? "default" : "");
                }
                super.onBrowserEvent(event);
            }
        };
        gamePanel.setWidth(width + "px");
        gamePanel.setHeight(height + "px");
        gamePanel.addStyleName("root");
        RootPanel.get().add(gamePanel);
        return gamePanel;
    }

    @Override
    public PreloaderCallback getPreloaderCallback() {
        // One colored dot per game color; each dot grows from 0% to 100% as
        // its slice of the overall preload progress completes.
        FlowPanel dotContainer = new FlowPanel();
        dotContainer.addStyleName("loading-dot-container");
        Element[] dots = new Element[Color.size()];
        for (int i = 0; i < dots.length; i++) {
            // Colors come from CSS custom properties --color0, --color1, ...
            String color = "var(--color" + i + ")";

            SimplePanel dot = new SimplePanel();
            dot.setSize("0%", "0%");
            dot.addStyleName("loading-dot");
            dot.getElement().getStyle().setBackgroundColor(color);

            SimplePanel dotWithBorder = new SimplePanel(dot);
            dotWithBorder.addStyleName("loading-dot-border");
            dotWithBorder.getElement().getStyle().setBorderColor(color);

            dotContainer.add(dotWithBorder);
            dots[i] = dot.getElement();
        }

        Label label = new Label("Loading");
        label.addStyleName("loading-label");

        VerticalPanel container = new VerticalPanel();
        container.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_CENTER);
        container.addStyleName("loading-container");
        container.add(label);
        container.add(dotContainer);
        getRootPanel().add(container);

        // Fraction of total progress assigned to each dot.
        float progressStep = 1.0f / dots.length;
        return new PreloaderCallback() {
            @Override
            public void error(String file) {
                HtmlLauncher.this.error("Preloading error", file);
            }

            @Override
            public void update(PreloaderState state) {
                float progress = state.getProgress();
                log("Preload Progress", progress + "");
                DoubleFormatter percentFormatter = DoubleFormatter.get(Format.Percent);
                for (int i = 0; i < dots.length; i++) {
                    Element dot = dots[i];
                    // Per-dot progress in [0, 1] within this dot's slice.
                    float dotProgress = Math.max(0, progress - i * progressStep) / progressStep;
                    dotProgress = Math.min(dotProgress, 1);
                    String size = percentFormatter.apply(dotProgress);
                    // Shrinking margin keeps the dot centered inside its border.
                    String margin = percentFormatter.apply(1 - dotProgress);
                    Style style = dot.getStyle();
                    style.setProperty("width", size);
                    style.setProperty("height", size);
                    style.setProperty("margin", margin + " " + margin + " 0 0");
                }
            }
        };
    }

    /** Converts a width to a height along the configured ratio (identity when no ratio). */
    private int toHeight(int width) {
        if (widthHeightRatio <= 0) {
            return width;
        }
        return Math.round(width / widthHeightRatio);
    }

    /** Converts a height to a width along the configured ratio (identity when no ratio). */
    private int toWidth(int height) {
        if (widthHeightRatio <= 0) {
            return height;
        }
        return Math.round(height * widthHeightRatio);
    }
}
/* */ package com.elcuk.jaxb; /* */ /* */ import javax.xml.bind.annotation.XmlEnum; /* */ import javax.xml.bind.annotation.XmlEnumValue; /* */ import javax.xml.bind.annotation.XmlType; /* */ /* */ @XmlType(name="DataTransferUnitOfMeasure") /* */ @XmlEnum /* */ public enum DataTransferUnitOfMeasure /* */ { /* 31 */ K_HZ("KHz"), /* */ /* 33 */ M_HZ("MHz"), /* */ /* 35 */ G_HZ("GHz"), /* */ /* 37 */ MBPS("Mbps"), /* */ /* 39 */ GBPS("Gbps"); /* */ /* */ private final String value; /* */ /* */ private DataTransferUnitOfMeasure(String v) { /* 44 */ this.value = v; /* */ } /* */ /* */ public String value() { /* 48 */ return this.value; /* */ } /* */ /* */ public static DataTransferUnitOfMeasure fromValue(String v) { /* 52 */ for (DataTransferUnitOfMeasure c : values()) { /* 53 */ if (c.value.equals(v)) { /* 54 */ return c; /* */ } /* */ } /* 57 */ throw new IllegalArgumentException(v); /* */ } /* */ } /* Location: /Users/mac/Desktop/jaxb/ * Qualified Name: com.elcuk.jaxb.DataTransferUnitOfMeasure * JD-Core Version: 0.6.2 */
package com.salesmanager.shop.store.controller.customer;

import com.salesmanager.core.business.exception.ConversionException;
import com.salesmanager.core.business.exception.ServiceException;
import com.salesmanager.core.business.services.catalog.product.PricingService;
import com.salesmanager.core.business.services.reference.country.CountryService;
import com.salesmanager.core.business.services.reference.language.LanguageService;
import com.salesmanager.core.business.services.reference.zone.ZoneService;
import com.salesmanager.core.business.services.shoppingcart.ShoppingCartCalculationService;
import com.salesmanager.core.business.services.system.EmailService;
import com.salesmanager.core.business.utils.CoreConfiguration;
import com.salesmanager.core.model.customer.Customer;
import com.salesmanager.core.model.merchant.MerchantStore;
import com.salesmanager.core.model.reference.country.Country;
import com.salesmanager.core.model.reference.language.Language;
import com.salesmanager.core.model.reference.zone.Zone;
import com.salesmanager.core.model.shoppingcart.ShoppingCart;
import com.salesmanager.shop.constants.ApplicationConstants;
import com.salesmanager.shop.constants.Constants;
import com.salesmanager.shop.model.customer.AnonymousCustomer;
import com.salesmanager.shop.model.customer.CustomerEntity;
import com.salesmanager.shop.model.customer.SecuredShopPersistableCustomer;
import com.salesmanager.shop.model.shoppingcart.ShoppingCartData;
import com.salesmanager.shop.populator.shoppingCart.ShoppingCartDataPopulator;
import com.salesmanager.shop.store.controller.AbstractController;
import com.salesmanager.shop.store.controller.ControllerConstants;
import com.salesmanager.shop.store.controller.customer.facade.CustomerFacade;
import com.salesmanager.shop.utils.CaptchaRequestUtils;
import com.salesmanager.shop.utils.EmailTemplatesUtils;
import com.salesmanager.shop.utils.ImageFilePath;
import com.salesmanager.shop.utils.LabelUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.validation.FieldError;
import org.springframework.validation.ObjectError;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;

import javax.inject.Inject;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.validation.Valid;
import java.util.Collections;
import java.util.List;
import java.util.Locale;

/**
 * Registration of a new customer: displays the registration form, validates
 * and persists the submitted customer (with reCAPTCHA, unique-username and
 * password-confirmation checks), sends a registration email, then logs the
 * new customer in and merges any anonymous shopping cart.
 *
 * @author Carl Samson
 */
@SuppressWarnings( "deprecation" )
// http://stackoverflow.com/questions/17444258/how-to-use-new-passwordencoder-from-spring-security
@Controller
@RequestMapping("/shop/customer")
public class CustomerRegistrationController extends AbstractController {

    private static final Logger LOGGER = LoggerFactory.getLogger(CustomerRegistrationController.class);

    @Inject
    private CoreConfiguration coreConfiguration;

    @Inject
    private LanguageService languageService;

    @Inject
    private CountryService countryService;

    @Inject
    private ZoneService zoneService;

    @Inject
    private PasswordEncoder passwordEncoder;

    @Inject
    EmailService emailService;

    @Inject
    private LabelUtils messages;

    @Inject
    private CustomerFacade customerFacade;

    @Inject
    private AuthenticationManager customerAuthenticationManager;

    @Inject
    private EmailTemplatesUtils emailTemplatesUtils;

    @Inject
    private CaptchaRequestUtils captchaRequestUtils;

    @Inject
    @Qualifier("img")
    private ImageFilePath imageUtils;

    @Inject
    private ShoppingCartCalculationService shoppingCartCalculationService;

    @Inject
    private PricingService pricingService;

    @Value("${config.recaptcha.siteKey}")
    private String siteKeyKey;

    /**
     * Displays the registration form, pre-filling billing information from the
     * anonymous customer when one exists in the request.
     *
     * @return the store-template-specific tile name for the registration page
     */
    @RequestMapping(value="/registration.html", method=RequestMethod.GET)
    public String displayRegistration(final Model model, final HttpServletRequest request,
            final HttpServletResponse response) throws Exception {

        MerchantStore store = (MerchantStore) request.getAttribute(Constants.MERCHANT_STORE);
        model.addAttribute("recapatcha_public_key", siteKeyKey);

        SecuredShopPersistableCustomer customer = new SecuredShopPersistableCustomer();
        AnonymousCustomer anonymousCustomer =
                (AnonymousCustomer) request.getAttribute(Constants.ANONYMOUS_CUSTOMER);
        if (anonymousCustomer != null) {
            customer.setBilling(anonymousCustomer.getBilling());
        }
        model.addAttribute("customer", customer);

        /** template **/
        StringBuilder template = new StringBuilder()
                .append(ControllerConstants.Tiles.Customer.register)
                .append(".").append(store.getStoreTemplate());
        return template.toString();
    }

    /**
     * Validates and registers the submitted customer, then authenticates the
     * new account and merges any session shopping cart. Validation failures
     * return the registration template with field errors.
     *
     * @return redirect to the customer dashboard on success, otherwise the
     *         registration template
     */
    @RequestMapping(value = "/register.html", method = RequestMethod.POST)
    public String registerCustomer(@Valid @ModelAttribute("customer") SecuredShopPersistableCustomer customer,
            BindingResult bindingResult, Model model, HttpServletRequest request,
            HttpServletResponse response, final Locale locale) throws Exception {

        MerchantStore merchantStore = (MerchantStore) request.getAttribute(Constants.MERCHANT_STORE);
        Language language = super.getLanguage(request);

        String userName = null;
        String password = null;

        model.addAttribute("recapatcha_public_key", siteKeyKey);

        // reCAPTCHA: only checked when the client submitted a response token.
        if (!StringUtils.isBlank(request.getParameter("g-recaptcha-response"))) {
            boolean validateCaptcha =
                    captchaRequestUtils.checkCaptcha(request.getParameter("g-recaptcha-response"));
            if (!validateCaptcha) {
                LOGGER.debug("Captcha response does not matched");
                FieldError error = new FieldError("captchaChallengeField", "captchaChallengeField",
                        messages.getMessage("validaion.recaptcha.not.matched", locale));
                bindingResult.addError(error);
            }
        }

        // Username must be unique within this store.
        if (StringUtils.isNotBlank(customer.getUserName())) {
            if (customerFacade.checkIfUserExists(customer.getUserName(), merchantStore)) {
                LOGGER.debug("Customer with username {} already exists for this store ",
                        customer.getUserName());
                FieldError error = new FieldError("userName", "userName",
                        messages.getMessage("registration.username.already.exists", locale));
                bindingResult.addError(error);
            }
            userName = customer.getUserName();
        }

        // Password and its confirmation must match.
        if (StringUtils.isNotBlank(customer.getPassword())
                && StringUtils.isNotBlank(customer.getCheckPassword())) {
            if (!customer.getPassword().equals(customer.getCheckPassword())) {
                FieldError error = new FieldError("password", "password",
                        messages.getMessage("message.password.checkpassword.identical", locale));
                bindingResult.addError(error);
            }
            password = customer.getPassword();
        }

        if (bindingResult.hasErrors()) {
            LOGGER.debug("found {} validation error while validating in customer registration ",
                    bindingResult.getErrorCount());
            StringBuilder template = new StringBuilder()
                    .append(ControllerConstants.Tiles.Customer.register)
                    .append(".").append(merchantStore.getStoreTemplate());
            return template.toString();
        }

        @SuppressWarnings("unused")
        CustomerEntity customerData = null;
        try {
            // set user clear password
            customer.setPassword(password);
            customerData = customerFacade.registerCustomer(customer, merchantStore, language);
        } catch (Exception e) {
            LOGGER.error("Error while registering customer.. ", e);
            ObjectError error = new ObjectError("registration",
                    messages.getMessage("registration.failed", locale));
            bindingResult.addError(error);
            StringBuilder template = new StringBuilder()
                    .append(ControllerConstants.Tiles.Customer.register)
                    .append(".").append(merchantStore.getStoreTemplate());
            return template.toString();
        }

        try {
            // Registration email is best-effort: a failure must not block login.
            emailTemplatesUtils.sendRegistrationEmail(customer, merchantStore, locale,
                    request.getContextPath());
        } catch (Exception e) {
            LOGGER.error("Cannot send email to customer ", e);
        }

        /**
         * Login user
         */
        try {
            // refresh customer
            Customer c = customerFacade.getCustomerByUserName(customer.getUserName(), merchantStore);

            // authenticate
            customerFacade.authenticate(c, userName, password);
            super.setSessionAttribute(Constants.CUSTOMER, c, request);

            // set username in the cookie
            StringBuilder cookieValue = new StringBuilder();
            cookieValue.append(merchantStore.getCode()).append("_").append(c.getNick());
            Cookie cookie = new Cookie(Constants.COOKIE_NAME_USER, cookieValue.toString());
            cookie.setMaxAge(60 * 24 * 3600);
            cookie.setPath(Constants.SLASH);
            response.addCookie(cookie);

            String sessionShoppingCartCode =
                    (String) request.getSession().getAttribute(Constants.SHOPPING_CART);
            if (!StringUtils.isBlank(sessionShoppingCartCode)) {
                ShoppingCart shoppingCart = customerFacade.mergeCart(c, sessionShoppingCartCode,
                        merchantStore, language);
                ShoppingCartData shoppingCartData =
                        this.populateShoppingCartData(shoppingCart, merchantStore, language);
                // BUGFIX: the cart cookie was previously created outside this
                // null check, causing an NPE when cart population failed.
                if (shoppingCartData != null) {
                    request.getSession().setAttribute(Constants.SHOPPING_CART,
                            shoppingCartData.getCode());

                    // set cart code in the cookie
                    Cookie c1 = new Cookie(Constants.COOKIE_NAME_CART, shoppingCartData.getCode());
                    c1.setMaxAge(60 * 24 * 3600);
                    c1.setPath(Constants.SLASH);
                    response.addCookie(c1);
                }
            }

            return "redirect:/shop/customer/dashboard.html";
        } catch (Exception e) {
            LOGGER.error("Cannot authenticate user ", e);
            ObjectError error = new ObjectError("registration",
                    messages.getMessage("registration.failed", locale));
            bindingResult.addError(error);
        }

        StringBuilder template = new StringBuilder()
                .append(ControllerConstants.Tiles.Customer.register)
                .append(".").append(merchantStore.getStoreTemplate());
        return template.toString();
    }

    /**
     * Country list for the registration form, resolved for the request
     * language (falling back to the default language). Returns an empty list
     * on service failure.
     */
    @ModelAttribute("countryList")
    public List<Country> getCountries(final HttpServletRequest request) {
        Language language = (Language) request.getAttribute("LANGUAGE");
        try {
            if (language == null) {
                language = languageService.getByCode(Constants.DEFAULT_LANGUAGE);
            }
            List<Country> countryList = countryService.getCountries(language);
            return countryList;
        } catch (ServiceException e) {
            LOGGER.error("Error while fetching country list ", e);
        }
        return Collections.emptyList();
    }

    /** Zone list for the registration form. */
    @ModelAttribute("zoneList")
    public List<Zone> getZones(final HttpServletRequest request) {
        return zoneService.list();
    }

    /**
     * Converts a cart model to its display representation; returns null when
     * conversion fails (callers must handle the null).
     */
    private ShoppingCartData populateShoppingCartData(final ShoppingCart cartModel,
            final MerchantStore store, final Language language) {
        ShoppingCartDataPopulator shoppingCartDataPopulator = new ShoppingCartDataPopulator();
        shoppingCartDataPopulator.setShoppingCartCalculationService(shoppingCartCalculationService);
        shoppingCartDataPopulator.setPricingService(pricingService);
        try {
            return shoppingCartDataPopulator.populate(cartModel, store, language);
        } catch (ConversionException ce) {
            LOGGER.error("Error in converting shopping cart to shopping cart data", ce);
        }
        return null;
    }
}
package mk.ukim.finki.contact.web.rest;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.hamcrest.Description;
import org.hamcrest.TypeSafeDiagnosingMatcher;
import org.springframework.format.datetime.standard.DateTimeFormatterRegistrar;
import org.springframework.format.support.DefaultFormattingConversionService;
import org.springframework.format.support.FormattingConversionService;
import org.springframework.http.MediaType;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.ZonedDateTime;
import java.time.format.DateTimeParseException;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Utility class for testing REST controllers.
 */
public class TestUtil {

    /** MediaType for JSON UTF8. */
    public static final MediaType APPLICATION_JSON_UTF8 = new MediaType(
            MediaType.APPLICATION_JSON.getType(),
            MediaType.APPLICATION_JSON.getSubtype(), StandardCharsets.UTF_8);

    /**
     * Shared, pre-configured mapper. An {@link ObjectMapper} is thread-safe once
     * configured, so one cached instance avoids rebuilding it on every conversion.
     */
    private static final ObjectMapper MAPPER = createObjectMapper();

    /** Utility class: static methods only, no instances. */
    private TestUtil() {
    }

    /** Builds the mapper used by {@link #convertObjectToJsonBytes(Object)}. */
    private static ObjectMapper createObjectMapper() {
        ObjectMapper mapper = new ObjectMapper();
        // Omit empty values from the serialized JSON, as the original tests expect.
        mapper.setSerializationInclusion(JsonInclude.Include.NON_EMPTY);
        // Support java.time types (ZonedDateTime etc.) in request payloads.
        mapper.registerModule(new JavaTimeModule());
        return mapper;
    }

    /**
     * Convert an object to JSON byte array.
     *
     * @param object the object to convert
     * @return the JSON byte array
     * @throws IOException if serialization fails
     */
    public static byte[] convertObjectToJsonBytes(Object object) throws IOException {
        return MAPPER.writeValueAsBytes(object);
    }

    /**
     * Create a byte array with a specific size filled with specified data.
     *
     * <p>Every element receives the same value: {@code data} parsed as a base-2
     * (binary) byte literal.
     *
     * @param size the size of the byte array
     * @param data the data to put in the byte array, e.g. {@code "0101"}
     * @return the byte array
     */
    public static byte[] createByteArray(int size, String data) {
        byte[] byteArray = new byte[size];
        for (int i = 0; i < size; i++) {
            byteArray[i] = Byte.parseByte(data, 2);
        }
        return byteArray;
    }

    /**
     * A matcher that tests that the examined string represents the same instant
     * as the reference datetime.
     */
    public static class ZonedDateTimeMatcher extends TypeSafeDiagnosingMatcher<String> {

        private final ZonedDateTime date;

        public ZonedDateTimeMatcher(ZonedDateTime date) {
            this.date = date;
        }

        @Override
        protected boolean matchesSafely(String item, Description mismatchDescription) {
            try {
                // isEqual compares instants, so strings in different zones but at the
                // same instant still match.
                if (!date.isEqual(ZonedDateTime.parse(item))) {
                    mismatchDescription.appendText("was ").appendValue(item);
                    return false;
                }
                return true;
            } catch (DateTimeParseException e) {
                mismatchDescription.appendText("was ").appendValue(item)
                    .appendText(", which could not be parsed as a ZonedDateTime");
                return false;
            }
        }

        @Override
        public void describeTo(Description description) {
            description.appendText("a String representing the same Instant as ").appendValue(date);
        }
    }

    /**
     * Creates a matcher that matches when the examined string represents the same
     * instant as the reference datetime.
     *
     * @param date the reference datetime against which the examined string is checked
     */
    public static ZonedDateTimeMatcher sameInstant(ZonedDateTime date) {
        return new ZonedDateTimeMatcher(date);
    }

    /**
     * Verifies the equals/hashcode contract on the domain object.
     *
     * @param clazz the domain class under test; must expose a no-arg constructor
     * @throws Exception if the class cannot be instantiated reflectively
     */
    public static <T> void equalsVerifier(Class<T> clazz) throws Exception {
        T domainObject1 = clazz.getConstructor().newInstance();
        assertThat(domainObject1.toString()).isNotNull();
        assertThat(domainObject1).isEqualTo(domainObject1);
        assertThat(domainObject1.hashCode()).isEqualTo(domainObject1.hashCode());
        // Test with an instance of another class
        Object testOtherObject = new Object();
        assertThat(domainObject1).isNotEqualTo(testOtherObject);
        assertThat(domainObject1).isNotEqualTo(null);
        // Test with an instance of the same class
        T domainObject2 = clazz.getConstructor().newInstance();
        assertThat(domainObject1).isNotEqualTo(domainObject2);
        // HashCodes are equals because the objects are not persisted yet
        assertThat(domainObject1.hashCode()).isEqualTo(domainObject2.hashCode());
    }

    /**
     * Create a FormattingConversionService which uses ISO date format, instead of
     * the localized one.
     *
     * @return the FormattingConversionService
     */
    public static FormattingConversionService createFormattingConversionService() {
        DefaultFormattingConversionService dfcs = new DefaultFormattingConversionService();
        DateTimeFormatterRegistrar registrar = new DateTimeFormatterRegistrar();
        registrar.setUseIsoFormat(true);
        registrar.registerFormatters(dfcs);
        return dfcs;
    }
}
package br.com.otta.bank.client.controller;

import java.util.Collection;

import javax.validation.Valid;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import br.com.otta.bank.client.model.ClientData;
import br.com.otta.bank.client.model.ClientInformation;
import br.com.otta.bank.client.service.ClientService;
import io.swagger.v3.oas.annotations.Operation;

/**
 * REST controller exposing the client resources.
 *
 * @author Guilherme
 *
 */
@RestController
@RequestMapping("client")
public class ClientController {

    private final ClientService service;

    @Autowired
    public ClientController(ClientService clientService) {
        this.service = clientService;
    }

    /**
     * Persists a new client built from the given payload and returns its stored
     * representation.
     */
    @Operation(description = "Adiciona um cliente na base, utilizando as as informações passadas por paramêtro.")
    @PostMapping(consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity<ClientInformation> save(@Valid @RequestBody ClientData clientData) {
        return ResponseEntity.ok(service.save(clientData));
    }

    /**
     * Returns every client currently stored.
     */
    @Operation(description = "Lista todos os clientes da base.")
    @GetMapping(produces = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity<Collection<ClientInformation>> findAll() {
        Collection<ClientInformation> clients = service.findAll();
        return ResponseEntity.ok(clients);
    }
}
/*
 * Copyright 2016-2020 chronicle.software
 *
 * https://chronicle.software
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.openhft.chronicle.queue.impl.table;

import net.openhft.chronicle.bytes.MappedBytes;
import net.openhft.chronicle.bytes.MappedFile;
import net.openhft.chronicle.core.Jvm;
import net.openhft.chronicle.core.Maths;
import net.openhft.chronicle.core.StackTrace;
import net.openhft.chronicle.core.annotation.UsedViaReflection;
import net.openhft.chronicle.core.io.AbstractCloseable;
import net.openhft.chronicle.core.io.ClosedIllegalStateException;
import net.openhft.chronicle.core.io.IORuntimeException;
import net.openhft.chronicle.core.util.StringUtils;
import net.openhft.chronicle.core.values.LongValue;
import net.openhft.chronicle.queue.impl.TableStore;
import net.openhft.chronicle.queue.impl.single.MetaDataField;
import net.openhft.chronicle.wire.*;
import org.jetbrains.annotations.NotNull;

import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.io.StreamCorruptedException;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.channels.OverlappingFileLockException;
import java.nio.file.StandardOpenOption;
import java.util.Objects;
import java.util.function.Function;
import java.util.function.Supplier;

import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static net.openhft.chronicle.core.util.Time.sleep;

/**
 * A {@link TableStore} backed by a single memory-mapped file (suffix {@code .cq4t}).
 *
 * <p>Key/value entries are appended to the mapped file as wire events; values are
 * exposed as {@link LongValue} bindings into the mapped memory.
 */
public class SingleTableStore<T extends Metadata> extends AbstractCloseable implements TableStore<T> {
    public static final String SUFFIX = ".cq4t";

    // How long doWithLock() keeps retrying before giving up; overridable via system property.
    private static final long timeoutMS = Long.getLong("chronicle.table.store.timeoutMS", 10_000);
    @NotNull
    private final WireType wireType;
    @NotNull
    private final T metadata;
    @NotNull
    private final MappedBytes mappedBytes;
    @NotNull
    private final MappedFile mappedFile;
    @NotNull
    private final Wire mappedWire;

    /**
     * used by {@link Demarshallable}
     *
     * @param wire a wire
     */
    @SuppressWarnings("unused")
    @UsedViaReflection
    private SingleTableStore(@NotNull final WireIn wire) {
        assert wire.startUse();
        try {
            this.wireType = Objects.requireNonNull(wire.read(MetaDataField.wireType).object(WireType.class));
            this.mappedBytes = (MappedBytes) (wire.bytes());
            this.mappedFile = mappedBytes.mappedFile();
            wire.consumePadding();
            // Metadata is optional in the serialized form; fall back to the no-op singleton.
            if (wire.bytes().readRemaining() > 0) {
                this.metadata = Objects.requireNonNull(wire.read(MetaDataField.metadata).typedMarshallable());
            } else {
                //noinspection unchecked
                this.metadata = (T) Metadata.NoMeta.INSTANCE;
            }
            mappedWire = wireType.apply(mappedBytes);
        } finally {
            assert wire.endUse();
        }
    }

    /**
     * @param wireType    the wire type that is being used
     * @param mappedBytes used to mapped the data store file
     */
    SingleTableStore(@NotNull final WireType wireType,
                     @NotNull final MappedBytes mappedBytes,
                     @NotNull final T metadata) {
        this.wireType = wireType;
        this.metadata = metadata;
        this.mappedBytes = mappedBytes;
        this.mappedFile = mappedBytes.mappedFile();
        mappedWire = wireType.apply(mappedBytes);
    }

    /** Runs {@code code} while holding a shared (read) OS file lock on {@code file}. */
    public static <T, R> R doWithSharedLock(@NotNull final File file,
                                            @NotNull final Function<T, ? extends R> code,
                                            @NotNull final Supplier<T> target) {
        return doWithLock(file, code, target, true);
    }

    /** Runs {@code code} while holding an exclusive (write) OS file lock on {@code file}. */
    public static <T, R> R doWithExclusiveLock(@NotNull final File file,
                                               @NotNull final Function<T, ? extends R> code,
                                               @NotNull final Supplier<T> target) {
        return doWithLock(file, code, target, false);
    }

    // shared vs exclusive - see https://docs.oracle.com/javase/7/docs/api/java/nio/channels/FileChannel.html
    /**
     * Acquires an OS file lock on {@code file}, retrying with quadratic back-off until
     * {@link #timeoutMS} elapses, then applies {@code code} to {@code target.get()}.
     *
     * @param shared true for a shared (read) lock, false for an exclusive (write) lock
     * @throws IllegalStateException if the lock cannot be claimed before the timeout,
     *                               or the channel cannot be opened
     */
    private static <T, R> R doWithLock(@NotNull final File file,
                                       @NotNull final Function<T, ? extends R> code,
                                       @NotNull final Supplier<T> target,
                                       final boolean shared) {
        final String type = shared ? "shared" : "exclusive";
        // A shared FileLock requires a readable channel, an exclusive one a writable channel.
        final StandardOpenOption readOrWrite = shared ? StandardOpenOption.READ : StandardOpenOption.WRITE;

        final long timeoutAt = System.currentTimeMillis() + timeoutMS;
        final long startMs = System.currentTimeMillis();
        try (final FileChannel channel = FileChannel.open(file.toPath(), readOrWrite)) {

            for (int count = 1; System.currentTimeMillis() < timeoutAt; count++) {
                try (FileLock fileLock = channel.tryLock(0L, Long.MAX_VALUE, shared)) {
                    if (fileLock != null) {
                        return code.apply(target.get());
                    }
                } catch (IOException | OverlappingFileLockException e) {
                    // failed to acquire the lock, wait until other operation completes
                    if (count > 9) {
                        if (Jvm.isDebugEnabled(SingleTableStore.class)) {
                            final long elapsedMs = System.currentTimeMillis() - startMs;
                            final String message = "Failed to acquire " + type + " lock on the table store file. Retrying, file=" +
                                    file.getAbsolutePath() + ", count=" + count + ", elapsed=" + elapsedMs + " ms";
                            Jvm.debug().on(SingleTableStore.class, "", new StackTrace(message));
                        }
                    }
                }
                // Quadratic back-off, capped at 250 ms per retry.
                int delay = Math.min(250, count * count);
                sleep(delay, MILLISECONDS);
            }
        } catch (IOException e) {
            throw new IllegalStateException("Couldn't perform operation with " + type + " file lock", e);
        }
        // FIX: the message previously hard-coded "exclusive" even when a shared lock timed out.
        throw new IllegalStateException("Unable to claim " + type + " lock on file " + file);
    }

    @NotNull
    @Override
    public File file() {
        return mappedFile.file();
    }

    @NotNull
    @Override
    public String dump() {
        return dump(false);
    }

    /** Dumps the store contents; maps a fresh view so read positions are independent. */
    private String dump(final boolean abbrev) {

        final MappedBytes bytes = MappedBytes.mappedBytes(mappedFile);
        try {
            bytes.readLimit(bytes.realCapacity());
            return Wires.fromSizePrefixedBlobs(bytes, abbrev);
        } finally {
            bytes.releaseLast();
        }
    }

    @NotNull
    @Override
    public String shortDump() {
        throwExceptionIfClosed();

        return dump(true);
    }

    @Override
    protected void performClose() {
        mappedBytes.releaseLast();
    }

    /**
     * @return creates a new instance of mapped bytes, because, for example the
     * tailer and appender can be at different locations.
     */
    @NotNull
    @Override
    public MappedBytes bytes() {
        throwExceptionIfClosed();

        return MappedBytes.mappedBytes(mappedFile);
    }

    @NotNull
    @Override
    public String toString() {
        return getClass().getSimpleName() + "{" +
                "wireType=" + wireType +
                ", mappedFile=" + mappedFile +
                '}';
    }

    // *************************************************************************
    // Marshalling
    // *************************************************************************

    private void onCleanup() {
        mappedBytes.releaseLast();
    }

    @Override
    public void writeMarshallable(@NotNull final WireOut wire) {
        wire.write(MetaDataField.wireType).object(wireType);
        if (metadata != Metadata.NoMeta.INSTANCE)
            wire.write(MetaDataField.metadata).typedMarshallable(this.metadata);

        // align to a word whether needed or not as a micro-optimisation.
        wire.writeAlignTo(Integer.BYTES, 0);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized LongValue acquireValueFor(CharSequence key, final long defaultValue) {
        // TODO Change to ThreadLocal values if performance is a problem.
        if (mappedBytes.isClosed())
            throw new ClosedIllegalStateException("Closed");
        final StringBuilder sb = Wires.acquireStringBuilder();
        mappedBytes.reserve(this);
        try {
            // Scan existing entries for the key; return a binding to it if found.
            mappedBytes.readPosition(0);
            mappedBytes.readLimit(mappedBytes.realCapacity());
            while (mappedWire.readDataHeader()) {
                final int header = mappedBytes.readVolatileInt();
                if (Wires.isNotComplete(header))
                    break;
                final long readPosition = mappedBytes.readPosition();
                final int length = Wires.lengthOf(header);
                final ValueIn valueIn = mappedWire.readEventName(sb);
                if (StringUtils.equalsCaseIgnore(key, sb)) {
                    return valueIn.int64ForBinding(null);
                }
                mappedBytes.readPosition(readPosition + length);
            }
            // not found - append a new entry with the default value and bind to it.
            final int safeLength = Maths.toUInt31(mappedBytes.realCapacity() - mappedBytes.readPosition());
            mappedBytes.writeLimit(mappedBytes.realCapacity());
            long start = mappedBytes.readPosition();
            mappedBytes.writePosition(start);
            final long pos = mappedWire.enterHeader(safeLength);
            final LongValue longValue = wireType.newLongReference().get();
            mappedWire.writeEventName(key).int64forBinding(defaultValue, longValue);
            mappedWire.writeAlignTo(Integer.BYTES, 0);
            mappedWire.updateHeader(pos, false, 0);
            long end = mappedBytes.writePosition();
            long chunkSize = mappedFile.chunkSize();
            long overlapSize = mappedFile.overlapSize();
            // The entry must not extend past the current chunk plus its overlap region.
            long endOfChunk = (start + chunkSize - 1) / chunkSize * chunkSize;
            if (end >= endOfChunk + overlapSize)
                throw new IllegalStateException("Misaligned write");
            return longValue;

        } catch (StreamCorruptedException | EOFException e) {
            throw new IORuntimeException(e);
        } finally {
            mappedBytes.release(this);
        }
    }

    @Override
    public synchronized <T> void forEachKey(T accumulator, TableStoreIterator<T> tsIterator) {
        final StringBuilder sb = Wires.acquireStringBuilder();
        mappedBytes.reserve(this);
        try {
            mappedBytes.readPosition(0);
            mappedBytes.readLimit(mappedBytes.realCapacity());
            while (mappedWire.readDataHeader()) {
                final int header = mappedBytes.readVolatileInt();
                if (Wires.isNotComplete(header))
                    break;
                final long readPosition = mappedBytes.readPosition();
                final int length = Wires.lengthOf(header);
                final ValueIn valueIn = mappedWire.readEventName(sb);
                tsIterator.accept(accumulator, sb, valueIn);
                mappedBytes.readPosition(readPosition + length);
            }
        } catch (EOFException e) {
            throw new IORuntimeException(e);
        } finally {
            mappedBytes.release(this);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public <R> R doWithExclusiveLock(@NotNull final Function<TableStore<T>, ? extends R> code) {
        return doWithExclusiveLock(file(), code, () -> this);
    }

    @Override
    public T metadata() {
        return metadata;
    }

    @Override
    protected boolean threadSafetyCheck(final boolean isUsed) {
        // TableStore are thread safe
        return true;
    }
}
/*
 * Copyright 2020 Eric Medvet <eric.medvet@gmail.com> (as eric)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package it.units.malelab.jgea.problem.mapper;

import com.google.common.collect.Range;
import it.units.malelab.jgea.representation.grammar.ge.HierarchicalMapper;
import it.units.malelab.jgea.representation.sequence.bit.BitString;
import it.units.malelab.jgea.representation.tree.Tree;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Utilities for evaluating mapper expression trees over {@link BitString} genotypes.
 *
 * <p>{@link #compute} interprets a {@code Tree<Element>} (variables, functions,
 * numeric constants) against a genotype; the {@code get*RawTree} factories build
 * the reference GE / HGE / WHGE mapper trees as raw grammar-symbol string trees,
 * which {@link #transform} converts into {@code Tree<Element>} form.
 *
 * @author eric
 */
public class MapperUtils {

  /**
   * Applies a unary/binary mapper function element-wise over {@code inputList}.
   *
   * <p>For SIZE/WEIGHT/WEIGHT_R/INT each element is treated as a {@link BitString};
   * for ROTATE_SX/ROTATE_DX/SUBSTRING {@code arg} is the BitString and each element
   * supplies the numeric argument. Raw lists are intentional (mixed element types).
   */
  private static List apply(Element.MapperFunction function, List inputList, Object arg) {
    List outputList = new ArrayList(inputList.size());
    for (Object repeatedArg : inputList) {
      switch (function) {
        case SIZE:
          outputList.add((double) ((BitString) repeatedArg).size());
          break;
        case WEIGHT:
          // weight = number of set bits (count); see BitString.count() usage below
          outputList.add((double) ((BitString) repeatedArg).count());
          break;
        case WEIGHT_R:
          // relative weight: set bits / total bits
          outputList.add((double) ((BitString) repeatedArg).count() / (double) ((BitString) repeatedArg).size());
          break;
        case INT:
          outputList.add((double) ((BitString) repeatedArg).toInt());
          break;
        case ROTATE_SX:
          outputList.add(rotateSx((BitString) arg, ((Double) repeatedArg).intValue()));
          break;
        case ROTATE_DX:
          outputList.add(rotateDx((BitString) arg, ((Double) repeatedArg).intValue()));
          break;
        case SUBSTRING:
          outputList.add(substring((BitString) arg, ((Double) repeatedArg).intValue()));
          break;
      }
    }
    return outputList;
  }

  /**
   * Recursively evaluates a mapper expression tree.
   *
   * @param tree          the expression to evaluate
   * @param g             the genotype bound to the GENOTYPE variable
   * @param values        the list bound to LIST_N (also caps sizes of seq/repeat/split results)
   * @param depth         value of the DEPTH variable
   * @param globalCounter mutable counter; GL_COUNT_RW reads AND increments it (side effect)
   * @return a Double, BitString or List depending on the node type; null for unknown leaves
   */
  public static Object compute(
      Tree<Element> tree,
      BitString g,
      List<Double> values,
      int depth,
      AtomicInteger globalCounter
  ) {
    Object result = null;
    if (tree.content() instanceof Element.Variable) {
      switch (((Element.Variable) tree.content())) {
        case GENOTYPE:
          result = g;
          break;
        case LIST_N:
          result = values;
          break;
        case DEPTH:
          result = (double) depth;
          break;
        case GL_COUNT_R:
          // read-only view of the counter
          result = (double) globalCounter.get();
          break;
        case GL_COUNT_RW:
          // read-and-advance: evaluation order of the tree matters here
          result = (double) globalCounter.getAndIncrement();
          break;
      }
    } else if (tree.content() instanceof Element.MapperFunction) {
      switch (((Element.MapperFunction) tree.content())) {
        case SIZE:
          result = (double) ((BitString) compute(tree.child(0), g, values, depth, globalCounter)).size();
          break;
        case WEIGHT:
          result = (double) ((BitString) compute(tree.child(0), g, values, depth, globalCounter)).count();
          break;
        case WEIGHT_R:
          BitString bitsGenotype = (BitString) compute(tree.child(0), g, values, depth, globalCounter);
          result = (double) bitsGenotype.count() / (double) bitsGenotype.size();
          break;
        case INT:
          result = (double) ((BitString) compute(tree.child(0), g, values, depth, globalCounter)).toInt();
          break;
        case ADD:
          result = ((Double) compute(tree.child(0), g, values, depth, globalCounter) + (Double) compute(
              tree.child(1),
              g,
              values,
              depth,
              globalCounter
          ));
          break;
        case SUBTRACT:
          result = ((Double) compute(tree.child(0), g, values, depth, globalCounter) - (Double) compute(
              tree.child(1),
              g,
              values,
              depth,
              globalCounter
          ));
          break;
        case MULT:
          result = ((Double) compute(tree.child(0), g, values, depth, globalCounter) * (Double) compute(
              tree.child(1),
              g,
              values,
              depth,
              globalCounter
          ));
          break;
        case DIVIDE:
          // protected: division by zero yields 0 instead of Infinity/NaN
          result = protectedDivision(
              (Double) compute(tree.child(0), g, values, depth, globalCounter),
              (Double) compute(tree.child(1), g, values, depth, globalCounter)
          );
          break;
        case REMAINDER:
          result = protectedRemainder(
              (Double) compute(tree.child(0), g, values, depth, globalCounter),
              (Double) compute(tree.child(1), g, values, depth, globalCounter)
          );
          break;
        case LENGTH:
          result = (double) ((List) compute(tree.child(0), g, values, depth, globalCounter)).size();
          break;
        case MAX_INDEX:
          result = (double) maxIndex((List<Double>) compute(tree.child(0), g, values, depth, globalCounter), 1d);
          break;
        case MIN_INDEX:
          // min via maxIndex with negated comparison multiplier
          result = (double) maxIndex((List<Double>) compute(tree.child(0), g, values, depth, globalCounter), -1d);
          break;
        case GET:
          // index is clamped into list bounds by getFromList
          result = getFromList(
              (List) compute(tree.child(0), g, values, depth, globalCounter),
              ((Double) compute(tree.child(1), g, values, depth, globalCounter)).intValue()
          );
          break;
        case SEQ:
          result = seq(((Double) compute(tree.child(0), g, values, depth, globalCounter)).intValue(), values.size());
          break;
        case REPEAT:
          result = repeat(compute(tree.child(0), g, values, depth, globalCounter), ((Double) compute(
              tree.child(1),
              g,
              values,
              depth,
              globalCounter
          )).intValue(), values.size());
          break;
        case ROTATE_SX:
          result = rotateSx(
              (BitString) compute(tree.child(0), g, values, depth, globalCounter),
              ((Double) compute(tree.child(1), g, values, depth, globalCounter)).intValue()
          );
          break;
        case ROTATE_DX:
          result = rotateDx(
              (BitString) compute(tree.child(0), g, values, depth, globalCounter),
              ((Double) compute(tree.child(1), g, values, depth, globalCounter)).intValue()
          );
          break;
        case SUBSTRING:
          result = substring(
              (BitString) compute(tree.child(0), g, values, depth, globalCounter),
              ((Double) compute(tree.child(1), g, values, depth, globalCounter)).intValue()
          );
          break;
        case SPLIT:
          result = split(
              (BitString) compute(tree.child(0), g, values, depth, globalCounter),
              ((Double) compute(tree.child(1), g, values, depth, globalCounter)).intValue(),
              values.size()
          );
          break;
        case SPLIT_W:
          result = splitWeighted(
              (BitString) compute(tree.child(0), g, values, depth, globalCounter),
              (List<Double>) compute(tree.child(1), g, values, depth, globalCounter),
              values.size()
          );
          break;
        case APPLY:
          // child 0 is the function NAME (not evaluated); optional child 2 is the extra arg
          result = apply(
              (Element.MapperFunction) tree.child(0).content(),
              ((List) compute(tree.child(1), g, values, depth, globalCounter)),
              (tree.nChildren() >= 3) ? compute(tree.child(2), g, values, depth, globalCounter) : null
          );
          break;
      }
    } else if (tree.content() instanceof Element.NumericConstant) {
      result = ((Element.NumericConstant) tree.content()).value();
    }
    return result;
  }

  // Concatenation of two raw lists. NOTE(review): appears unused in this chunk; kept as-is.
  private static List concat(List l1, List l2) {
    List l = new ArrayList(l1);
    l.addAll(l2);
    return l;
  }

  /**
   * Parses a grammar leaf string into an Element: numeric constant first, then
   * variable / function by grammar name; null if nothing matches.
   */
  private static Element fromString(String string) {
    try {
      double value = Double.parseDouble(string);
      return new Element.NumericConstant(value);
    } catch (NumberFormatException ex) {
      //just ignore
    }
    for (Element.Variable variable : Element.Variable.values()) {
      if (variable.getGrammarName().equals(string)) {
        return variable;
      }
    }
    for (Element.MapperFunction function : Element.MapperFunction.values()) {
      if (function.getGrammarName().equals(string)) {
        return function;
      }
    }
    return null;
  }

  // Bounds-safe list access: the index is clamped to [0, size-1].
  private static <T> T getFromList(List<T> list, int n) {
    n = Math.min(n, list.size() - 1);
    n = Math.max(0, n);
    return list.get(n);
  }

  /**
   * Builds the raw grammar tree of the standard GE mapper:
   * {@code int(substring(rotate_sx(g, g_count_rw * codonLength), codonLength))}
   * with a {@code repeat(g, length(ln))} list part.
   *
   * @param codonLength number of bits per codon, embedded as a constant leaf
   */
  public static Tree<String> getGERawTree(int codonLength) {
    return node("<mapper>", node(
        "<n>",
        node("<fun_n_g>", node("int")),
        node("("),
        node("<g>", node("<fun_g_g,n>", node("substring")), node("("), node(
            "<g>",
            node("<fun_g_g,n>", node("rotate_sx")),
            node("("),
            node("<g>", node("<var_g>", node("g"))),
            node(","),
            node(
                "<n>",
                node("<fun_n_n,n>", node("*")),
                node("("),
                node("<n>", node("<var_n>", node("g_count_rw"))),
                node(","),
                node("<n>", node("<const_n>", node(Integer.toString(codonLength)))),
                node(")")
            ),
            node(")")
        ), node(","), node("<n>", node("<const_n>", node(Integer.toString(codonLength)))), node(")")),
        node(")")
    ), node(
        "<lg>",
        node("<fun_lg_g,n>", node("repeat")),
        node("("),
        node("<g>", node("<var_g>", node("g"))),
        node(","),
        node(
            "<n>",
            node("<fun_n_ln>", node("length")),
            node("("),
            node("<ln>", node("<var_ln>", node("ln"))),
            node(")")
        ),
        node(")")
    ));
  }

  /**
   * Builds the raw grammar tree of the HGE mapper:
   * {@code max_index(apply(weight_r, split(g, length(ln))))} with a
   * {@code split(g, length(ln))} list part.
   */
  public static Tree<String> getHGERawTree() {
    return node("<mapper>", node(
        "<n>",
        node("<fun_n_ln>", node("max_index")),
        node("("),
        node("<ln>", node("apply"), node("("),
            node("<fun_n_g>", node("weight_r")), node(","), node(
                "<lg>",
                node("<fun_lg_g,n>", node("split")),
                node("("),
                node("<g>", node("<var_g>", node("g"))),
                node(","),
                node(
                    "<n>",
                    node("<fun_n_ln>", node("length")),
                    node("("),
                    node("<ln>", node("<var_ln>", node("ln"))),
                    node(")")
                ),
                node(")")
            ), node(")")),
        node(")")
    ), node(
        "<lg>",
        node("<fun_lg_g,n>", node("split")),
        node("("),
        node("<g>", node("<var_g>", node("g"))),
        node(","),
        node(
            "<n>",
            node("<fun_n_ln>", node("length")),
            node("("),
            node("<ln>", node("<var_ln>", node("ln"))),
            node(")")
        ),
        node(")")
    ));
  }

  /**
   * Builds the raw grammar tree of the WHGE mapper: same selector as HGE but the
   * list part uses the weighted split {@code split_w(g, ln)}.
   */
  public static Tree<String> getWHGERawTree() {
    return node("<mapper>", node(
        "<n>",
        node("<fun_n_ln>", node("max_index")),
        node("("),
        node("<ln>", node("apply"), node("("),
            node("<fun_n_g>", node("weight_r")), node(","), node(
                "<lg>",
                node("<fun_lg_g,n>", node("split")),
                node("("),
                node("<g>", node("<var_g>", node("g"))),
                node(","),
                node(
                    "<n>",
                    node("<fun_n_ln>", node("length")),
                    node("("),
                    node("<ln>", node("<var_ln>", node("ln"))),
                    node(")")
                ),
                node(")")
            ), node(")")),
        node(")")
    ), node(
        "<lg>",
        node("<fun_lg_g,ln>", node("split_w")),
        node("("),
        node("<g>", node("<var_g>", node("g"))),
        node(","),
        node("<ln>", node("<var_ln>", node("ln"))),
        node(")")
    ));
  }

  // Single-element raw list. NOTE(review): appears unused in this chunk; kept as-is.
  private static List list(Object item) {
    List l = new ArrayList(1);
    l.add(item);
    return l;
  }

  /**
   * Index of the maximum of {@code mult * list[i]} (so mult=-1 gives the min index);
   * returns 0 for an empty list. Ties resolve to the first occurrence.
   */
  private static int maxIndex(List<Double> list, double mult) {
    if (list.isEmpty()) {
      return 0;
    }
    int index = 0;
    for (int i = 1; i < list.size(); i++) {
      if (mult * list.get(i) > mult * list.get(index)) {
        index = i;
      }
    }
    return index;
  }

  // Convenience factory: a tree node with the given content and children.
  private static <T> Tree<T> node(T content, Tree<T>... children) {
    Tree<T> tree = Tree.of(content);
    for (Tree<T> child : children) {
      tree.addChild(child);
    }
    return tree;
  }

  // Protected division: x/0 -> 0 (standard GP convention).
  private static double protectedDivision(double d1, double d2) {
    if (d2 == 0) {
      return 0d;
    }
    return d1 / d2;
  }

  // Protected remainder: x%0 -> 0.
  private static double protectedRemainder(double d1, double d2) {
    if (d2 == 0) {
      return 0d;
    }
    return d1 % d2;
  }

  /**
   * List of {@code element} repeated n times, with n clamped to [1, maxN]
   * (n<=0 yields a single-element list).
   */
  private static <T> List<T> repeat(T element, int n, int maxN) {
    if (n <= 0) {
      return Collections.singletonList(element);
    }
    if (n > maxN) {
      n = maxN;
    }
    List<T> list = new ArrayList<>(n);
    for (int i = 0; i < n; i++) {
      list.add(element);
    }
    return list;
  }

  /**
   * Rotates the bit string right by n (mod size); n<=0 after the mod returns the
   * input unchanged (no copy).
   */
  private static BitString rotateDx(BitString g, int n) {
    if (g.size() == 0) {
      return g;
    }
    n = n % g.size();
    if (n <= 0) {
      return g;
    }
    BitString copy = new BitString(g.size());
    copy.set(0, g.slice(g.size() - n, g.size()));
    copy.set(n, g.slice(0, g.size() - n));
    return copy;
  }

  /** Rotates the bit string left by n (mod size); mirror of {@link #rotateDx}. */
  private static BitString rotateSx(BitString g, int n) {
    if (g.size() == 0) {
      return g;
    }
    n = n % g.size();
    if (n <= 0) {
      return g;
    }
    BitString copy = new BitString(g.size());
    copy.set(0, g.slice(n, g.size()));
    copy.set(g.size() - n, g.slice(0, n));
    return copy;
  }

  /** The list [0, 1, ..., n-1] as doubles, with n clamped to [1, maxN]. */
  private static List<Double> seq(int n, int maxN) {
    if (n > maxN) {
      n = maxN;
    }
    if (n < 1) {
      n = 1;
    }
    List<Double> list = new ArrayList<>(n);
    for (int i = 0; i < n; i++) {
      list.add((double) i);
    }
    return list;
  }

  /**
   * Splits g into n roughly equal slices (n clamped to [1, min(maxN, g.size())]);
   * an empty g yields n empty bit strings. Slice boundaries are delegated to
   * {@link HierarchicalMapper#slices}.
   */
  private static List<BitString> split(BitString g, int n, int maxN) {
    if (n <= 0) {
      return Collections.singletonList(g);
    }
    if (n > maxN) {
      n = maxN;
    }
    if (g.size() == 0) {
      return Collections.nCopies(n, new BitString(0));
    }
    n = Math.max(1, n);
    n = Math.min(n, g.size());
    List<Range<Integer>> ranges = HierarchicalMapper.slices(Range.closedOpen(0, g.size()), n);
    return g.slices(ranges);
  }

  /**
   * Splits g into slices whose lengths are proportional to {@code weights}
   * (normalized by the smallest positive weight); all-zero weights fall back to
   * an even {@link #split}.
   */
  private static List<BitString> splitWeighted(BitString g, List<Double> weights, int maxN) {
    if (weights.isEmpty()) {
      return Collections.singletonList(g);
    }
    if (g.size() == 0) {
      return Collections.nCopies(weights.size(), new BitString(0));
    }
    double minWeight = Double.POSITIVE_INFINITY;
    for (double w : weights) {
      if ((w < minWeight) && (w > 0)) {
        minWeight = w;
      }
    }
    if (Double.isInfinite(minWeight)) { //all zero
      return split(g, weights.size(), maxN);
    }
    List<Integer> intWeights = new ArrayList<>(weights.size());
    for (double w : weights) {
      intWeights.add((int) Math.max(Math.round(w / minWeight), 0d));
    }
    List<Range<Integer>> ranges = HierarchicalMapper.slices(Range.closedOpen(0, g.size()), intWeights);
    return g.slices(ranges);
  }

  /** Prefix of g of length min(to, size); to<=0 yields an empty bit string. */
  private static BitString substring(BitString g, int to) {
    if (to <= 0) {
      return new BitString(0);
    }
    if (g.size() == 0) {
      return g;
    }
    return g.slice(0, Math.min(to, g.size()));
  }

  /**
   * Converts a raw grammar string tree into an Element tree: leaves are parsed via
   * {@link #fromString}, single-child chains are collapsed, and unparseable leaves
   * (grammar decorations like parentheses) are discarded.
   */
  public static Tree<Element> transform(Tree<String> stringTree) {
    if (stringTree.isLeaf()) {
      Element element = fromString(stringTree.content());
      if (element == null) {
        return null;
      }
      return Tree.of(element);
    }
    if (stringTree.nChildren() == 1) {
      return transform(stringTree.child(0));
    }
    Tree<Element> tree = transform(stringTree.child(0));
    for (int i = 1; i < stringTree.nChildren(); i++) {
      Tree<Element> child = transform(stringTree.child(i));
      if (child != null) { //discard decorations
        tree.addChild(child);
      }
    }
    return tree;
  }

}
package com.braintreegateway; public class PaymentMethodOptionsAdyenRequest extends Request { private PaymentMethodOptionsRequest parent; private Boolean overwriteBrand; private String selectedBrand; public PaymentMethodOptionsAdyenRequest() {} public PaymentMethodOptionsAdyenRequest(PaymentMethodOptionsRequest parent) { this.parent = parent; } public PaymentMethodOptionsRequest done() { return parent; } public PaymentMethodOptionsAdyenRequest overwriteBrand(Boolean overwriteBrand) { this.overwriteBrand = overwriteBrand; return this; } public PaymentMethodOptionsAdyenRequest selectedBrand(String selectedBrand) { this.selectedBrand = selectedBrand; return this; } @Override public String toXML() { return buildRequest("adyen").toXML(); } @Override public String toQueryString() { return toQueryString("adyen"); } @Override public String toQueryString(String root) { return buildRequest(root).toQueryString(); } protected RequestBuilder buildRequest(String root) { RequestBuilder builder = new RequestBuilder(root); builder.addElement("overwriteBrand", overwriteBrand); builder.addElement("selectedBrand", selectedBrand); return builder; } }
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
 */
package elephantdb.generated.keyval;

import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Thrift-generated service wrapper for the ElephantDB key/value API.
// Hand edits here will be lost on the next `thrift --gen java` run; change
// the .thrift IDL instead.
public class ElephantDB {

  // Synchronous service contract. Every lookup is addressed by domain name
  // plus a key; single-key variants return one Value, multi-key variants a
  // List<Value>. All methods share the same declared failure modes:
  // DomainNotFoundException, HostsDownException, DomainNotLoadedException,
  // plus the transport-level TException.
  public interface Iface extends elephantdb.generated.ElephantDBShared.Iface {

    public elephantdb.generated.Value get(String domain, ByteBuffer key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException;

    public elephantdb.generated.Value getString(String domain, String key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException;

    public elephantdb.generated.Value getInt(String domain, int key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException;

    public elephantdb.generated.Value getLong(String domain, long key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException;

    public List<elephantdb.generated.Value> multiGet(String domain, List<ByteBuffer> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException;

    public List<elephantdb.generated.Value> multiGetString(String domain, List<String> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException;

    public List<elephantdb.generated.Value> multiGetInt(String domain, List<Integer> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException;

    public List<elephantdb.generated.Value> multiGetLong(String domain, List<Long> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException;

    // "direct" variants — presumably bypass inter-node routing and read the
    // local shards only; confirm against the server implementation.
    public List<elephantdb.generated.Value> directMultiGet(String domain, List<ByteBuffer> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException;

    public List<elephantdb.generated.Value> directKryoMultiGet(String domain, List<ByteBuffer> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException;

  }

  // Asynchronous mirror of Iface: each method returns void and delivers its
  // result (or error) through the per-call AsyncMethodCallback instead of a
  // return value. Domain/service exceptions surface when the callback calls
  // the matching <method>_call.getResult().
  public interface AsyncIface extends elephantdb.generated.ElephantDBShared.AsyncIface {

    public void get(String domain, ByteBuffer key, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_call> resultHandler) throws org.apache.thrift.TException;

    public void getString(String domain, String key, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getString_call> resultHandler) throws org.apache.thrift.TException;

    public void getInt(String domain, int key, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getInt_call> resultHandler) throws org.apache.thrift.TException;

    public void getLong(String domain, long key, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getLong_call> resultHandler) throws org.apache.thrift.TException;

    public void multiGet(String domain, List<ByteBuffer> key, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.multiGet_call> resultHandler) throws org.apache.thrift.TException;

    public void multiGetString(String domain, List<String> key, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.multiGetString_call> resultHandler) throws org.apache.thrift.TException;

    public void multiGetInt(String domain, List<Integer> key, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.multiGetInt_call> resultHandler) throws org.apache.thrift.TException;

    public void multiGetLong(String domain, List<Long> key, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.multiGetLong_call> resultHandler) throws org.apache.thrift.TException;

    public void directMultiGet(String domain, List<ByteBuffer> key, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.directMultiGet_call> resultHandler) throws org.apache.thrift.TException;

    public void directKryoMultiGet(String domain, List<ByteBuffer> key, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.directKryoMultiGet_call> resultHandler) throws org.apache.thrift.TException;

  }

  // Blocking client: implements Iface by writing <method>_args over the
  // protocol and decoding <method>_result (generated bodies continue below).
  public static class Client extends elephantdb.generated.ElephantDBShared.Client implements Iface {

    // Factory used by Thrift service multiplexers to mint Client instances.
    public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
      public Factory() {}
      public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
        return new Client(prot);
      }
      public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
        return new Client(iprot, oprot);
      }
    }

    // Single-protocol convenience constructor: same protocol for in and out.
    public Client(org.apache.thrift.protocol.TProtocol prot) {
      super(prot, prot);
    }

    public
Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) { super(iprot, oprot); } public elephantdb.generated.Value get(String domain, ByteBuffer key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { send_get(domain, key); return recv_get(); } public void send_get(String domain, ByteBuffer key) throws org.apache.thrift.TException { get_args args = new get_args(); args.set_domain(domain); args.set_key(key); sendBase("get", args); } public elephantdb.generated.Value recv_get() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { get_result result = new get_result(); receiveBase(result, "get"); if (result.is_set_success()) { return result.success; } if (result.dnfe != null) { throw result.dnfe; } if (result.hde != null) { throw result.hde; } if (result.dnle != null) { throw result.dnle; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "get failed: unknown result"); } public elephantdb.generated.Value getString(String domain, String key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { send_getString(domain, key); return recv_getString(); } public void send_getString(String domain, String key) throws org.apache.thrift.TException { getString_args args = new getString_args(); args.set_domain(domain); args.set_key(key); sendBase("getString", args); } public elephantdb.generated.Value recv_getString() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { getString_result result = new getString_result(); 
receiveBase(result, "getString"); if (result.is_set_success()) { return result.success; } if (result.dnfe != null) { throw result.dnfe; } if (result.hde != null) { throw result.hde; } if (result.dnle != null) { throw result.dnle; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getString failed: unknown result"); } public elephantdb.generated.Value getInt(String domain, int key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { send_getInt(domain, key); return recv_getInt(); } public void send_getInt(String domain, int key) throws org.apache.thrift.TException { getInt_args args = new getInt_args(); args.set_domain(domain); args.set_key(key); sendBase("getInt", args); } public elephantdb.generated.Value recv_getInt() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { getInt_result result = new getInt_result(); receiveBase(result, "getInt"); if (result.is_set_success()) { return result.success; } if (result.dnfe != null) { throw result.dnfe; } if (result.hde != null) { throw result.hde; } if (result.dnle != null) { throw result.dnle; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getInt failed: unknown result"); } public elephantdb.generated.Value getLong(String domain, long key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { send_getLong(domain, key); return recv_getLong(); } public void send_getLong(String domain, long key) throws org.apache.thrift.TException { getLong_args args = new getLong_args(); args.set_domain(domain); args.set_key(key); sendBase("getLong", args); } public 
elephantdb.generated.Value recv_getLong() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { getLong_result result = new getLong_result(); receiveBase(result, "getLong"); if (result.is_set_success()) { return result.success; } if (result.dnfe != null) { throw result.dnfe; } if (result.hde != null) { throw result.hde; } if (result.dnle != null) { throw result.dnle; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getLong failed: unknown result"); } public List<elephantdb.generated.Value> multiGet(String domain, List<ByteBuffer> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { send_multiGet(domain, key); return recv_multiGet(); } public void send_multiGet(String domain, List<ByteBuffer> key) throws org.apache.thrift.TException { multiGet_args args = new multiGet_args(); args.set_domain(domain); args.set_key(key); sendBase("multiGet", args); } public List<elephantdb.generated.Value> recv_multiGet() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { multiGet_result result = new multiGet_result(); receiveBase(result, "multiGet"); if (result.is_set_success()) { return result.success; } if (result.dnfe != null) { throw result.dnfe; } if (result.hde != null) { throw result.hde; } if (result.dnle != null) { throw result.dnle; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "multiGet failed: unknown result"); } public List<elephantdb.generated.Value> multiGetString(String domain, List<String> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, 
elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { send_multiGetString(domain, key); return recv_multiGetString(); } public void send_multiGetString(String domain, List<String> key) throws org.apache.thrift.TException { multiGetString_args args = new multiGetString_args(); args.set_domain(domain); args.set_key(key); sendBase("multiGetString", args); } public List<elephantdb.generated.Value> recv_multiGetString() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { multiGetString_result result = new multiGetString_result(); receiveBase(result, "multiGetString"); if (result.is_set_success()) { return result.success; } if (result.dnfe != null) { throw result.dnfe; } if (result.hde != null) { throw result.hde; } if (result.dnle != null) { throw result.dnle; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "multiGetString failed: unknown result"); } public List<elephantdb.generated.Value> multiGetInt(String domain, List<Integer> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { send_multiGetInt(domain, key); return recv_multiGetInt(); } public void send_multiGetInt(String domain, List<Integer> key) throws org.apache.thrift.TException { multiGetInt_args args = new multiGetInt_args(); args.set_domain(domain); args.set_key(key); sendBase("multiGetInt", args); } public List<elephantdb.generated.Value> recv_multiGetInt() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { multiGetInt_result result = new multiGetInt_result(); receiveBase(result, "multiGetInt"); if (result.is_set_success()) { return result.success; } if (result.dnfe 
!= null) { throw result.dnfe; } if (result.hde != null) { throw result.hde; } if (result.dnle != null) { throw result.dnle; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "multiGetInt failed: unknown result"); } public List<elephantdb.generated.Value> multiGetLong(String domain, List<Long> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { send_multiGetLong(domain, key); return recv_multiGetLong(); } public void send_multiGetLong(String domain, List<Long> key) throws org.apache.thrift.TException { multiGetLong_args args = new multiGetLong_args(); args.set_domain(domain); args.set_key(key); sendBase("multiGetLong", args); } public List<elephantdb.generated.Value> recv_multiGetLong() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { multiGetLong_result result = new multiGetLong_result(); receiveBase(result, "multiGetLong"); if (result.is_set_success()) { return result.success; } if (result.dnfe != null) { throw result.dnfe; } if (result.hde != null) { throw result.hde; } if (result.dnle != null) { throw result.dnle; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "multiGetLong failed: unknown result"); } public List<elephantdb.generated.Value> directMultiGet(String domain, List<ByteBuffer> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { send_directMultiGet(domain, key); return recv_directMultiGet(); } public void send_directMultiGet(String domain, List<ByteBuffer> key) throws org.apache.thrift.TException { directMultiGet_args args = new directMultiGet_args(); 
args.set_domain(domain); args.set_key(key); sendBase("directMultiGet", args); } public List<elephantdb.generated.Value> recv_directMultiGet() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { directMultiGet_result result = new directMultiGet_result(); receiveBase(result, "directMultiGet"); if (result.is_set_success()) { return result.success; } if (result.dnfe != null) { throw result.dnfe; } if (result.hde != null) { throw result.hde; } if (result.dnle != null) { throw result.dnle; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "directMultiGet failed: unknown result"); } public List<elephantdb.generated.Value> directKryoMultiGet(String domain, List<ByteBuffer> key) throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { send_directKryoMultiGet(domain, key); return recv_directKryoMultiGet(); } public void send_directKryoMultiGet(String domain, List<ByteBuffer> key) throws org.apache.thrift.TException { directKryoMultiGet_args args = new directKryoMultiGet_args(); args.set_domain(domain); args.set_key(key); sendBase("directKryoMultiGet", args); } public List<elephantdb.generated.Value> recv_directKryoMultiGet() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { directKryoMultiGet_result result = new directKryoMultiGet_result(); receiveBase(result, "directKryoMultiGet"); if (result.is_set_success()) { return result.success; } if (result.dnfe != null) { throw result.dnfe; } if (result.hde != null) { throw result.hde; } if (result.dnle != null) { throw result.dnle; } throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "directKryoMultiGet failed: unknown result"); } } public static class AsyncClient extends elephantdb.generated.ElephantDBShared.AsyncClient implements AsyncIface { public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> { private org.apache.thrift.async.TAsyncClientManager clientManager; private org.apache.thrift.protocol.TProtocolFactory protocolFactory; public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) { this.clientManager = clientManager; this.protocolFactory = protocolFactory; } public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) { return new AsyncClient(protocolFactory, clientManager, transport); } } public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) { super(protocolFactory, clientManager, transport); } public void get(String domain, ByteBuffer key, org.apache.thrift.async.AsyncMethodCallback<get_call> resultHandler) throws org.apache.thrift.TException { checkReady(); get_call method_call = new get_call(domain, key, resultHandler, this, ___protocolFactory, ___transport); this.___currentMethod = method_call; ___manager.call(method_call); } public static class get_call extends org.apache.thrift.async.TAsyncMethodCall { private String domain; private ByteBuffer key; public get_call(String domain, ByteBuffer key, org.apache.thrift.async.AsyncMethodCallback<get_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException { super(client, protocolFactory, transport, resultHandler, false); this.domain = 
domain; this.key = key; } public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException { prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get", org.apache.thrift.protocol.TMessageType.CALL, 0)); get_args args = new get_args(); args.set_domain(domain); args.set_key(key); args.write(prot); prot.writeMessageEnd(); } public elephantdb.generated.Value getResult() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) { throw new IllegalStateException("Method call not finished!"); } org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array()); org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport); return (new Client(prot)).recv_get(); } } public void getString(String domain, String key, org.apache.thrift.async.AsyncMethodCallback<getString_call> resultHandler) throws org.apache.thrift.TException { checkReady(); getString_call method_call = new getString_call(domain, key, resultHandler, this, ___protocolFactory, ___transport); this.___currentMethod = method_call; ___manager.call(method_call); } public static class getString_call extends org.apache.thrift.async.TAsyncMethodCall { private String domain; private String key; public getString_call(String domain, String key, org.apache.thrift.async.AsyncMethodCallback<getString_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException { super(client, protocolFactory, transport, resultHandler, false); this.domain = domain; this.key = key; } public void 
write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException { prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getString", org.apache.thrift.protocol.TMessageType.CALL, 0)); getString_args args = new getString_args(); args.set_domain(domain); args.set_key(key); args.write(prot); prot.writeMessageEnd(); } public elephantdb.generated.Value getResult() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) { throw new IllegalStateException("Method call not finished!"); } org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array()); org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport); return (new Client(prot)).recv_getString(); } } public void getInt(String domain, int key, org.apache.thrift.async.AsyncMethodCallback<getInt_call> resultHandler) throws org.apache.thrift.TException { checkReady(); getInt_call method_call = new getInt_call(domain, key, resultHandler, this, ___protocolFactory, ___transport); this.___currentMethod = method_call; ___manager.call(method_call); } public static class getInt_call extends org.apache.thrift.async.TAsyncMethodCall { private String domain; private int key; public getInt_call(String domain, int key, org.apache.thrift.async.AsyncMethodCallback<getInt_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException { super(client, protocolFactory, transport, resultHandler, false); this.domain = domain; this.key = key; } public void write_args(org.apache.thrift.protocol.TProtocol prot) throws 
org.apache.thrift.TException { prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getInt", org.apache.thrift.protocol.TMessageType.CALL, 0)); getInt_args args = new getInt_args(); args.set_domain(domain); args.set_key(key); args.write(prot); prot.writeMessageEnd(); } public elephantdb.generated.Value getResult() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) { throw new IllegalStateException("Method call not finished!"); } org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array()); org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport); return (new Client(prot)).recv_getInt(); } } public void getLong(String domain, long key, org.apache.thrift.async.AsyncMethodCallback<getLong_call> resultHandler) throws org.apache.thrift.TException { checkReady(); getLong_call method_call = new getLong_call(domain, key, resultHandler, this, ___protocolFactory, ___transport); this.___currentMethod = method_call; ___manager.call(method_call); } public static class getLong_call extends org.apache.thrift.async.TAsyncMethodCall { private String domain; private long key; public getLong_call(String domain, long key, org.apache.thrift.async.AsyncMethodCallback<getLong_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException { super(client, protocolFactory, transport, resultHandler, false); this.domain = domain; this.key = key; } public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException { prot.writeMessageBegin(new 
org.apache.thrift.protocol.TMessage("getLong", org.apache.thrift.protocol.TMessageType.CALL, 0)); getLong_args args = new getLong_args(); args.set_domain(domain); args.set_key(key); args.write(prot); prot.writeMessageEnd(); } public elephantdb.generated.Value getResult() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException { if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) { throw new IllegalStateException("Method call not finished!"); } org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array()); org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport); return (new Client(prot)).recv_getLong(); } } public void multiGet(String domain, List<ByteBuffer> key, org.apache.thrift.async.AsyncMethodCallback<multiGet_call> resultHandler) throws org.apache.thrift.TException { checkReady(); multiGet_call method_call = new multiGet_call(domain, key, resultHandler, this, ___protocolFactory, ___transport); this.___currentMethod = method_call; ___manager.call(method_call); } public static class multiGet_call extends org.apache.thrift.async.TAsyncMethodCall { private String domain; private List<ByteBuffer> key; public multiGet_call(String domain, List<ByteBuffer> key, org.apache.thrift.async.AsyncMethodCallback<multiGet_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException { super(client, protocolFactory, transport, resultHandler, false); this.domain = domain; this.key = key; } public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException { prot.writeMessageBegin(new 
// NOTE(review): Thrift-compiler-generated async client code. Do NOT edit by
// hand -- change the service IDL and regenerate. Comments below were added
// for readability only; all code tokens are unchanged.
// (continuation of multiGet_call.write_args(), whose opening lies before this
// chunk; the generator emits seqid 0 for every async call)
org.apache.thrift.protocol.TMessage("multiGet", org.apache.thrift.protocol.TMessageType.CALL, 0));
        multiGet_args args = new multiGet_args();
        args.set_domain(domain);
        args.set_key(key);
        args.write(prot);
        prot.writeMessageEnd();
      }

      // Deserializes the multiGet response from the completed frame buffer,
      // rethrowing any declared service exception via the sync Client reader.
      public List<elephantdb.generated.Value> getResult() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_multiGet();
      }
    }

    // Starts an asynchronous multiGetString RPC; the outcome is delivered to
    // resultHandler by the async manager.
    public void multiGetString(String domain, List<String> key, org.apache.thrift.async.AsyncMethodCallback<multiGetString_call> resultHandler) throws org.apache.thrift.TException {
      checkReady();
      multiGetString_call method_call = new multiGetString_call(domain, key, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    // State object for one in-flight multiGetString call.
    public static class multiGetString_call extends org.apache.thrift.async.TAsyncMethodCall {
      private String domain;
      private List<String> key;
      public multiGetString_call(String domain, List<String> key, org.apache.thrift.async.AsyncMethodCallback<multiGetString_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
        this.domain = domain;
        this.key = key;
      }
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("multiGetString", org.apache.thrift.protocol.TMessageType.CALL, 0));
        multiGetString_args args = new multiGetString_args();
        args.set_domain(domain);
        args.set_key(key);
        args.write(prot);
        prot.writeMessageEnd();
      }
      public List<elephantdb.generated.Value> getResult() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_multiGetString();
      }
    }

    // Starts an asynchronous multiGetInt RPC (integer keys).
    public void multiGetInt(String domain, List<Integer> key, org.apache.thrift.async.AsyncMethodCallback<multiGetInt_call> resultHandler) throws org.apache.thrift.TException {
      checkReady();
      multiGetInt_call method_call = new multiGetInt_call(domain, key, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    // State object for one in-flight multiGetInt call.
    public static class multiGetInt_call extends org.apache.thrift.async.TAsyncMethodCall {
      private String domain;
      private List<Integer> key;
      public multiGetInt_call(String domain, List<Integer> key, org.apache.thrift.async.AsyncMethodCallback<multiGetInt_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
        this.domain = domain;
        this.key = key;
      }
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("multiGetInt", org.apache.thrift.protocol.TMessageType.CALL, 0));
        multiGetInt_args args = new multiGetInt_args();
        args.set_domain(domain);
        args.set_key(key);
        args.write(prot);
        prot.writeMessageEnd();
      }
      public List<elephantdb.generated.Value> getResult() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_multiGetInt();
      }
    }

    // Starts an asynchronous multiGetLong RPC (long keys).
    public void multiGetLong(String domain, List<Long> key, org.apache.thrift.async.AsyncMethodCallback<multiGetLong_call> resultHandler) throws org.apache.thrift.TException {
      checkReady();
      multiGetLong_call method_call = new multiGetLong_call(domain, key, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    // State object for one in-flight multiGetLong call.
    public static class multiGetLong_call extends org.apache.thrift.async.TAsyncMethodCall {
      private String domain;
      private List<Long> key;
      public multiGetLong_call(String domain, List<Long> key, org.apache.thrift.async.AsyncMethodCallback<multiGetLong_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
        this.domain = domain;
        this.key = key;
      }
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("multiGetLong", org.apache.thrift.protocol.TMessageType.CALL, 0));
        multiGetLong_args args = new multiGetLong_args();
        args.set_domain(domain);
        args.set_key(key);
        args.write(prot);
        prot.writeMessageEnd();
      }
      public List<elephantdb.generated.Value> getResult() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_multiGetLong();
      }
    }

    // Starts an asynchronous directMultiGet RPC (raw binary keys).
    public void directMultiGet(String domain, List<ByteBuffer> key, org.apache.thrift.async.AsyncMethodCallback<directMultiGet_call> resultHandler) throws org.apache.thrift.TException {
      checkReady();
      directMultiGet_call method_call = new directMultiGet_call(domain, key, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    // State object for one in-flight directMultiGet call.
    public static class directMultiGet_call extends org.apache.thrift.async.TAsyncMethodCall {
      private String domain;
      private List<ByteBuffer> key;
      public directMultiGet_call(String domain, List<ByteBuffer> key, org.apache.thrift.async.AsyncMethodCallback<directMultiGet_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
        this.domain = domain;
        this.key = key;
      }
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("directMultiGet", org.apache.thrift.protocol.TMessageType.CALL, 0));
        directMultiGet_args args = new directMultiGet_args();
        args.set_domain(domain);
        args.set_key(key);
        args.write(prot);
        prot.writeMessageEnd();
      }
      public List<elephantdb.generated.Value> getResult() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_directMultiGet();
      }
    }

    // Starts an asynchronous directKryoMultiGet RPC (Kryo-serialized keys).
    public void directKryoMultiGet(String domain, List<ByteBuffer> key, org.apache.thrift.async.AsyncMethodCallback<directKryoMultiGet_call> resultHandler) throws org.apache.thrift.TException {
      checkReady();
      directKryoMultiGet_call method_call = new directKryoMultiGet_call(domain, key, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    // State object for one in-flight directKryoMultiGet call. The constructor
    // is cut at the chunk boundary and continues below this span.
    public static class directKryoMultiGet_call extends org.apache.thrift.async.TAsyncMethodCall {
      private String domain;
      private List<ByteBuffer> key;
      public directKryoMultiGet_call(String domain, List<ByteBuffer> key, org.apache.thrift.async.AsyncMethodCallback<directKryoMultiGet_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport,
// NOTE(review): Thrift-compiler-generated server processor code. Do NOT edit
// by hand -- regenerate from the service IDL. Comments added for readability
// only; code tokens are unchanged.
// (continuation of the directKryoMultiGet_call constructor opened above)
resultHandler, false);
        this.domain = domain;
        this.key = key;
      }
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("directKryoMultiGet", org.apache.thrift.protocol.TMessageType.CALL, 0));
        directKryoMultiGet_args args = new directKryoMultiGet_args();
        args.set_domain(domain);
        args.set_key(key);
        args.write(prot);
        prot.writeMessageEnd();
      }
      public List<elephantdb.generated.Value> getResult() throws elephantdb.generated.DomainNotFoundException, elephantdb.generated.HostsDownException, elephantdb.generated.DomainNotLoadedException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_directKryoMultiGet();
      }
    }

  }

  // Server-side processor: dispatches each incoming message by method name to
  // the user-supplied Iface implementation. Inherits the shared-service
  // handlers from ElephantDBShared.Processor.
  public static class Processor<I extends Iface> extends elephantdb.generated.ElephantDBShared.Processor implements org.apache.thrift.TProcessor {
    private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
    public Processor(I iface) {
      super(iface, getProcessMap(new HashMap<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
    }

    protected Processor(I iface, Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
      super(iface, getProcessMap(processMap));
    }

    // Registers one ProcessFunction per RPC declared in the IDL; keys are the
    // wire-level method names.
    private static <I extends Iface> Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
      processMap.put("get", new get());
      processMap.put("getString", new getString());
      processMap.put("getInt", new getInt());
      processMap.put("getLong", new getLong());
      processMap.put("multiGet", new multiGet());
      processMap.put("multiGetString", new multiGetString());
      processMap.put("multiGetInt", new multiGetInt());
      processMap.put("multiGetLong", new multiGetLong());
      processMap.put("directMultiGet", new directMultiGet());
      processMap.put("directKryoMultiGet", new directKryoMultiGet());
      return processMap;
    }

    // Each ProcessFunction below unpacks the call's args, invokes the handler,
    // and captures declared service exceptions into the result struct so they
    // travel back to the client instead of tearing down the connection.
    private static class get<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_args> {
      public get() {
        super("get");
      }

      protected get_args getEmptyArgsInstance() {
        return new get_args();
      }

      protected get_result getResult(I iface, get_args args) throws org.apache.thrift.TException {
        get_result result = new get_result();
        try {
          result.success = iface.get(args.domain, args.key);
        } catch (elephantdb.generated.DomainNotFoundException dnfe) {
          result.dnfe = dnfe;
        } catch (elephantdb.generated.HostsDownException hde) {
          result.hde = hde;
        } catch (elephantdb.generated.DomainNotLoadedException dnle) {
          result.dnle = dnle;
        }
        return result;
      }
    }

    private static class getString<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getString_args> {
      public getString() {
        super("getString");
      }

      protected getString_args getEmptyArgsInstance() {
        return new getString_args();
      }

      protected getString_result getResult(I iface, getString_args args) throws org.apache.thrift.TException {
        getString_result result = new getString_result();
        try {
          result.success = iface.getString(args.domain, args.key);
        } catch (elephantdb.generated.DomainNotFoundException dnfe) {
          result.dnfe = dnfe;
        } catch (elephantdb.generated.HostsDownException hde) {
          result.hde = hde;
        } catch (elephantdb.generated.DomainNotLoadedException dnle) {
          result.dnle = dnle;
        }
        return result;
      }
    }

    private static class getInt<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getInt_args> {
      public getInt() {
        super("getInt");
      }

      protected getInt_args getEmptyArgsInstance() {
        return new getInt_args();
      }

      protected getInt_result getResult(I iface, getInt_args args) throws org.apache.thrift.TException {
        getInt_result result = new getInt_result();
        try {
          result.success = iface.getInt(args.domain, args.key);
        } catch (elephantdb.generated.DomainNotFoundException dnfe) {
          result.dnfe = dnfe;
        } catch (elephantdb.generated.HostsDownException hde) {
          result.hde = hde;
        } catch (elephantdb.generated.DomainNotLoadedException dnle) {
          result.dnle = dnle;
        }
        return result;
      }
    }

    private static class getLong<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getLong_args> {
      public getLong() {
        super("getLong");
      }

      protected getLong_args getEmptyArgsInstance() {
        return new getLong_args();
      }

      protected getLong_result getResult(I iface, getLong_args args) throws org.apache.thrift.TException {
        getLong_result result = new getLong_result();
        try {
          result.success = iface.getLong(args.domain, args.key);
        } catch (elephantdb.generated.DomainNotFoundException dnfe) {
          result.dnfe = dnfe;
        } catch (elephantdb.generated.HostsDownException hde) {
          result.hde = hde;
        } catch (elephantdb.generated.DomainNotLoadedException dnle) {
          result.dnle = dnle;
        }
        return result;
      }
    }

    private static class multiGet<I extends Iface> extends org.apache.thrift.ProcessFunction<I, multiGet_args> {
      public multiGet() {
        super("multiGet");
      }

      protected multiGet_args getEmptyArgsInstance() {
        return new multiGet_args();
      }

      protected multiGet_result getResult(I iface, multiGet_args args) throws org.apache.thrift.TException {
        multiGet_result result = new multiGet_result();
        try {
          result.success = iface.multiGet(args.domain, args.key);
        } catch (elephantdb.generated.DomainNotFoundException dnfe) {
          result.dnfe = dnfe;
        } catch (elephantdb.generated.HostsDownException hde) {
          result.hde = hde;
        } catch (elephantdb.generated.DomainNotLoadedException dnle) {
          result.dnle = dnle;
        }
        return result;
      }
    }

    private static class multiGetString<I extends Iface> extends org.apache.thrift.ProcessFunction<I, multiGetString_args> {
      public multiGetString() {
        super("multiGetString");
      }

      protected multiGetString_args getEmptyArgsInstance() {
        return new multiGetString_args();
      }

      protected multiGetString_result getResult(I iface, multiGetString_args args) throws org.apache.thrift.TException {
        multiGetString_result result = new multiGetString_result();
        try {
          result.success = iface.multiGetString(args.domain, args.key);
        } catch (elephantdb.generated.DomainNotFoundException dnfe) {
          result.dnfe = dnfe;
        } catch (elephantdb.generated.HostsDownException hde) {
          result.hde = hde;
        } catch (elephantdb.generated.DomainNotLoadedException dnle) {
          result.dnle = dnle;
        }
        return result;
      }
    }

    private static class multiGetInt<I extends Iface> extends org.apache.thrift.ProcessFunction<I, multiGetInt_args> {
      public multiGetInt() {
        super("multiGetInt");
      }

      protected multiGetInt_args getEmptyArgsInstance() {
        return new multiGetInt_args();
      }

      protected multiGetInt_result getResult(I iface, multiGetInt_args args) throws org.apache.thrift.TException {
        multiGetInt_result result = new multiGetInt_result();
        try {
          result.success = iface.multiGetInt(args.domain, args.key);
        } catch (elephantdb.generated.DomainNotFoundException dnfe) {
          result.dnfe = dnfe;
        } catch (elephantdb.generated.HostsDownException hde) {
          result.hde = hde;
        } catch (elephantdb.generated.DomainNotLoadedException dnle) {
          result.dnle = dnle;
        }
        return result;
      }
    }

    private static class multiGetLong<I extends Iface> extends org.apache.thrift.ProcessFunction<I, multiGetLong_args> {
      public multiGetLong() {
        super("multiGetLong");
      }

      protected multiGetLong_args getEmptyArgsInstance() {
        return new multiGetLong_args();
      }

      protected multiGetLong_result getResult(I iface, multiGetLong_args args) throws org.apache.thrift.TException {
        multiGetLong_result result = new multiGetLong_result();
        try {
          result.success = iface.multiGetLong(args.domain, args.key);
        } catch (elephantdb.generated.DomainNotFoundException dnfe) {
          result.dnfe = dnfe;
        } catch (elephantdb.generated.HostsDownException hde) {
          result.hde = hde;
        } catch (elephantdb.generated.DomainNotLoadedException dnle) {
          result.dnle = dnle;
        }
        return result;
      }
    }

    private static class directMultiGet<I extends Iface> extends org.apache.thrift.ProcessFunction<I, directMultiGet_args> {
      public directMultiGet() {
        super("directMultiGet");
      }

      protected directMultiGet_args getEmptyArgsInstance() {
        return new directMultiGet_args();
      }

      protected directMultiGet_result getResult(I iface, directMultiGet_args args) throws org.apache.thrift.TException {
        directMultiGet_result result = new directMultiGet_result();
        try {
          result.success = iface.directMultiGet(args.domain, args.key);
        } catch (elephantdb.generated.DomainNotFoundException dnfe) {
          result.dnfe = dnfe;
        } catch (elephantdb.generated.HostsDownException hde) {
          result.hde = hde;
        } catch (elephantdb.generated.DomainNotLoadedException dnle) {
          result.dnle = dnle;
        }
        return result;
      }
    }

    private static class directKryoMultiGet<I extends Iface> extends org.apache.thrift.ProcessFunction<I, directKryoMultiGet_args> {
      public directKryoMultiGet() {
        super("directKryoMultiGet");
      }

      protected directKryoMultiGet_args getEmptyArgsInstance() {
        return new directKryoMultiGet_args();
      }

      protected directKryoMultiGet_result getResult(I iface, directKryoMultiGet_args args) throws org.apache.thrift.TException {
        directKryoMultiGet_result result = new directKryoMultiGet_result();
        try {
          result.success = iface.directKryoMultiGet(args.domain, args.key);
        } catch (elephantdb.generated.DomainNotFoundException dnfe) {
          result.dnfe = dnfe;
        } catch (elephantdb.generated.HostsDownException hde) {
          result.hde = hde;
        } catch (elephantdb.generated.DomainNotLoadedException dnle) {
          result.dnle = dnle;
        }
        return result;
      }
    }

  }

  // Generated argument struct for get(); declaration continues below this span.
  public static class get_args implements org.apache.thrift.TBase<get_args, get_args._Fields>, java.io.Serializable, Cloneable {
    private static
// NOTE(review): Thrift-compiler-generated struct body. Do NOT edit by hand --
// regenerate from the IDL. Comments added for readability only.
// (continuation of the get_args field declaration opened above: "private static")
final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("get_args");

    private static final org.apache.thrift.protocol.TField DOMAIN_FIELD_DESC = new org.apache.thrift.protocol.TField("domain", org.apache.thrift.protocol.TType.STRING, (short)1);
    private static final org.apache.thrift.protocol.TField KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("key", org.apache.thrift.protocol.TType.STRING, (short)2);

    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
    static {
      schemes.put(StandardScheme.class, new get_argsStandardSchemeFactory());
      schemes.put(TupleScheme.class, new get_argsTupleSchemeFactory());
    }

    private String domain; // required
    private ByteBuffer key; // required

    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
      DOMAIN((short)1, "domain"),
      KEY((short)2, "key");

      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

      static {
        for (_Fields field : EnumSet.allOf(_Fields.class)) {
          byName.put(field.getFieldName(), field);
        }
      }

      /**
       * Find the _Fields constant that matches fieldId, or null if its not found.
       */
      public static _Fields findByThriftId(int fieldId) {
        switch(fieldId) {
          case 1: // DOMAIN
            return DOMAIN;
          case 2: // KEY
            return KEY;
          default:
            return null;
        }
      }

      /**
       * Find the _Fields constant that matches fieldId, throwing an exception
       * if it is not found.
       */
      public static _Fields findByThriftIdOrThrow(int fieldId) {
        _Fields fields = findByThriftId(fieldId);
        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
        return fields;
      }

      /**
       * Find the _Fields constant that matches name, or null if its not found.
       */
      public static _Fields findByName(String name) {
        return byName.get(name);
      }

      private final short _thriftId;
      private final String _fieldName;

      _Fields(short thriftId, String fieldName) {
        _thriftId = thriftId;
        _fieldName = fieldName;
      }

      public short getThriftFieldId() {
        return _thriftId;
      }

      public String getFieldName() {
        return _fieldName;
      }
    }

    // isset id assignments
    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
    static {
      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
      tmpMap.put(_Fields.DOMAIN, new org.apache.thrift.meta_data.FieldMetaData("domain", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
      tmpMap.put(_Fields.KEY, new org.apache.thrift.meta_data.FieldMetaData("key", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)));
      metaDataMap = Collections.unmodifiableMap(tmpMap);
      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(get_args.class, metaDataMap);
    }

    public get_args() {
    }

    public get_args(
      String domain,
      ByteBuffer key)
    {
      this();
      this.domain = domain;
      this.key = key;
    }

    /**
     * Performs a deep copy on <i>other</i>.
     */
    public get_args(get_args other) {
      if (other.is_set_domain()) {
        this.domain = other.domain;
      }
      if (other.is_set_key()) {
        this.key = org.apache.thrift.TBaseHelper.copyBinary(other.key);
; // stray empty statement emitted by the generator -- harmless
      }
    }

    public get_args deepCopy() {
      return new get_args(this);
    }

    @Override
    public void clear() {
      this.domain = null;
      this.key = null;
    }

    public String get_domain() {
      return this.domain;
    }

    public void set_domain(String domain) {
      this.domain = domain;
    }

    public void unset_domain() {
      this.domain = null;
    }

    /** Returns true if field domain is set (has been assigned a value) and false otherwise */
    public boolean is_set_domain() {
      return this.domain != null;
    }

    public void set_domain_isSet(boolean value) {
      if (!value) {
        this.domain = null;
      }
    }

    public byte[] get_key() {
      set_key(org.apache.thrift.TBaseHelper.rightSize(key));
      return key == null ? null : key.array();
    }

    public ByteBuffer buffer_for_key() {
      return key;
    }

    public void set_key(byte[] key) {
      set_key(key == null ? (ByteBuffer)null : ByteBuffer.wrap(key));
    }

    public void set_key(ByteBuffer key) {
      this.key = key;
    }

    public void unset_key() {
      this.key = null;
    }

    /** Returns true if field key is set (has been assigned a value) and false otherwise */
    public boolean is_set_key() {
      return this.key != null;
    }

    public void set_key_isSet(boolean value) {
      if (!value) {
        this.key = null;
      }
    }

    public void setFieldValue(_Fields field, Object value) {
      switch (field) {
      case DOMAIN:
        if (value == null) {
          unset_domain();
        } else {
          set_domain((String)value);
        }
        break;

      case KEY:
        if (value == null) {
          unset_key();
        } else {
          set_key((ByteBuffer)value);
        }
        break;

      }
    }

    public Object getFieldValue(_Fields field) {
      switch (field) {
      case DOMAIN:
        return get_domain();

      case KEY:
        return get_key();

      }
      throw new IllegalStateException();
    }

    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
    public boolean isSet(_Fields field) {
      if (field == null) {
        throw new IllegalArgumentException();
      }

      switch (field) {
      case DOMAIN:
        return is_set_domain();
      case KEY:
        return is_set_key();
      }
      throw new IllegalStateException();
    }

    @Override
    public boolean equals(Object that) {
      if (that == null)
        return false;
      if (that instanceof get_args)
        return this.equals((get_args)that);
      return false;
    }

    public boolean equals(get_args that) {
      if (that == null)
        return false;

      boolean this_present_domain = true && this.is_set_domain();
      boolean that_present_domain = true && that.is_set_domain();
      if (this_present_domain || that_present_domain) {
        if (!(this_present_domain && that_present_domain))
          return false;
        if (!this.domain.equals(that.domain))
          return false;
      }

      boolean this_present_key = true && this.is_set_key();
      boolean that_present_key = true && that.is_set_key();
      if (this_present_key || that_present_key) {
        if (!(this_present_key && that_present_key))
          return false;
        if (!this.key.equals(that.key))
          return false;
      }

      return true;
    }

    @Override
    public int hashCode() {
      HashCodeBuilder builder = new HashCodeBuilder();

      boolean present_domain = true && (is_set_domain());
      builder.append(present_domain);
      if (present_domain)
        builder.append(domain);

      boolean present_key = true && (is_set_key());
      builder.append(present_key);
      if (present_key)
        builder.append(key);

      return builder.toHashCode();
    }

    public int compareTo(get_args other) {
      if (!getClass().equals(other.getClass())) {
        return getClass().getName().compareTo(other.getClass().getName());
      }

      int lastComparison = 0;
      get_args typedOther = (get_args)other;

      lastComparison = Boolean.valueOf(is_set_domain()).compareTo(typedOther.is_set_domain());
      if (lastComparison != 0) {
        return lastComparison;
      }
      if (is_set_domain()) {
        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.domain, typedOther.domain);
        if (lastComparison != 0) {
          return lastComparison;
        }
      }
      lastComparison = Boolean.valueOf(is_set_key()).compareTo(typedOther.is_set_key());
      if (lastComparison != 0) {
        return lastComparison;
      }
      if (is_set_key()) {
        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.key, typedOther.key);
        if (lastComparison != 0) {
          return lastComparison;
        }
      }
      return 0;
    }

    public _Fields fieldForId(int fieldId) {
      return _Fields.findByThriftId(fieldId);
    }

    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
    }

    @Override
    public String toString() {
      StringBuilder sb = new StringBuilder("get_args(");
      boolean first = true;

      sb.append("domain:");
      if (this.domain == null) {
        sb.append("null");
      } else {
        sb.append(this.domain);
      }
      first = false;
      if (!first) sb.append(", ");
      sb.append("key:");
      if (this.key == null) {
        sb.append("null");
      } else {
        org.apache.thrift.TBaseHelper.toString(this.key, sb);
      }
      first = false;
      sb.append(")");
      return sb.toString();
    }

    public void validate() throws org.apache.thrift.TException {
      // check for required fields
    }

    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
      try {
        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }

    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
      try {
        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }

    private static class get_argsStandardSchemeFactory implements SchemeFactory {
      public get_argsStandardScheme getScheme() {
        return new get_argsStandardScheme();
      }
    }

    private static class get_argsStandardScheme extends StandardScheme<get_args> {

      public void read(org.apache.thrift.protocol.TProtocol iprot, get_args struct) throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TField schemeField;
        iprot.readStructBegin();
        while (true)
        {
          schemeField = iprot.readFieldBegin();
          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
            break;
          }
          switch (schemeField.id) {
            case 1: // DOMAIN
              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                struct.domain = iprot.readString();
                struct.set_domain_isSet(true);
              } else {
                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
              }
              break;
            case 2: // KEY
              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                struct.key = iprot.readBinary();
                struct.set_key_isSet(true);
              } else {
                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
              }
              break;
            default:
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          iprot.readFieldEnd();
        }
        iprot.readStructEnd();
        struct.validate();
      }

      public void write(org.apache.thrift.protocol.TProtocol oprot, get_args struct) throws org.apache.thrift.TException {
        struct.validate();

        oprot.writeStructBegin(STRUCT_DESC);
        if (struct.domain != null) {
          oprot.writeFieldBegin(DOMAIN_FIELD_DESC);
          oprot.writeString(struct.domain);
          oprot.writeFieldEnd();
        }
        if (struct.key != null) {
          oprot.writeFieldBegin(KEY_FIELD_DESC);
          oprot.writeBinary(struct.key);
          oprot.writeFieldEnd();
        }
        oprot.writeFieldStop();
        oprot.writeStructEnd();
      }

    }

    private static class get_argsTupleSchemeFactory implements SchemeFactory {
      public get_argsTupleScheme getScheme() {
        return new get_argsTupleScheme();
      }
    }

    private static class get_argsTupleScheme extends TupleScheme<get_args> {

      @Override
      public void write(org.apache.thrift.protocol.TProtocol prot, get_args struct) throws org.apache.thrift.TException {
        TTupleProtocol oprot = (TTupleProtocol) prot;
        BitSet optionals = new BitSet();
        if (struct.is_set_domain()) {
          optionals.set(0);
        }
        if (struct.is_set_key()) {
          optionals.set(1);
        }
        oprot.writeBitSet(optionals, 2);
        if (struct.is_set_domain()) {
          oprot.writeString(struct.domain);
        }
        if (struct.is_set_key()) {
          oprot.writeBinary(struct.key);
        }
      }

      @Override
      public void read(org.apache.thrift.protocol.TProtocol prot, get_args struct) throws org.apache.thrift.TException {
        TTupleProtocol iprot = (TTupleProtocol) prot;
        BitSet incoming = iprot.readBitSet(2);
        if (incoming.get(0)) {
          struct.domain = iprot.readString();
          struct.set_domain_isSet(true);
        }
        if (incoming.get(1)) {
          struct.key = iprot.readBinary();
          struct.set_key_isSet(true);
        }
      }
    }

  }

  // Generated result struct for get(); carries either the success value or one
  // of the declared service exceptions back to the caller.
  public static class get_result implements org.apache.thrift.TBase<get_result, get_result._Fields>, java.io.Serializable, Cloneable {
    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("get_result");

    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRUCT, (short)0);
    private static final org.apache.thrift.protocol.TField DNFE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnfe", org.apache.thrift.protocol.TType.STRUCT, (short)1);
    private static final org.apache.thrift.protocol.TField HDE_FIELD_DESC = new org.apache.thrift.protocol.TField("hde", org.apache.thrift.protocol.TType.STRUCT, (short)2);
    private static final org.apache.thrift.protocol.TField DNLE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnle", org.apache.thrift.protocol.TType.STRUCT, (short)3);

    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
    static {
      schemes.put(StandardScheme.class, new get_resultStandardSchemeFactory());
      schemes.put(TupleScheme.class, new get_resultTupleSchemeFactory());
    }

    private elephantdb.generated.Value success; // required
    private elephantdb.generated.DomainNotFoundException dnfe; // required
    private elephantdb.generated.HostsDownException hde; // required
    private elephantdb.generated.DomainNotLoadedException dnle; // required

    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them.
*/
    // NOTE(review): Thrift-compiler-generated struct body (get_result). Do NOT
    // edit by hand -- regenerate from the IDL. Comments added for readability
    // only; this member continues past this chunk.
    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
      SUCCESS((short)0, "success"),
      DNFE((short)1, "dnfe"),
      HDE((short)2, "hde"),
      DNLE((short)3, "dnle");

      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

      static {
        for (_Fields field : EnumSet.allOf(_Fields.class)) {
          byName.put(field.getFieldName(), field);
        }
      }

      /**
       * Find the _Fields constant that matches fieldId, or null if its not found.
       */
      public static _Fields findByThriftId(int fieldId) {
        switch(fieldId) {
          case 0: // SUCCESS
            return SUCCESS;
          case 1: // DNFE
            return DNFE;
          case 2: // HDE
            return HDE;
          case 3: // DNLE
            return DNLE;
          default:
            return null;
        }
      }

      /**
       * Find the _Fields constant that matches fieldId, throwing an exception
       * if it is not found.
       */
      public static _Fields findByThriftIdOrThrow(int fieldId) {
        _Fields fields = findByThriftId(fieldId);
        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
        return fields;
      }

      /**
       * Find the _Fields constant that matches name, or null if its not found.
       */
      public static _Fields findByName(String name) {
        return byName.get(name);
      }

      private final short _thriftId;
      private final String _fieldName;

      _Fields(short thriftId, String fieldName) {
        _thriftId = thriftId;
        _fieldName = fieldName;
      }

      public short getThriftFieldId() {
        return _thriftId;
      }

      public String getFieldName() {
        return _fieldName;
      }
    }

    // isset id assignments
    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
    static {
      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
      tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, elephantdb.generated.Value.class)));
      tmpMap.put(_Fields.DNFE, new org.apache.thrift.meta_data.FieldMetaData("dnfe", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
      tmpMap.put(_Fields.HDE, new org.apache.thrift.meta_data.FieldMetaData("hde", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
      tmpMap.put(_Fields.DNLE, new org.apache.thrift.meta_data.FieldMetaData("dnle", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
      metaDataMap = Collections.unmodifiableMap(tmpMap);
      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(get_result.class, metaDataMap);
    }

    public get_result() {
    }

    public get_result(
      elephantdb.generated.Value success,
      elephantdb.generated.DomainNotFoundException dnfe,
      elephantdb.generated.HostsDownException hde,
      elephantdb.generated.DomainNotLoadedException dnle)
    {
      this();
      this.success = success;
      this.dnfe = dnfe;
      this.hde = hde;
      this.dnle = dnle;
    }

    /**
     * Performs a deep copy on <i>other</i>.
     */
    public get_result(get_result other) {
      if (other.is_set_success()) {
        this.success = new elephantdb.generated.Value(other.success);
      }
      if (other.is_set_dnfe()) {
        this.dnfe = new elephantdb.generated.DomainNotFoundException(other.dnfe);
      }
      if (other.is_set_hde()) {
        this.hde = new elephantdb.generated.HostsDownException(other.hde);
      }
      if (other.is_set_dnle()) {
        this.dnle = new elephantdb.generated.DomainNotLoadedException(other.dnle);
      }
    }

    public get_result deepCopy() {
      return new get_result(this);
    }

    @Override
    public void clear() {
      this.success = null;
      this.dnfe = null;
      this.hde = null;
      this.dnle = null;
    }

    public elephantdb.generated.Value get_success() {
      return this.success;
    }

    public void set_success(elephantdb.generated.Value success) {
      this.success = success;
    }

    public void unset_success() {
      this.success = null;
    }

    /** Returns true if field success is set (has been assigned a value) and false otherwise */
    public boolean is_set_success() {
      return this.success != null;
    }

    public void set_success_isSet(boolean value) {
      if (!value) {
        this.success = null;
      }
    }

    public elephantdb.generated.DomainNotFoundException get_dnfe() {
      return this.dnfe;
    }

    public void set_dnfe(elephantdb.generated.DomainNotFoundException dnfe) {
      this.dnfe = dnfe;
    }

    public void unset_dnfe() {
      this.dnfe = null;
    }

    /** Returns true if field dnfe is set (has been assigned a value) and false otherwise */
    public boolean is_set_dnfe() {
      return this.dnfe != null;
    }

    public void set_dnfe_isSet(boolean value) {
      if (!value) {
        this.dnfe = null;
      }
    }

    public elephantdb.generated.HostsDownException get_hde() {
      return this.hde;
    }

    public void set_hde(elephantdb.generated.HostsDownException hde) {
      this.hde = hde;
    }

    public void unset_hde() {
      this.hde = null;
    }

    /** Returns true if field hde is set (has been assigned a value) and false otherwise */
    public boolean is_set_hde() {
      return this.hde != null;
    }

    public void set_hde_isSet(boolean value) {
      if (!value) {
        this.hde = null;
      }
    }

    public elephantdb.generated.DomainNotLoadedException get_dnle() {
      return this.dnle;
    }

    public void set_dnle(elephantdb.generated.DomainNotLoadedException dnle) {
      this.dnle = dnle;
    }

    public void unset_dnle() {
      this.dnle = null;
    }

    /** Returns true if field dnle is set (has been assigned a value) and false otherwise */
    public boolean is_set_dnle() {
      return this.dnle != null;
    }

    public void set_dnle_isSet(boolean value) {
      if (!value) {
        this.dnle = null;
      }
    }

    public void setFieldValue(_Fields field, Object value) {
      switch (field) {
      case SUCCESS:
        if (value == null) {
          unset_success();
        } else {
          set_success((elephantdb.generated.Value)value);
        }
        break;

      case DNFE:
        if (value == null) {
          unset_dnfe();
        } else {
          set_dnfe((elephantdb.generated.DomainNotFoundException)value);
        }
        break;

      case HDE:
        if (value == null) {
          unset_hde();
        } else {
          set_hde((elephantdb.generated.HostsDownException)value);
        }
        break;

      case DNLE:
        if (value == null) {
          unset_dnle();
        } else {
          set_dnle((elephantdb.generated.DomainNotLoadedException)value);
        }
        break;

      }
    }

    public Object getFieldValue(_Fields field) {
      switch (field) {
      case SUCCESS:
        return get_success();

      case DNFE:
        return get_dnfe();

      case HDE:
        return get_hde();

      case DNLE:
        return get_dnle();

      }
      throw new IllegalStateException();
    }

    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
    public boolean isSet(_Fields field) {
      if (field == null) {
        throw new IllegalArgumentException();
      }

      switch (field) {
      case SUCCESS:
        return is_set_success();
      case DNFE:
        return is_set_dnfe();
      case HDE:
        return is_set_hde();
      case DNLE:
        return is_set_dnle();
      }
      throw new IllegalStateException();
    }

    @Override
    public boolean equals(Object that) {
      if (that == null)
        return false;
      if (that instanceof get_result)
        return this.equals((get_result)that);
      return false;
    }

    public boolean equals(get_result that) {
      if (that == null)
        return false;

      boolean this_present_success = true && this.is_set_success();
      boolean that_present_success = true && that.is_set_success();
      if (this_present_success || that_present_success) {
        if (!(this_present_success && that_present_success))
          return false;
        if (!this.success.equals(that.success))
          return false;
      }

      boolean this_present_dnfe = true && this.is_set_dnfe();
      boolean that_present_dnfe = true && that.is_set_dnfe();
      if (this_present_dnfe || that_present_dnfe) {
        if (!(this_present_dnfe && that_present_dnfe))
          return false;
        if (!this.dnfe.equals(that.dnfe))
          return false;
      }

      boolean this_present_hde = true && this.is_set_hde();
      boolean that_present_hde = true && that.is_set_hde();
      if (this_present_hde || that_present_hde) {
        if (!(this_present_hde && that_present_hde))
          return false;
        if (!this.hde.equals(that.hde))
          return false;
      }

      boolean this_present_dnle = true && this.is_set_dnle();
      boolean that_present_dnle = true && that.is_set_dnle();
      if (this_present_dnle || that_present_dnle) {
        if (!(this_present_dnle && that_present_dnle))
          return false;
        if (!this.dnle.equals(that.dnle))
          return false;
      }

      return true;
    }

    @Override
    public int hashCode() {
      HashCodeBuilder builder = new HashCodeBuilder();

      boolean present_success = true && (is_set_success());
      builder.append(present_success);
      if (present_success)
        builder.append(success);

      boolean present_dnfe = true && (is_set_dnfe());
      builder.append(present_dnfe);
      if (present_dnfe)
        builder.append(dnfe);

      boolean present_hde = true && (is_set_hde());
      builder.append(present_hde);
      if (present_hde)
        builder.append(hde);

      boolean present_dnle = true && (is_set_dnle());
      builder.append(present_dnle);
      if (present_dnle)
        builder.append(dnle);

      return builder.toHashCode();
    }

    public int compareTo(get_result other) {
      if (!getClass().equals(other.getClass())) {
        return getClass().getName().compareTo(other.getClass().getName());
      }

      int lastComparison = 0;
      get_result typedOther = (get_result)other;

      lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success());
      if (lastComparison != 0) {
        return lastComparison;
      }
      if (is_set_success()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnfe()).compareTo(typedOther.is_set_dnfe()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnfe()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnfe, typedOther.dnfe); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_hde()).compareTo(typedOther.is_set_hde()); if (lastComparison != 0) { return lastComparison; } if (is_set_hde()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hde, typedOther.hde); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnle()).compareTo(typedOther.is_set_dnle()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnle()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnle, typedOther.dnle); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("get_result("); boolean first = true; sb.append("success:"); if (this.success == null) { sb.append("null"); } else { sb.append(this.success); } first = false; if (!first) sb.append(", "); sb.append("dnfe:"); if (this.dnfe == null) { sb.append("null"); } else { sb.append(this.dnfe); } first = false; if (!first) sb.append(", "); sb.append("hde:"); if (this.hde == null) { sb.append("null"); } else { sb.append(this.hde); } first = false; if (!first) sb.append(", "); 
sb.append("dnle:"); if (this.dnle == null) { sb.append("null"); } else { sb.append(this.dnle); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class get_resultStandardSchemeFactory implements SchemeFactory { public get_resultStandardScheme getScheme() { return new get_resultStandardScheme(); } } private static class get_resultStandardScheme extends StandardScheme<get_result> { public void read(org.apache.thrift.protocol.TProtocol iprot, get_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 0: // SUCCESS if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.success = new elephantdb.generated.Value(); struct.success.read(iprot); struct.set_success_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 1: // DNFE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, 
schemeField.type); } break; case 2: // HDE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // DNLE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, get_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.success != null) { oprot.writeFieldBegin(SUCCESS_FIELD_DESC); struct.success.write(oprot); oprot.writeFieldEnd(); } if (struct.dnfe != null) { oprot.writeFieldBegin(DNFE_FIELD_DESC); struct.dnfe.write(oprot); oprot.writeFieldEnd(); } if (struct.hde != null) { oprot.writeFieldBegin(HDE_FIELD_DESC); struct.hde.write(oprot); oprot.writeFieldEnd(); } if (struct.dnle != null) { oprot.writeFieldBegin(DNLE_FIELD_DESC); struct.dnle.write(oprot); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class get_resultTupleSchemeFactory implements SchemeFactory { public get_resultTupleScheme getScheme() { return new get_resultTupleScheme(); } } private static class get_resultTupleScheme extends TupleScheme<get_result> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, get_result struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.is_set_success()) { optionals.set(0); } if (struct.is_set_dnfe()) { 
optionals.set(1); } if (struct.is_set_hde()) { optionals.set(2); } if (struct.is_set_dnle()) { optionals.set(3); } oprot.writeBitSet(optionals, 4); if (struct.is_set_success()) { struct.success.write(oprot); } if (struct.is_set_dnfe()) { struct.dnfe.write(oprot); } if (struct.is_set_hde()) { struct.hde.write(oprot); } if (struct.is_set_dnle()) { struct.dnle.write(oprot); } }
@Override public void read(org.apache.thrift.protocol.TProtocol prot, get_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(4); if (incoming.get(0)) { struct.success = new elephantdb.generated.Value(); struct.success.read(iprot); struct.set_success_isSet(true); } if (incoming.get(1)) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } if (incoming.get(2)) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } if (incoming.get(3)) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } } } }
// NOTE(review): Apache Thrift AUTO-GENERATED code -- regenerate from the
// .thrift IDL rather than hand-editing.
// getString_args: argument struct for the service's getString(domain, key) call.
public static class getString_args implements org.apache.thrift.TBase<getString_args, getString_args._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getString_args"); private static final org.apache.thrift.protocol.TField DOMAIN_FIELD_DESC = new org.apache.thrift.protocol.TField("domain", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("key", org.apache.thrift.protocol.TType.STRING, (short)2); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new getString_argsStandardSchemeFactory()); schemes.put(TupleScheme.class, new getString_argsTupleSchemeFactory()); }
private String domain; // required
private String key; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum { DOMAIN((short)1, "domain"), KEY((short)2, "key"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } }
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // DOMAIN
return DOMAIN; case 2: // KEY
return KEY; default: return null; } }
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; }
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) { return byName.get(name); }
private final short _thriftId; private final String _fieldName;
_Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; }
public short getThriftFieldId() { return _thriftId; }
public String getFieldName() { return _fieldName; }
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.DOMAIN, new org.apache.thrift.meta_data.FieldMetaData("domain", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.KEY, new org.apache.thrift.meta_data.FieldMetaData("key", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getString_args.class, metaDataMap); }
public getString_args() { }
public getString_args( String domain, String key) { this(); this.domain = domain; this.key = key; }
/**
 * Performs a deep copy on <i>other</i>.
 */
public getString_args(getString_args other) { if (other.is_set_domain()) { this.domain = other.domain; } if (other.is_set_key()) { this.key = other.key; } }
public getString_args deepCopy() { return new getString_args(this); }
@Override public void clear() { this.domain = null; this.key = null; }
// Bean-style accessors; "unset" is represented by null for both String fields.
public String get_domain() { return this.domain; }
public void set_domain(String domain) { this.domain = domain; }
public void unset_domain() { this.domain = null; }
/** Returns true if field domain is set (has been assigned a value) and false otherwise */
public boolean is_set_domain() { return this.domain != null; }
public void set_domain_isSet(boolean value) { if (!value) { this.domain = null; } }
public String get_key() { return this.key; }
public void set_key(String key) { this.key = key; }
public void unset_key() { this.key = null; }
/** Returns true if field key is set (has been assigned a value) and false otherwise */
public boolean is_set_key() { return this.key != null; }
public void set_key_isSet(boolean value) { if (!value) { this.key = null; } }
public void setFieldValue(_Fields field, Object value) { switch (field) { case DOMAIN: if (value == null) { unset_domain(); } else { set_domain((String)value); } break; case KEY: if (value == null) { unset_key(); } else { set_key((String)value); } break; } }
public Object getFieldValue(_Fields field) { switch (field) { case DOMAIN: return get_domain(); case KEY: return get_key(); } throw new IllegalStateException(); }
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case DOMAIN: return is_set_domain(); case KEY: return is_set_key(); } throw new IllegalStateException(); }
@Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof getString_args) return this.equals((getString_args)that); return false; }
// Field-by-field equality; both-unset counts as equal for each field.
public boolean equals(getString_args that) { if (that == null) return false; boolean this_present_domain = true && this.is_set_domain(); boolean that_present_domain = true && that.is_set_domain(); if (this_present_domain || that_present_domain) { if (!(this_present_domain && that_present_domain)) return false; if (!this.domain.equals(that.domain)) return false; } boolean this_present_key = true && this.is_set_key(); boolean that_present_key = true && that.is_set_key(); if (this_present_key || that_present_key) { if (!(this_present_key && that_present_key)) return false; if (!this.key.equals(that.key)) return false; } return true; }
@Override public int hashCode() { HashCodeBuilder builder = new HashCodeBuilder(); boolean present_domain = true && (is_set_domain()); builder.append(present_domain); if (present_domain) builder.append(domain); boolean present_key = true && (is_set_key()); builder.append(present_key); if (present_key) builder.append(key); return builder.toHashCode(); }
// Orders by is-set flag first, then by field value, in declared field order.
public int compareTo(getString_args other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; getString_args typedOther = (getString_args)other; lastComparison = Boolean.valueOf(is_set_domain()).compareTo(typedOther.is_set_domain()); if (lastComparison != 0) { return lastComparison; } if (is_set_domain()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.domain, typedOther.domain); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_key()).compareTo(typedOther.is_set_key()); if (lastComparison != 0) { return lastComparison; } if (is_set_key()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.key, typedOther.key); if (lastComparison != 0) { return lastComparison; } } return 0; }
public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); }
// Serialization entry points: dispatch to the scheme matching the protocol.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); }
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); }
@Override public String toString() { StringBuilder sb = new StringBuilder("getString_args("); boolean first = true; sb.append("domain:"); if (this.domain == null) { sb.append("null"); } else { sb.append(this.domain); } first = false; if (!first) sb.append(", "); sb.append("key:"); if (this.key == null) { sb.append("null"); } else { sb.append(this.key); } first = false; sb.append(")"); return sb.toString(); }
public void validate() throws org.apache.thrift.TException {
// check for required fields
}
// Java serialization delegated to Thrift's compact protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } }
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } }
private static class getString_argsStandardSchemeFactory implements SchemeFactory { public getString_argsStandardScheme getScheme() { return new getString_argsStandardScheme(); } }
// Standard (field-tagged) wire format; unknown field ids/types are skipped for forward compatibility.
private static class getString_argsStandardScheme extends StandardScheme<getString_args> { public void read(org.apache.thrift.protocol.TProtocol iprot, getString_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // DOMAIN
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // KEY
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.key = iprot.readString(); struct.set_key_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); }
public void write(org.apache.thrift.protocol.TProtocol oprot, getString_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.domain != null) { oprot.writeFieldBegin(DOMAIN_FIELD_DESC); oprot.writeString(struct.domain); oprot.writeFieldEnd(); } if (struct.key != null) { oprot.writeFieldBegin(KEY_FIELD_DESC); oprot.writeString(struct.key); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } }
private static class getString_argsTupleSchemeFactory implements SchemeFactory { public getString_argsTupleScheme getScheme() { return new getString_argsTupleScheme(); } }
// Tuple (compact, bitset-prefixed) wire format: a 2-bit presence BitSet then the set fields in order.
private static class getString_argsTupleScheme extends TupleScheme<getString_args> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, getString_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.is_set_domain()) { optionals.set(0); } if (struct.is_set_key()) { optionals.set(1); } oprot.writeBitSet(optionals, 2); if (struct.is_set_domain()) { oprot.writeString(struct.domain); } if (struct.is_set_key()) { oprot.writeString(struct.key); } }
@Override public void read(org.apache.thrift.protocol.TProtocol prot, getString_args struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } if (incoming.get(1)) { struct.key = iprot.readString(); struct.set_key_isSet(true); } } } }
// getString_result: result struct for getString() -- a success Value or one of three service exceptions.
public static class getString_result implements org.apache.thrift.TBase<getString_result, getString_result._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getString_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRUCT, (short)0); private static final org.apache.thrift.protocol.TField DNFE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnfe", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final org.apache.thrift.protocol.TField HDE_FIELD_DESC = new org.apache.thrift.protocol.TField("hde", org.apache.thrift.protocol.TType.STRUCT, (short)2); private static final org.apache.thrift.protocol.TField DNLE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnle", org.apache.thrift.protocol.TType.STRUCT, (short)3); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new getString_resultStandardSchemeFactory()); schemes.put(TupleScheme.class, new getString_resultTupleSchemeFactory()); }
private elephantdb.generated.Value success; // required
private elephantdb.generated.DomainNotFoundException dnfe; // required
private elephantdb.generated.HostsDownException hde; // required
private elephantdb.generated.DomainNotLoadedException dnle; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them.
*/
// NOTE(review): Apache Thrift AUTO-GENERATED code for getString_result --
// do not hand-edit; regenerate from the .thrift IDL. Comments are review notes only.
public enum _Fields implements org.apache.thrift.TFieldIdEnum { SUCCESS((short)0, "success"), DNFE((short)1, "dnfe"), HDE((short)2, "hde"), DNLE((short)3, "dnle"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } }
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 0: // SUCCESS
return SUCCESS; case 1: // DNFE
return DNFE; case 2: // HDE
return HDE; case 3: // DNLE
return DNLE; default: return null; } }
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; }
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) { return byName.get(name); }
private final short _thriftId; private final String _fieldName;
_Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; }
public short getThriftFieldId() { return _thriftId; }
public String getFieldName() { return _fieldName; }
}
// isset id assignments
// Field metadata registered with the global Thrift metadata registry at class-load time.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, elephantdb.generated.Value.class))); tmpMap.put(_Fields.DNFE, new org.apache.thrift.meta_data.FieldMetaData("dnfe", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); tmpMap.put(_Fields.HDE, new org.apache.thrift.meta_data.FieldMetaData("hde", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); tmpMap.put(_Fields.DNLE, new org.apache.thrift.meta_data.FieldMetaData("dnle", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getString_result.class, metaDataMap); }
// No-arg constructor (used by deserialization) and all-fields convenience constructor.
public getString_result() { }
public getString_result( elephantdb.generated.Value success, elephantdb.generated.DomainNotFoundException dnfe, elephantdb.generated.HostsDownException hde, elephantdb.generated.DomainNotLoadedException dnle) { this(); this.success = success; this.dnfe = dnfe; this.hde = hde; this.dnle = dnle; }
/**
 * Performs a deep copy on <i>other</i>.
 */
public getString_result(getString_result other) { if (other.is_set_success()) { this.success = new elephantdb.generated.Value(other.success); } if (other.is_set_dnfe()) { this.dnfe = new elephantdb.generated.DomainNotFoundException(other.dnfe); } if (other.is_set_hde()) { this.hde = new elephantdb.generated.HostsDownException(other.hde); } if (other.is_set_dnle()) { this.dnle = new elephantdb.generated.DomainNotLoadedException(other.dnle); } }
public getString_result deepCopy() { return new getString_result(this); }
@Override public void clear() { this.success = null; this.dnfe = null; this.hde = null; this.dnle = null; }
// Bean-style accessors; "unset" is represented by null for all four fields.
public elephantdb.generated.Value get_success() { return this.success; }
public void set_success(elephantdb.generated.Value success) { this.success = success; }
public void unset_success() { this.success = null; }
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean is_set_success() { return this.success != null; }
public void set_success_isSet(boolean value) { if (!value) { this.success = null; } }
public elephantdb.generated.DomainNotFoundException get_dnfe() { return this.dnfe; }
public void set_dnfe(elephantdb.generated.DomainNotFoundException dnfe) { this.dnfe = dnfe; }
public void unset_dnfe() { this.dnfe = null; }
/** Returns true if field dnfe is set (has been assigned a value) and false otherwise */
public boolean is_set_dnfe() { return this.dnfe != null; }
public void set_dnfe_isSet(boolean value) { if (!value) { this.dnfe = null; } }
public elephantdb.generated.HostsDownException get_hde() { return this.hde; }
public void set_hde(elephantdb.generated.HostsDownException hde) { this.hde = hde; }
public void unset_hde() { this.hde = null; }
/** Returns true if field hde is set (has been assigned a value) and false otherwise */
public boolean is_set_hde() { return this.hde != null; }
public void set_hde_isSet(boolean value) { if (!value) { this.hde = null; } }
public elephantdb.generated.DomainNotLoadedException get_dnle() { return this.dnle; }
public void set_dnle(elephantdb.generated.DomainNotLoadedException dnle) { this.dnle = dnle; }
public void unset_dnle() { this.dnle = null; }
/** Returns true if field dnle is set (has been assigned a value) and false otherwise */
public boolean is_set_dnle() { return this.dnle != null; }
public void set_dnle_isSet(boolean value) { if (!value) { this.dnle = null; } }
public void setFieldValue(_Fields field, Object value) { switch (field) { case SUCCESS: if (value == null) { unset_success(); } else { set_success((elephantdb.generated.Value)value); } break; case DNFE: if (value == null) { unset_dnfe(); } else { set_dnfe((elephantdb.generated.DomainNotFoundException)value); } break; case HDE: if (value == null) { unset_hde(); } else { set_hde((elephantdb.generated.HostsDownException)value); } break; case DNLE: if (value == null) { unset_dnle(); } else { set_dnle((elephantdb.generated.DomainNotLoadedException)value); } break; } }
public Object getFieldValue(_Fields field) { switch (field) { case SUCCESS: return get_success(); case DNFE: return get_dnfe(); case HDE: return get_hde(); case DNLE: return get_dnle(); } throw new IllegalStateException(); }
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case SUCCESS: return is_set_success(); case DNFE: return is_set_dnfe(); case HDE: return is_set_hde(); case DNLE: return is_set_dnle(); } throw new IllegalStateException(); }
@Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof getString_result) return this.equals((getString_result)that); return false; }
// Field-by-field equality; both-unset counts as equal for each field.
public boolean equals(getString_result that) { if (that == null) return false; boolean this_present_success = true && this.is_set_success(); boolean that_present_success = true && that.is_set_success(); if (this_present_success || that_present_success) { if (!(this_present_success && that_present_success)) return false; if (!this.success.equals(that.success)) return false; } boolean this_present_dnfe = true && this.is_set_dnfe(); boolean that_present_dnfe = true && that.is_set_dnfe(); if (this_present_dnfe || that_present_dnfe) { if (!(this_present_dnfe && that_present_dnfe)) return false; if (!this.dnfe.equals(that.dnfe)) return false; } boolean this_present_hde = true && this.is_set_hde(); boolean that_present_hde = true && that.is_set_hde(); if (this_present_hde || that_present_hde) { if (!(this_present_hde && that_present_hde)) return false; if (!this.hde.equals(that.hde)) return false; } boolean this_present_dnle = true && this.is_set_dnle(); boolean that_present_dnle = true && that.is_set_dnle(); if (this_present_dnle || that_present_dnle) { if (!(this_present_dnle && that_present_dnle)) return false; if (!this.dnle.equals(that.dnle)) return false; } return true; }
@Override public int hashCode() { HashCodeBuilder builder = new HashCodeBuilder(); boolean present_success = true && (is_set_success()); builder.append(present_success); if (present_success) builder.append(success); boolean present_dnfe = true && (is_set_dnfe()); builder.append(present_dnfe); if (present_dnfe) builder.append(dnfe); boolean present_hde = true && (is_set_hde()); builder.append(present_hde); if (present_hde) builder.append(hde); boolean present_dnle = true && (is_set_dnle()); builder.append(present_dnle); if (present_dnle) builder.append(dnle); return builder.toHashCode(); }
// Orders by is-set flag first, then by field value, in declared field order.
public int compareTo(getString_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; getString_result typedOther = (getString_result)other; lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success()); if (lastComparison != 0) { return lastComparison; } if (is_set_success()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnfe()).compareTo(typedOther.is_set_dnfe()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnfe()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnfe, typedOther.dnfe); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_hde()).compareTo(typedOther.is_set_hde()); if (lastComparison != 0) { return lastComparison; } if (is_set_hde()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hde, typedOther.hde); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnle()).compareTo(typedOther.is_set_dnle()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnle()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnle, typedOther.dnle); if (lastComparison != 0) { return lastComparison; } } return 0; }
public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); }
// Serialization entry points: dispatch to the scheme matching the protocol.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); }
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); }
// NOTE(review): generated "first" flag is effectively dead (set false before every
// check); harmless quirk of the Thrift generator -- do not "fix" by hand.
@Override public String toString() { StringBuilder sb = new StringBuilder("getString_result("); boolean first = true; sb.append("success:"); if (this.success == null) { sb.append("null"); } else { sb.append(this.success); } first = false; if (!first) sb.append(", "); sb.append("dnfe:"); if (this.dnfe == null) { sb.append("null"); } else { sb.append(this.dnfe); } first = false; if (!first) sb.append(", "); sb.append("hde:"); if (this.hde == null) { sb.append("null"); } else { sb.append(this.hde); } first = false; if (!first) sb.append(", "); sb.append("dnle:"); if (this.dnle == null) { sb.append("null"); } else { sb.append(this.dnle); } first = false; sb.append(")"); return sb.toString(); }
public void validate() throws org.apache.thrift.TException {
// check for required fields
}
// Java serialization delegated to Thrift's compact protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } }
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } }
private static class getString_resultStandardSchemeFactory implements SchemeFactory { public getString_resultStandardScheme getScheme() { return new getString_resultStandardScheme(); } }
// Standard (field-tagged) wire format; unknown field ids/types are skipped for forward compatibility.
private static class getString_resultStandardScheme extends StandardScheme<getString_result> { public void read(org.apache.thrift.protocol.TProtocol iprot, getString_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.success = new elephantdb.generated.Value(); struct.success.read(iprot); struct.set_success_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 1: // DNFE
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnfe = new
elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // HDE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // DNLE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, getString_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.success != null) { oprot.writeFieldBegin(SUCCESS_FIELD_DESC); struct.success.write(oprot); oprot.writeFieldEnd(); } if (struct.dnfe != null) { oprot.writeFieldBegin(DNFE_FIELD_DESC); struct.dnfe.write(oprot); oprot.writeFieldEnd(); } if (struct.hde != null) { oprot.writeFieldBegin(HDE_FIELD_DESC); struct.hde.write(oprot); oprot.writeFieldEnd(); } if (struct.dnle != null) { oprot.writeFieldBegin(DNLE_FIELD_DESC); struct.dnle.write(oprot); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class getString_resultTupleSchemeFactory implements SchemeFactory { public getString_resultTupleScheme getScheme() { return new getString_resultTupleScheme(); } } private static class getString_resultTupleScheme extends TupleScheme<getString_result> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, getString_result struct) 
// NOTE(review): Thrift compiler-generated code — presumably regenerated from a .thrift IDL;
// confirm before hand-editing.
// End of getString_result's tuple scheme: write emits a 4-bit presence bitset, then the set
// fields in field-id order; read mirrors it. getInt_args then begins: struct descriptor plus
// field descriptors (domain: STRING id 1, key: I32 id 2) and the scheme-factory map.
throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.is_set_success()) { optionals.set(0); } if (struct.is_set_dnfe()) { optionals.set(1); } if (struct.is_set_hde()) { optionals.set(2); } if (struct.is_set_dnle()) { optionals.set(3); } oprot.writeBitSet(optionals, 4); if (struct.is_set_success()) { struct.success.write(oprot); } if (struct.is_set_dnfe()) { struct.dnfe.write(oprot); } if (struct.is_set_hde()) { struct.hde.write(oprot); } if (struct.is_set_dnle()) { struct.dnle.write(oprot); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, getString_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(4); if (incoming.get(0)) { struct.success = new elephantdb.generated.Value(); struct.success.read(iprot); struct.set_success_isSet(true); } if (incoming.get(1)) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } if (incoming.get(2)) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } if (incoming.get(3)) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } } } } public static class getInt_args implements org.apache.thrift.TBase<getInt_args, getInt_args._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getInt_args"); private static final org.apache.thrift.protocol.TField DOMAIN_FIELD_DESC = new org.apache.thrift.protocol.TField("domain", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("key", org.apache.thrift.protocol.TType.I32, (short)2); private static final Map<Class<? 
// getInt_args scheme registration, the backing fields, and the _Fields id/name enum with its
// findByThriftId / findByThriftIdOrThrow / findByName lookup helpers.
extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new getInt_argsStandardSchemeFactory()); schemes.put(TupleScheme.class, new getInt_argsTupleSchemeFactory()); } private String domain; // required private int key; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { DOMAIN((short)1, "domain"), KEY((short)2, "key"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // DOMAIN return DOMAIN; case 2: // KEY return KEY; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments private static final int __KEY_ISSET_ID = 0; private BitSet __isset_bit_vector = new BitSet(1); public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.DOMAIN, new org.apache.thrift.meta_data.FieldMetaData("domain", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.KEY, new org.apache.thrift.meta_data.FieldMetaData("key", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getInt_args.class, metaDataMap); } public getInt_args() { } public getInt_args( String domain, int key) { this(); this.domain = domain; this.key = key; set_key_isSet(true); } /** * Performs a deep copy on <i>other</i>. 
// Copy constructor and clear(), then the domain/key accessors. The primitive key field
// tracks its set-ness via __isset_bit_vector; domain uses null-as-unset. Generic
// setFieldValue/getFieldValue/isSet dispatch follows.
*/ public getInt_args(getInt_args other) { __isset_bit_vector.clear(); __isset_bit_vector.or(other.__isset_bit_vector); if (other.is_set_domain()) { this.domain = other.domain; } this.key = other.key; } public getInt_args deepCopy() { return new getInt_args(this); } @Override public void clear() { this.domain = null; set_key_isSet(false); this.key = 0; } public String get_domain() { return this.domain; } public void set_domain(String domain) { this.domain = domain; } public void unset_domain() { this.domain = null; } /** Returns true if field domain is set (has been assigned a value) and false otherwise */ public boolean is_set_domain() { return this.domain != null; } public void set_domain_isSet(boolean value) { if (!value) { this.domain = null; } } public int get_key() { return this.key; } public void set_key(int key) { this.key = key; set_key_isSet(true); } public void unset_key() { __isset_bit_vector.clear(__KEY_ISSET_ID); } /** Returns true if field key is set (has been assigned a value) and false otherwise */ public boolean is_set_key() { return __isset_bit_vector.get(__KEY_ISSET_ID); } public void set_key_isSet(boolean value) { __isset_bit_vector.set(__KEY_ISSET_ID, value); } public void setFieldValue(_Fields field, Object value) { switch (field) { case DOMAIN: if (value == null) { unset_domain(); } else { set_domain((String)value); } break; case KEY: if (value == null) { unset_key(); } else { set_key((Integer)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case DOMAIN: return get_domain(); case KEY: return Integer.valueOf(get_key()); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case DOMAIN: return is_set_domain(); case KEY: return is_set_key(); } throw new IllegalStateException(); } @Override public boolean 
// equals/hashCode/compareTo for getInt_args: the primitive key is treated as always present,
// so equality reduces to value comparison; compareTo orders by is-set flag, then value.
equals(Object that) { if (that == null) return false; if (that instanceof getInt_args) return this.equals((getInt_args)that); return false; } public boolean equals(getInt_args that) { if (that == null) return false; boolean this_present_domain = true && this.is_set_domain(); boolean that_present_domain = true && that.is_set_domain(); if (this_present_domain || that_present_domain) { if (!(this_present_domain && that_present_domain)) return false; if (!this.domain.equals(that.domain)) return false; } boolean this_present_key = true; boolean that_present_key = true; if (this_present_key || that_present_key) { if (!(this_present_key && that_present_key)) return false; if (this.key != that.key) return false; } return true; } @Override public int hashCode() { HashCodeBuilder builder = new HashCodeBuilder(); boolean present_domain = true && (is_set_domain()); builder.append(present_domain); if (present_domain) builder.append(domain); boolean present_key = true; builder.append(present_key); if (present_key) builder.append(key); return builder.toHashCode(); } public int compareTo(getInt_args other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; getInt_args typedOther = (getInt_args)other; lastComparison = Boolean.valueOf(is_set_domain()).compareTo(typedOther.is_set_domain()); if (lastComparison != 0) { return lastComparison; } if (is_set_domain()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.domain, typedOther.domain); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_key()).compareTo(typedOther.is_set_key()); if (lastComparison != 0) { return lastComparison; } if (is_set_key()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.key, typedOther.key); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void 
// Protocol read/write delegate to the registered scheme; toString prints domain (or "null")
// and the primitive key; validate() is a no-op; writeObject bridges Java serialization via
// the Thrift compact protocol.
read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("getInt_args("); boolean first = true; sb.append("domain:"); if (this.domain == null) { sb.append("null"); } else { sb.append(this.domain); } first = false; if (!first) sb.append(", "); sb.append("key:"); sb.append(this.key); first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
// readObject re-creates __isset_bit_vector before reading (Java deserialization skips the
// default constructor, per the generated note above). Standard scheme: tagged-field read
// loop with skip-on-mismatch, and write of domain (if non-null) plus the key i32.
__isset_bit_vector = new BitSet(1); read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class getInt_argsStandardSchemeFactory implements SchemeFactory { public getInt_argsStandardScheme getScheme() { return new getInt_argsStandardScheme(); } } private static class getInt_argsStandardScheme extends StandardScheme<getInt_args> { public void read(org.apache.thrift.protocol.TProtocol iprot, getInt_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // DOMAIN if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // KEY if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.key = iprot.readI32(); struct.set_key_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, getInt_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.domain != null) { oprot.writeFieldBegin(DOMAIN_FIELD_DESC); oprot.writeString(struct.domain); oprot.writeFieldEnd(); } oprot.writeFieldBegin(KEY_FIELD_DESC); oprot.writeI32(struct.key); oprot.writeFieldEnd(); oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class getInt_argsTupleSchemeFactory implements SchemeFactory { public 
// getInt_args tuple scheme: 2-bit presence bitset then domain/key. getInt_result then
// begins: struct descriptor and field descriptors (success id 0, dnfe id 1, hde id 2,
// dnle id 3 — all STRUCT-typed).
getInt_argsTupleScheme getScheme() { return new getInt_argsTupleScheme(); } } private static class getInt_argsTupleScheme extends TupleScheme<getInt_args> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, getInt_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.is_set_domain()) { optionals.set(0); } if (struct.is_set_key()) { optionals.set(1); } oprot.writeBitSet(optionals, 2); if (struct.is_set_domain()) { oprot.writeString(struct.domain); } if (struct.is_set_key()) { oprot.writeI32(struct.key); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, getInt_args struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } if (incoming.get(1)) { struct.key = iprot.readI32(); struct.set_key_isSet(true); } } } } public static class getInt_result implements org.apache.thrift.TBase<getInt_result, getInt_result._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getInt_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRUCT, (short)0); private static final org.apache.thrift.protocol.TField DNFE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnfe", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final org.apache.thrift.protocol.TField HDE_FIELD_DESC = new org.apache.thrift.protocol.TField("hde", org.apache.thrift.protocol.TType.STRUCT, (short)2); private static final org.apache.thrift.protocol.TField DNLE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnle", org.apache.thrift.protocol.TType.STRUCT, (short)3); private static final 
// NOTE(review): Thrift compiler-generated code — presumably regenerated from a .thrift IDL;
// confirm before hand-editing.
// getInt_result scheme registration, backing fields (success plus the dnfe/hde/dnle
// exception slots), and the _Fields enum with id/name lookup helpers. All four fields are
// null-as-unset object references.
Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new getInt_resultStandardSchemeFactory()); schemes.put(TupleScheme.class, new getInt_resultTupleSchemeFactory()); } private elephantdb.generated.Value success; // required private elephantdb.generated.DomainNotFoundException dnfe; // required private elephantdb.generated.HostsDownException hde; // required private elephantdb.generated.DomainNotLoadedException dnle; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { SUCCESS((short)0, "success"), DNFE((short)1, "dnfe"), HDE((short)2, "hde"), DNLE((short)3, "dnle"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 0: // SUCCESS return SUCCESS; case 1: // DNFE return DNFE; case 2: // HDE return HDE; case 3: // DNLE return DNLE; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, elephantdb.generated.Value.class))); tmpMap.put(_Fields.DNFE, new org.apache.thrift.meta_data.FieldMetaData("dnfe", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); tmpMap.put(_Fields.HDE, new org.apache.thrift.meta_data.FieldMetaData("hde", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); tmpMap.put(_Fields.DNLE, new org.apache.thrift.meta_data.FieldMetaData("dnle", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getInt_result.class, metaDataMap); } public getInt_result() { } public getInt_result( elephantdb.generated.Value success, elephantdb.generated.DomainNotFoundException dnfe, elephantdb.generated.HostsDownException hde, elephantdb.generated.DomainNotLoadedException dnle) { this(); this.success = success; this.dnfe = dnfe; this.hde = hde; this.dnle = 
// Copy constructor deep-copies each set field via the field type's own copy constructor;
// clear() nulls all four slots; then the success/dnfe/hde accessor groups.
dnle; } /** * Performs a deep copy on <i>other</i>. */ public getInt_result(getInt_result other) { if (other.is_set_success()) { this.success = new elephantdb.generated.Value(other.success); } if (other.is_set_dnfe()) { this.dnfe = new elephantdb.generated.DomainNotFoundException(other.dnfe); } if (other.is_set_hde()) { this.hde = new elephantdb.generated.HostsDownException(other.hde); } if (other.is_set_dnle()) { this.dnle = new elephantdb.generated.DomainNotLoadedException(other.dnle); } } public getInt_result deepCopy() { return new getInt_result(this); } @Override public void clear() { this.success = null; this.dnfe = null; this.hde = null; this.dnle = null; } public elephantdb.generated.Value get_success() { return this.success; } public void set_success(elephantdb.generated.Value success) { this.success = success; } public void unset_success() { this.success = null; } /** Returns true if field success is set (has been assigned a value) and false otherwise */ public boolean is_set_success() { return this.success != null; } public void set_success_isSet(boolean value) { if (!value) { this.success = null; } } public elephantdb.generated.DomainNotFoundException get_dnfe() { return this.dnfe; } public void set_dnfe(elephantdb.generated.DomainNotFoundException dnfe) { this.dnfe = dnfe; } public void unset_dnfe() { this.dnfe = null; } /** Returns true if field dnfe is set (has been assigned a value) and false otherwise */ public boolean is_set_dnfe() { return this.dnfe != null; } public void set_dnfe_isSet(boolean value) { if (!value) { this.dnfe = null; } } public elephantdb.generated.HostsDownException get_hde() { return this.hde; } public void set_hde(elephantdb.generated.HostsDownException hde) { this.hde = hde; } public void unset_hde() { this.hde = null; } /** Returns true if field hde is set (has been assigned a value) and false otherwise */ public boolean is_set_hde() { return this.hde != null; } public void set_hde_isSet(boolean value) { if (!value) { 
// dnle accessors, generic _Fields-based get/set/isSet dispatch, equals(Object) type check,
// and the start of the field-by-field typed equals.
this.hde = null; } } public elephantdb.generated.DomainNotLoadedException get_dnle() { return this.dnle; } public void set_dnle(elephantdb.generated.DomainNotLoadedException dnle) { this.dnle = dnle; } public void unset_dnle() { this.dnle = null; } /** Returns true if field dnle is set (has been assigned a value) and false otherwise */ public boolean is_set_dnle() { return this.dnle != null; } public void set_dnle_isSet(boolean value) { if (!value) { this.dnle = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { case SUCCESS: if (value == null) { unset_success(); } else { set_success((elephantdb.generated.Value)value); } break; case DNFE: if (value == null) { unset_dnfe(); } else { set_dnfe((elephantdb.generated.DomainNotFoundException)value); } break; case HDE: if (value == null) { unset_hde(); } else { set_hde((elephantdb.generated.HostsDownException)value); } break; case DNLE: if (value == null) { unset_dnle(); } else { set_dnle((elephantdb.generated.DomainNotLoadedException)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case SUCCESS: return get_success(); case DNFE: return get_dnfe(); case HDE: return get_hde(); case DNLE: return get_dnle(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case SUCCESS: return is_set_success(); case DNFE: return is_set_dnfe(); case HDE: return is_set_hde(); case DNLE: return is_set_dnle(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof getInt_result) return this.equals((getInt_result)that); return false; } public boolean equals(getInt_result that) { if (that == null) return false; boolean this_present_success = true && this.is_set_success(); boolean 
// Typed equals continued: fields match iff both sides have them set and values are equal.
// hashCode folds present/value pairs through HashCodeBuilder; compareTo orders by is-set
// flag first, then field value, in field-id order.
that_present_success = true && that.is_set_success(); if (this_present_success || that_present_success) { if (!(this_present_success && that_present_success)) return false; if (!this.success.equals(that.success)) return false; } boolean this_present_dnfe = true && this.is_set_dnfe(); boolean that_present_dnfe = true && that.is_set_dnfe(); if (this_present_dnfe || that_present_dnfe) { if (!(this_present_dnfe && that_present_dnfe)) return false; if (!this.dnfe.equals(that.dnfe)) return false; } boolean this_present_hde = true && this.is_set_hde(); boolean that_present_hde = true && that.is_set_hde(); if (this_present_hde || that_present_hde) { if (!(this_present_hde && that_present_hde)) return false; if (!this.hde.equals(that.hde)) return false; } boolean this_present_dnle = true && this.is_set_dnle(); boolean that_present_dnle = true && that.is_set_dnle(); if (this_present_dnle || that_present_dnle) { if (!(this_present_dnle && that_present_dnle)) return false; if (!this.dnle.equals(that.dnle)) return false; } return true; } @Override public int hashCode() { HashCodeBuilder builder = new HashCodeBuilder(); boolean present_success = true && (is_set_success()); builder.append(present_success); if (present_success) builder.append(success); boolean present_dnfe = true && (is_set_dnfe()); builder.append(present_dnfe); if (present_dnfe) builder.append(dnfe); boolean present_hde = true && (is_set_hde()); builder.append(present_hde); if (present_hde) builder.append(hde); boolean present_dnle = true && (is_set_dnle()); builder.append(present_dnle); if (present_dnle) builder.append(dnle); return builder.toHashCode(); } public int compareTo(getInt_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; getInt_result typedOther = (getInt_result)other; lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success()); if (lastComparison != 0) { return 
// compareTo continued for success/dnfe/hde/dnle; read/write delegate to the registered
// scheme; toString renders each field with "null" for unset.
lastComparison; } if (is_set_success()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnfe()).compareTo(typedOther.is_set_dnfe()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnfe()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnfe, typedOther.dnfe); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_hde()).compareTo(typedOther.is_set_hde()); if (lastComparison != 0) { return lastComparison; } if (is_set_hde()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hde, typedOther.hde); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnle()).compareTo(typedOther.is_set_dnle()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnle()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnle, typedOther.dnle); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("getInt_result("); boolean first = true; sb.append("success:"); if (this.success == null) { sb.append("null"); } else { sb.append(this.success); } first = false; if (!first) sb.append(", "); sb.append("dnfe:"); if (this.dnfe == null) { sb.append("null"); } else { sb.append(this.dnfe); } first = false; if (!first) sb.append(", "); sb.append("hde:"); if (this.hde == null) { sb.append("null"); } else { sb.append(this.hde); } 
// toString end; validate() is a no-op; writeObject/readObject bridge Java serialization via
// the Thrift compact protocol; standard scheme read loop begins (tagged fields, skip on
// unknown id or type mismatch).
first = false; if (!first) sb.append(", "); sb.append("dnle:"); if (this.dnle == null) { sb.append("null"); } else { sb.append(this.dnle); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class getInt_resultStandardSchemeFactory implements SchemeFactory { public getInt_resultStandardScheme getScheme() { return new getInt_resultStandardScheme(); } } private static class getInt_resultStandardScheme extends StandardScheme<getInt_result> { public void read(org.apache.thrift.protocol.TProtocol iprot, getInt_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 0: // SUCCESS if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.success = new elephantdb.generated.Value(); struct.success.read(iprot); struct.set_success_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 1: // DNFE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } else { 
// Standard scheme continued: remaining read cases (HDE, DNLE); write emits only non-null
// fields; then the tuple-scheme factory and the start of the tuple write.
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // HDE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // DNLE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, getInt_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.success != null) { oprot.writeFieldBegin(SUCCESS_FIELD_DESC); struct.success.write(oprot); oprot.writeFieldEnd(); } if (struct.dnfe != null) { oprot.writeFieldBegin(DNFE_FIELD_DESC); struct.dnfe.write(oprot); oprot.writeFieldEnd(); } if (struct.hde != null) { oprot.writeFieldBegin(HDE_FIELD_DESC); struct.hde.write(oprot); oprot.writeFieldEnd(); } if (struct.dnle != null) { oprot.writeFieldBegin(DNLE_FIELD_DESC); struct.dnle.write(oprot); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class getInt_resultTupleSchemeFactory implements SchemeFactory { public getInt_resultTupleScheme getScheme() { return new getInt_resultTupleScheme(); } } private static class getInt_resultTupleScheme extends TupleScheme<getInt_result> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, getInt_result struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if 
// getInt_result tuple scheme: 4-bit presence bitset then the set fields; read mirrors write.
// getLong_args then begins (domain: STRING id 1, key: I64 id 2); its declaration continues
// past this chunk.
(struct.is_set_success()) { optionals.set(0); } if (struct.is_set_dnfe()) { optionals.set(1); } if (struct.is_set_hde()) { optionals.set(2); } if (struct.is_set_dnle()) { optionals.set(3); } oprot.writeBitSet(optionals, 4); if (struct.is_set_success()) { struct.success.write(oprot); } if (struct.is_set_dnfe()) { struct.dnfe.write(oprot); } if (struct.is_set_hde()) { struct.hde.write(oprot); } if (struct.is_set_dnle()) { struct.dnle.write(oprot); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, getInt_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(4); if (incoming.get(0)) { struct.success = new elephantdb.generated.Value(); struct.success.read(iprot); struct.set_success_isSet(true); } if (incoming.get(1)) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } if (incoming.get(2)) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } if (incoming.get(3)) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } } } } public static class getLong_args implements org.apache.thrift.TBase<getLong_args, getLong_args._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getLong_args"); private static final org.apache.thrift.protocol.TField DOMAIN_FIELD_DESC = new org.apache.thrift.protocol.TField("domain", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("key", org.apache.thrift.protocol.TType.I64, (short)2); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new getLong_argsStandardSchemeFactory()); schemes.put(TupleScheme.class, new getLong_argsTupleSchemeFactory()); } private String domain; // required private long key; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { DOMAIN((short)1, "domain"), KEY((short)2, "key"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // DOMAIN return DOMAIN; case 2: // KEY return KEY; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments private static final int __KEY_ISSET_ID = 0; private BitSet __isset_bit_vector = new BitSet(1); public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.DOMAIN, new org.apache.thrift.meta_data.FieldMetaData("domain", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.KEY, new org.apache.thrift.meta_data.FieldMetaData("key", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getLong_args.class, metaDataMap); } public getLong_args() { } public getLong_args( String domain, long key) { this(); this.domain = domain; this.key = key; set_key_isSet(true); } /** * Performs a deep copy on <i>other</i>. 
*/ public getLong_args(getLong_args other) { __isset_bit_vector.clear(); __isset_bit_vector.or(other.__isset_bit_vector); if (other.is_set_domain()) { this.domain = other.domain; } this.key = other.key; } public getLong_args deepCopy() { return new getLong_args(this); } @Override public void clear() { this.domain = null; set_key_isSet(false); this.key = 0; } public String get_domain() { return this.domain; } public void set_domain(String domain) { this.domain = domain; } public void unset_domain() { this.domain = null; } /** Returns true if field domain is set (has been assigned a value) and false otherwise */ public boolean is_set_domain() { return this.domain != null; } public void set_domain_isSet(boolean value) { if (!value) { this.domain = null; } } public long get_key() { return this.key; } public void set_key(long key) { this.key = key; set_key_isSet(true); } public void unset_key() { __isset_bit_vector.clear(__KEY_ISSET_ID); } /** Returns true if field key is set (has been assigned a value) and false otherwise */ public boolean is_set_key() { return __isset_bit_vector.get(__KEY_ISSET_ID); } public void set_key_isSet(boolean value) { __isset_bit_vector.set(__KEY_ISSET_ID, value); } public void setFieldValue(_Fields field, Object value) { switch (field) { case DOMAIN: if (value == null) { unset_domain(); } else { set_domain((String)value); } break; case KEY: if (value == null) { unset_key(); } else { set_key((Long)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case DOMAIN: return get_domain(); case KEY: return Long.valueOf(get_key()); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case DOMAIN: return is_set_domain(); case KEY: return is_set_key(); } throw new IllegalStateException(); } @Override public boolean 
equals(Object that) { if (that == null) return false; if (that instanceof getLong_args) return this.equals((getLong_args)that); return false; } public boolean equals(getLong_args that) { if (that == null) return false; boolean this_present_domain = true && this.is_set_domain(); boolean that_present_domain = true && that.is_set_domain(); if (this_present_domain || that_present_domain) { if (!(this_present_domain && that_present_domain)) return false; if (!this.domain.equals(that.domain)) return false; } boolean this_present_key = true; boolean that_present_key = true; if (this_present_key || that_present_key) { if (!(this_present_key && that_present_key)) return false; if (this.key != that.key) return false; } return true; } @Override public int hashCode() { HashCodeBuilder builder = new HashCodeBuilder(); boolean present_domain = true && (is_set_domain()); builder.append(present_domain); if (present_domain) builder.append(domain); boolean present_key = true; builder.append(present_key); if (present_key) builder.append(key); return builder.toHashCode(); } public int compareTo(getLong_args other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; getLong_args typedOther = (getLong_args)other; lastComparison = Boolean.valueOf(is_set_domain()).compareTo(typedOther.is_set_domain()); if (lastComparison != 0) { return lastComparison; } if (is_set_domain()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.domain, typedOther.domain); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_key()).compareTo(typedOther.is_set_key()); if (lastComparison != 0) { return lastComparison; } if (is_set_key()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.key, typedOther.key); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } 
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("getLong_args("); boolean first = true; sb.append("domain:"); if (this.domain == null) { sb.append("null"); } else { sb.append(this.domain); } first = false; if (!first) sb.append(", "); sb.append("key:"); sb.append(this.key); first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
__isset_bit_vector = new BitSet(1); read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class getLong_argsStandardSchemeFactory implements SchemeFactory { public getLong_argsStandardScheme getScheme() { return new getLong_argsStandardScheme(); } } private static class getLong_argsStandardScheme extends StandardScheme<getLong_args> { public void read(org.apache.thrift.protocol.TProtocol iprot, getLong_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // DOMAIN if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // KEY if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.key = iprot.readI64(); struct.set_key_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, getLong_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.domain != null) { oprot.writeFieldBegin(DOMAIN_FIELD_DESC); oprot.writeString(struct.domain); oprot.writeFieldEnd(); } oprot.writeFieldBegin(KEY_FIELD_DESC); oprot.writeI64(struct.key); oprot.writeFieldEnd(); oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class getLong_argsTupleSchemeFactory implements SchemeFactory { public 
getLong_argsTupleScheme getScheme() { return new getLong_argsTupleScheme(); } } private static class getLong_argsTupleScheme extends TupleScheme<getLong_args> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, getLong_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.is_set_domain()) { optionals.set(0); } if (struct.is_set_key()) { optionals.set(1); } oprot.writeBitSet(optionals, 2); if (struct.is_set_domain()) { oprot.writeString(struct.domain); } if (struct.is_set_key()) { oprot.writeI64(struct.key); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, getLong_args struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } if (incoming.get(1)) { struct.key = iprot.readI64(); struct.set_key_isSet(true); } } } } public static class getLong_result implements org.apache.thrift.TBase<getLong_result, getLong_result._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getLong_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRUCT, (short)0); private static final org.apache.thrift.protocol.TField DNFE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnfe", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final org.apache.thrift.protocol.TField HDE_FIELD_DESC = new org.apache.thrift.protocol.TField("hde", org.apache.thrift.protocol.TType.STRUCT, (short)2); private static final org.apache.thrift.protocol.TField DNLE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnle", org.apache.thrift.protocol.TType.STRUCT, (short)3); private static 
final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new getLong_resultStandardSchemeFactory()); schemes.put(TupleScheme.class, new getLong_resultTupleSchemeFactory()); } private elephantdb.generated.Value success; // required private elephantdb.generated.DomainNotFoundException dnfe; // required private elephantdb.generated.HostsDownException hde; // required private elephantdb.generated.DomainNotLoadedException dnle; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { SUCCESS((short)0, "success"), DNFE((short)1, "dnfe"), HDE((short)2, "hde"), DNLE((short)3, "dnle"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 0: // SUCCESS return SUCCESS; case 1: // DNFE return DNFE; case 2: // HDE return HDE; case 3: // DNLE return DNLE; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, elephantdb.generated.Value.class))); tmpMap.put(_Fields.DNFE, new org.apache.thrift.meta_data.FieldMetaData("dnfe", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); tmpMap.put(_Fields.HDE, new org.apache.thrift.meta_data.FieldMetaData("hde", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); tmpMap.put(_Fields.DNLE, new org.apache.thrift.meta_data.FieldMetaData("dnle", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getLong_result.class, metaDataMap); } public getLong_result() { } public getLong_result( elephantdb.generated.Value success, elephantdb.generated.DomainNotFoundException dnfe, elephantdb.generated.HostsDownException hde, elephantdb.generated.DomainNotLoadedException dnle) { this(); this.success = success; this.dnfe = dnfe; this.hde = hde; this.dnle 
= dnle; } /** * Performs a deep copy on <i>other</i>. */ public getLong_result(getLong_result other) { if (other.is_set_success()) { this.success = new elephantdb.generated.Value(other.success); } if (other.is_set_dnfe()) { this.dnfe = new elephantdb.generated.DomainNotFoundException(other.dnfe); } if (other.is_set_hde()) { this.hde = new elephantdb.generated.HostsDownException(other.hde); } if (other.is_set_dnle()) { this.dnle = new elephantdb.generated.DomainNotLoadedException(other.dnle); } } public getLong_result deepCopy() { return new getLong_result(this); } @Override public void clear() { this.success = null; this.dnfe = null; this.hde = null; this.dnle = null; } public elephantdb.generated.Value get_success() { return this.success; } public void set_success(elephantdb.generated.Value success) { this.success = success; } public void unset_success() { this.success = null; } /** Returns true if field success is set (has been assigned a value) and false otherwise */ public boolean is_set_success() { return this.success != null; } public void set_success_isSet(boolean value) { if (!value) { this.success = null; } } public elephantdb.generated.DomainNotFoundException get_dnfe() { return this.dnfe; } public void set_dnfe(elephantdb.generated.DomainNotFoundException dnfe) { this.dnfe = dnfe; } public void unset_dnfe() { this.dnfe = null; } /** Returns true if field dnfe is set (has been assigned a value) and false otherwise */ public boolean is_set_dnfe() { return this.dnfe != null; } public void set_dnfe_isSet(boolean value) { if (!value) { this.dnfe = null; } } public elephantdb.generated.HostsDownException get_hde() { return this.hde; } public void set_hde(elephantdb.generated.HostsDownException hde) { this.hde = hde; } public void unset_hde() { this.hde = null; } /** Returns true if field hde is set (has been assigned a value) and false otherwise */ public boolean is_set_hde() { return this.hde != null; } public void set_hde_isSet(boolean value) { if (!value) { 
this.hde = null; } } public elephantdb.generated.DomainNotLoadedException get_dnle() { return this.dnle; } public void set_dnle(elephantdb.generated.DomainNotLoadedException dnle) { this.dnle = dnle; } public void unset_dnle() { this.dnle = null; } /** Returns true if field dnle is set (has been assigned a value) and false otherwise */ public boolean is_set_dnle() { return this.dnle != null; } public void set_dnle_isSet(boolean value) { if (!value) { this.dnle = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { case SUCCESS: if (value == null) { unset_success(); } else { set_success((elephantdb.generated.Value)value); } break; case DNFE: if (value == null) { unset_dnfe(); } else { set_dnfe((elephantdb.generated.DomainNotFoundException)value); } break; case HDE: if (value == null) { unset_hde(); } else { set_hde((elephantdb.generated.HostsDownException)value); } break; case DNLE: if (value == null) { unset_dnle(); } else { set_dnle((elephantdb.generated.DomainNotLoadedException)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case SUCCESS: return get_success(); case DNFE: return get_dnfe(); case HDE: return get_hde(); case DNLE: return get_dnle(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case SUCCESS: return is_set_success(); case DNFE: return is_set_dnfe(); case HDE: return is_set_hde(); case DNLE: return is_set_dnle(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof getLong_result) return this.equals((getLong_result)that); return false; } public boolean equals(getLong_result that) { if (that == null) return false; boolean this_present_success = true && this.is_set_success(); boolean 
that_present_success = true && that.is_set_success(); if (this_present_success || that_present_success) { if (!(this_present_success && that_present_success)) return false; if (!this.success.equals(that.success)) return false; } boolean this_present_dnfe = true && this.is_set_dnfe(); boolean that_present_dnfe = true && that.is_set_dnfe(); if (this_present_dnfe || that_present_dnfe) { if (!(this_present_dnfe && that_present_dnfe)) return false; if (!this.dnfe.equals(that.dnfe)) return false; } boolean this_present_hde = true && this.is_set_hde(); boolean that_present_hde = true && that.is_set_hde(); if (this_present_hde || that_present_hde) { if (!(this_present_hde && that_present_hde)) return false; if (!this.hde.equals(that.hde)) return false; } boolean this_present_dnle = true && this.is_set_dnle(); boolean that_present_dnle = true && that.is_set_dnle(); if (this_present_dnle || that_present_dnle) { if (!(this_present_dnle && that_present_dnle)) return false; if (!this.dnle.equals(that.dnle)) return false; } return true; } @Override public int hashCode() { HashCodeBuilder builder = new HashCodeBuilder(); boolean present_success = true && (is_set_success()); builder.append(present_success); if (present_success) builder.append(success); boolean present_dnfe = true && (is_set_dnfe()); builder.append(present_dnfe); if (present_dnfe) builder.append(dnfe); boolean present_hde = true && (is_set_hde()); builder.append(present_hde); if (present_hde) builder.append(hde); boolean present_dnle = true && (is_set_dnle()); builder.append(present_dnle); if (present_dnle) builder.append(dnle); return builder.toHashCode(); } public int compareTo(getLong_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; getLong_result typedOther = (getLong_result)other; lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success()); if (lastComparison != 0) { return 
lastComparison; } if (is_set_success()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnfe()).compareTo(typedOther.is_set_dnfe()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnfe()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnfe, typedOther.dnfe); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_hde()).compareTo(typedOther.is_set_hde()); if (lastComparison != 0) { return lastComparison; } if (is_set_hde()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hde, typedOther.hde); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnle()).compareTo(typedOther.is_set_dnle()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnle()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnle, typedOther.dnle); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("getLong_result("); boolean first = true; sb.append("success:"); if (this.success == null) { sb.append("null"); } else { sb.append(this.success); } first = false; if (!first) sb.append(", "); sb.append("dnfe:"); if (this.dnfe == null) { sb.append("null"); } else { sb.append(this.dnfe); } first = false; if (!first) sb.append(", "); sb.append("hde:"); if (this.hde == null) { sb.append("null"); } else { sb.append(this.hde); 
} first = false; if (!first) sb.append(", "); sb.append("dnle:"); if (this.dnle == null) { sb.append("null"); } else { sb.append(this.dnle); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class getLong_resultStandardSchemeFactory implements SchemeFactory { public getLong_resultStandardScheme getScheme() { return new getLong_resultStandardScheme(); } } private static class getLong_resultStandardScheme extends StandardScheme<getLong_result> { public void read(org.apache.thrift.protocol.TProtocol iprot, getLong_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 0: // SUCCESS if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.success = new elephantdb.generated.Value(); struct.success.read(iprot); struct.set_success_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 1: // DNFE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } 
else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // HDE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // DNLE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, getLong_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.success != null) { oprot.writeFieldBegin(SUCCESS_FIELD_DESC); struct.success.write(oprot); oprot.writeFieldEnd(); } if (struct.dnfe != null) { oprot.writeFieldBegin(DNFE_FIELD_DESC); struct.dnfe.write(oprot); oprot.writeFieldEnd(); } if (struct.hde != null) { oprot.writeFieldBegin(HDE_FIELD_DESC); struct.hde.write(oprot); oprot.writeFieldEnd(); } if (struct.dnle != null) { oprot.writeFieldBegin(DNLE_FIELD_DESC); struct.dnle.write(oprot); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class getLong_resultTupleSchemeFactory implements SchemeFactory { public getLong_resultTupleScheme getScheme() { return new getLong_resultTupleScheme(); } } private static class getLong_resultTupleScheme extends TupleScheme<getLong_result> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, getLong_result struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if 
(struct.is_set_success()) { optionals.set(0); } if (struct.is_set_dnfe()) { optionals.set(1); } if (struct.is_set_hde()) { optionals.set(2); } if (struct.is_set_dnle()) { optionals.set(3); } oprot.writeBitSet(optionals, 4); if (struct.is_set_success()) { struct.success.write(oprot); } if (struct.is_set_dnfe()) { struct.dnfe.write(oprot); } if (struct.is_set_hde()) { struct.hde.write(oprot); } if (struct.is_set_dnle()) { struct.dnle.write(oprot); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, getLong_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(4); if (incoming.get(0)) { struct.success = new elephantdb.generated.Value(); struct.success.read(iprot); struct.set_success_isSet(true); } if (incoming.get(1)) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } if (incoming.get(2)) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } if (incoming.get(3)) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } } } } public static class multiGet_args implements org.apache.thrift.TBase<multiGet_args, multiGet_args._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("multiGet_args"); private static final org.apache.thrift.protocol.TField DOMAIN_FIELD_DESC = new org.apache.thrift.protocol.TField("domain", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("key", org.apache.thrift.protocol.TType.LIST, (short)2); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>();
  // Maps each serialization strategy to its factory: the binary "standard"
  // protocol family and the compact "tuple" protocol family.
  static {
    schemes.put(StandardScheme.class, new multiGet_argsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new multiGet_argsTupleSchemeFactory());
  }

  // Thrift struct fields for the multiGet RPC arguments.
  private String domain; // required
  private List<ByteBuffer> key; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    DOMAIN((short)1, "domain"),
    KEY((short)2, "key");

    // Lookup table from field name to enum constant, built once at class load.
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // DOMAIN
          return DOMAIN;
        case 2: // KEY
          return KEY;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments

  // Field metadata (requirement level and wire type) exposed for reflection-style
  // access; registered globally with the Thrift metadata registry below.
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.DOMAIN, new org.apache.thrift.meta_data.FieldMetaData("domain", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.KEY, new org.apache.thrift.meta_data.FieldMetaData("key", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true))));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(multiGet_args.class, metaDataMap);
  }

  public multiGet_args() {
  }

  public multiGet_args(
    String domain,
    List<ByteBuffer> key)
  {
    this();
    this.domain = domain;
    this.key = key;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public multiGet_args(multiGet_args other) {
    if (other.is_set_domain()) {
      this.domain = other.domain;
    }
    if (other.is_set_key()) {
      // Binary elements are defensively copied so the new struct does not
      // share mutable ByteBuffer state with the source.
      List<ByteBuffer> __this__key = new ArrayList<ByteBuffer>();
      for (ByteBuffer other_element : other.key) {
        ByteBuffer temp_binary_element = org.apache.thrift.TBaseHelper.copyBinary(other_element);
;
        __this__key.add(temp_binary_element);
      }
      this.key = __this__key;
    }
  }

  public multiGet_args deepCopy() {
    return new multiGet_args(this);
  }

  @Override
  public void clear() {
    this.domain = null;
    this.key = null;
  }

  public String get_domain() {
    return this.domain;
  }

  public void set_domain(String domain) {
    this.domain = domain;
  }

  public void unset_domain() {
    this.domain = null;
  }

  /** Returns true if field domain is set (has been assigned a value) and false otherwise */
  public boolean is_set_domain() {
    return this.domain != null;
  }

  public void set_domain_isSet(boolean value) {
    if (!value) {
      this.domain = null;
    }
  }

  public int get_key_size() {
    return (this.key == null) ? 0 : this.key.size();
  }

  public java.util.Iterator<ByteBuffer> get_key_iterator() {
    return (this.key == null) ?
null : this.key.iterator();
  }

  public void add_to_key(ByteBuffer elem) {
    if (this.key == null) {
      this.key = new ArrayList<ByteBuffer>();
    }
    this.key.add(elem);
  }

  public List<ByteBuffer> get_key() {
    return this.key;
  }

  public void set_key(List<ByteBuffer> key) {
    this.key = key;
  }

  public void unset_key() {
    this.key = null;
  }

  /** Returns true if field key is set (has been assigned a value) and false otherwise */
  public boolean is_set_key() {
    return this.key != null;
  }

  public void set_key_isSet(boolean value) {
    if (!value) {
      this.key = null;
    }
  }

  // Generic field mutator used by Thrift's reflective access path.
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case DOMAIN:
      if (value == null) {
        unset_domain();
      } else {
        set_domain((String)value);
      }
      break;

    case KEY:
      if (value == null) {
        unset_key();
      } else {
        set_key((List<ByteBuffer>)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case DOMAIN:
      return get_domain();

    case KEY:
      return get_key();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case DOMAIN:
      return is_set_domain();
    case KEY:
      return is_set_key();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof multiGet_args)
      return this.equals((multiGet_args)that);
    return false;
  }

  // Two structs are equal when each field is either unset in both or
  // set to equal values in both.
  public boolean equals(multiGet_args that) {
    if (that == null)
      return false;

    boolean this_present_domain = true && this.is_set_domain();
    boolean that_present_domain = true && that.is_set_domain();
    if (this_present_domain || that_present_domain) {
      if (!(this_present_domain && that_present_domain))
        return false;
      if (!this.domain.equals(that.domain))
        return false;
    }

    boolean this_present_key = true && this.is_set_key();
    boolean that_present_key = true && that.is_set_key();
    if (this_present_key || that_present_key) {
      if
 (!(this_present_key && that_present_key))
        return false;
      if (!this.key.equals(that.key))
        return false;
    }

    return true;
  }

  // Hash is built from (isSet flag, value) pairs via commons-lang's
  // HashCodeBuilder, mirroring the structure of equals() above.
  @Override
  public int hashCode() {
    HashCodeBuilder builder = new HashCodeBuilder();

    boolean present_domain = true && (is_set_domain());
    builder.append(present_domain);
    if (present_domain)
      builder.append(domain);

    boolean present_key = true && (is_set_key());
    builder.append(present_key);
    if (present_key)
      builder.append(key);

    return builder.toHashCode();
  }

  // Ordering: unset fields sort before set ones, then field values are
  // compared in declaration order (domain, then key).
  public int compareTo(multiGet_args other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;
    multiGet_args typedOther = (multiGet_args)other;

    lastComparison = Boolean.valueOf(is_set_domain()).compareTo(typedOther.is_set_domain());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_domain()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.domain, typedOther.domain);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_key()).compareTo(typedOther.is_set_key());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_key()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.key, typedOther.key);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  // Serialization entry points: dispatch to the scheme matching the protocol.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("multiGet_args(");
    boolean first = true;

    sb.append("domain:");
    if (this.domain == null) {
      sb.append("null");
    } else {
      sb.append(this.domain);
    }
    first = false;
    if (!first) sb.append(", "); // note: always true here (generator artifact)
    sb.append("key:");
    if (this.key == null) {
      sb.append("null");
    } else {
      sb.append(this.key);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
  }

  // Java serialization is delegated to Thrift's compact protocol so the
  // serialized form stays consistent with the wire format.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class multiGet_argsStandardSchemeFactory implements SchemeFactory {
    public multiGet_argsStandardScheme getScheme() {
      return new multiGet_argsStandardScheme();
    }
  }

  // Field-tagged (standard) protocol encoding: each field is framed with
  // id/type headers, and unknown fields encountered on read are skipped.
  private static class multiGet_argsStandardScheme extends StandardScheme<multiGet_args> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, multiGet_args struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // DOMAIN
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.domain = iprot.readString();
              struct.set_domain_isSet(true);
            } else {
              // type mismatch: skip rather than fail, for forward compatibility
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // KEY
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
                struct.key = new ArrayList<ByteBuffer>(_list0.size);
                for (int _i1 = 0; _i1 < _list0.size; ++_i1)
                {
                  ByteBuffer _elem2; // required
                  _elem2 = iprot.readBinary();
                  struct.key.add(_elem2);
                }
                iprot.readListEnd();
              }
              struct.set_key_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, multiGet_args struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      // null fields are simply omitted from the output
      if (struct.domain != null) {
        oprot.writeFieldBegin(DOMAIN_FIELD_DESC);
        oprot.writeString(struct.domain);
        oprot.writeFieldEnd();
      }
      if (struct.key != null) {
        oprot.writeFieldBegin(KEY_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.key.size()));
          for (ByteBuffer _iter3 : struct.key) {
            oprot.writeBinary(_iter3);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class multiGet_argsTupleSchemeFactory implements SchemeFactory {
    public multiGet_argsTupleScheme getScheme() {
      return new multiGet_argsTupleScheme();
    }
  }

  // Compact (tuple) protocol encoding: a leading bitset records which fields
  // are present, then set fields are written back-to-back without headers.
  private static class multiGet_argsTupleScheme extends TupleScheme<multiGet_args> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, multiGet_args struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.is_set_domain()) {
        optionals.set(0);
      }
      if (struct.is_set_key()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.is_set_domain()) {
        oprot.writeString(struct.domain);
      }
      if (struct.is_set_key()) {
        {
          oprot.writeI32(struct.key.size());
          for (ByteBuffer _iter4 : struct.key) {
            oprot.writeBinary(_iter4);
          }
        }
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, multiGet_args struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if
(incoming.get(0)) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } if (incoming.get(1)) { { org.apache.thrift.protocol.TList _list5 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); struct.key = new ArrayList<ByteBuffer>(_list5.size); for (int _i6 = 0; _i6 < _list5.size; ++_i6) { ByteBuffer _elem7; // required _elem7 = iprot.readBinary(); struct.key.add(_elem7); } } struct.set_key_isSet(true); } } } } public static class multiGet_result implements org.apache.thrift.TBase<multiGet_result, multiGet_result._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("multiGet_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0); private static final org.apache.thrift.protocol.TField DNFE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnfe", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final org.apache.thrift.protocol.TField HDE_FIELD_DESC = new org.apache.thrift.protocol.TField("hde", org.apache.thrift.protocol.TType.STRUCT, (short)2); private static final org.apache.thrift.protocol.TField DNLE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnle", org.apache.thrift.protocol.TType.STRUCT, (short)3); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>();
  // Maps each serialization strategy to its factory: the binary "standard"
  // protocol family and the compact "tuple" protocol family.
  static {
    schemes.put(StandardScheme.class, new multiGet_resultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new multiGet_resultTupleSchemeFactory());
  }

  // Result union for the multiGet RPC: either the success value list or one
  // of the three declared exceptions is set.
  private List<elephantdb.generated.Value> success; // required
  private elephantdb.generated.DomainNotFoundException dnfe; // required
  private elephantdb.generated.HostsDownException hde; // required
  private elephantdb.generated.DomainNotLoadedException dnle; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    SUCCESS((short)0, "success"),
    DNFE((short)1, "dnfe"),
    HDE((short)2, "hde"),
    DNLE((short)3, "dnle");

    // Lookup table from field name to enum constant, built once at class load.
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 0: // SUCCESS
          return SUCCESS;
        case 1: // DNFE
          return DNFE;
        case 2: // HDE
          return HDE;
        case 3: // DNLE
          return DNLE;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments

  // Field metadata (requirement level and wire type) exposed for reflection-style
  // access; registered globally with the Thrift metadata registry below.
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, elephantdb.generated.Value.class))));
    tmpMap.put(_Fields.DNFE, new org.apache.thrift.meta_data.FieldMetaData("dnfe", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    tmpMap.put(_Fields.HDE, new org.apache.thrift.meta_data.FieldMetaData("hde", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    tmpMap.put(_Fields.DNLE, new org.apache.thrift.meta_data.FieldMetaData("dnle", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(multiGet_result.class, metaDataMap);
  }

  public multiGet_result() {
  }

  public multiGet_result(
    List<elephantdb.generated.Value> success,
    elephantdb.generated.DomainNotFoundException dnfe,
    elephantdb.generated.HostsDownException hde,
    elephantdb.generated.DomainNotLoadedException dnle)
  {
    this();
    this.success = success;
    this.dnfe = dnfe;
    this.hde = hde;
    this.dnle = dnle;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public multiGet_result(multiGet_result other) {
    if (other.is_set_success()) {
      // Each Value is copied via its own copy constructor so no state is shared.
      List<elephantdb.generated.Value> __this__success = new ArrayList<elephantdb.generated.Value>();
      for (elephantdb.generated.Value other_element : other.success) {
        __this__success.add(new elephantdb.generated.Value(other_element));
      }
      this.success = __this__success;
    }
    if (other.is_set_dnfe()) {
      this.dnfe = new elephantdb.generated.DomainNotFoundException(other.dnfe);
    }
    if (other.is_set_hde()) {
      this.hde = new elephantdb.generated.HostsDownException(other.hde);
    }
    if (other.is_set_dnle()) {
      this.dnle = new elephantdb.generated.DomainNotLoadedException(other.dnle);
    }
  }

  public multiGet_result deepCopy() {
    return new multiGet_result(this);
  }

  @Override
  public void clear() {
    this.success = null;
    this.dnfe = null;
    this.hde = null;
    this.dnle = null;
  }

  public int get_success_size() {
    return (this.success == null) ? 0 : this.success.size();
  }

  public java.util.Iterator<elephantdb.generated.Value> get_success_iterator() {
    return (this.success == null) ?
null : this.success.iterator();
  }

  public void add_to_success(elephantdb.generated.Value elem) {
    if (this.success == null) {
      this.success = new ArrayList<elephantdb.generated.Value>();
    }
    this.success.add(elem);
  }

  public List<elephantdb.generated.Value> get_success() {
    return this.success;
  }

  public void set_success(List<elephantdb.generated.Value> success) {
    this.success = success;
  }

  public void unset_success() {
    this.success = null;
  }

  /** Returns true if field success is set (has been assigned a value) and false otherwise */
  public boolean is_set_success() {
    return this.success != null;
  }

  public void set_success_isSet(boolean value) {
    if (!value) {
      this.success = null;
    }
  }

  public elephantdb.generated.DomainNotFoundException get_dnfe() {
    return this.dnfe;
  }

  public void set_dnfe(elephantdb.generated.DomainNotFoundException dnfe) {
    this.dnfe = dnfe;
  }

  public void unset_dnfe() {
    this.dnfe = null;
  }

  /** Returns true if field dnfe is set (has been assigned a value) and false otherwise */
  public boolean is_set_dnfe() {
    return this.dnfe != null;
  }

  public void set_dnfe_isSet(boolean value) {
    if (!value) {
      this.dnfe = null;
    }
  }

  public elephantdb.generated.HostsDownException get_hde() {
    return this.hde;
  }

  public void set_hde(elephantdb.generated.HostsDownException hde) {
    this.hde = hde;
  }

  public void unset_hde() {
    this.hde = null;
  }

  /** Returns true if field hde is set (has been assigned a value) and false otherwise */
  public boolean is_set_hde() {
    return this.hde != null;
  }

  public void set_hde_isSet(boolean value) {
    if (!value) {
      this.hde = null;
    }
  }

  public elephantdb.generated.DomainNotLoadedException get_dnle() {
    return this.dnle;
  }

  public void set_dnle(elephantdb.generated.DomainNotLoadedException dnle) {
    this.dnle = dnle;
  }

  public void unset_dnle() {
    this.dnle = null;
  }

  /** Returns true if field dnle is set (has been assigned a value) and false otherwise */
  public boolean is_set_dnle() {
    return this.dnle != null;
  }

  public void set_dnle_isSet(boolean value) {
    if (!value) {
      this.dnle = null;
    }
  }

  // Generic field mutator used by Thrift's reflective access path.
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case SUCCESS:
      if (value == null) {
        unset_success();
      } else {
        set_success((List<elephantdb.generated.Value>)value);
      }
      break;

    case DNFE:
      if (value == null) {
        unset_dnfe();
      } else {
        set_dnfe((elephantdb.generated.DomainNotFoundException)value);
      }
      break;

    case HDE:
      if (value == null) {
        unset_hde();
      } else {
        set_hde((elephantdb.generated.HostsDownException)value);
      }
      break;

    case DNLE:
      if (value == null) {
        unset_dnle();
      } else {
        set_dnle((elephantdb.generated.DomainNotLoadedException)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case SUCCESS:
      return get_success();

    case DNFE:
      return get_dnfe();

    case HDE:
      return get_hde();

    case DNLE:
      return get_dnle();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case SUCCESS:
      return is_set_success();
    case DNFE:
      return is_set_dnfe();
    case HDE:
      return is_set_hde();
    case DNLE:
      return is_set_dnle();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof multiGet_result)
      return this.equals((multiGet_result)that);
    return false;
  }

  // Two structs are equal when each field is either unset in both or
  // set to equal values in both.
  public boolean equals(multiGet_result that) {
    if (that == null)
      return false;

    boolean this_present_success = true && this.is_set_success();
    boolean that_present_success = true && that.is_set_success();
    if (this_present_success || that_present_success) {
      if (!(this_present_success && that_present_success))
        return false;
      if (!this.success.equals(that.success))
        return false;
    }

    boolean this_present_dnfe = true && this.is_set_dnfe();
    boolean that_present_dnfe = true && that.is_set_dnfe();
    if (this_present_dnfe || that_present_dnfe) {
      if (!(this_present_dnfe && that_present_dnfe))
        return false;
      if
 (!this.dnfe.equals(that.dnfe))
        return false;
    }

    boolean this_present_hde = true && this.is_set_hde();
    boolean that_present_hde = true && that.is_set_hde();
    if (this_present_hde || that_present_hde) {
      if (!(this_present_hde && that_present_hde))
        return false;
      if (!this.hde.equals(that.hde))
        return false;
    }

    boolean this_present_dnle = true && this.is_set_dnle();
    boolean that_present_dnle = true && that.is_set_dnle();
    if (this_present_dnle || that_present_dnle) {
      if (!(this_present_dnle && that_present_dnle))
        return false;
      if (!this.dnle.equals(that.dnle))
        return false;
    }

    return true;
  }

  // Hash is built from (isSet flag, value) pairs via commons-lang's
  // HashCodeBuilder, mirroring the structure of equals() above.
  @Override
  public int hashCode() {
    HashCodeBuilder builder = new HashCodeBuilder();

    boolean present_success = true && (is_set_success());
    builder.append(present_success);
    if (present_success)
      builder.append(success);

    boolean present_dnfe = true && (is_set_dnfe());
    builder.append(present_dnfe);
    if (present_dnfe)
      builder.append(dnfe);

    boolean present_hde = true && (is_set_hde());
    builder.append(present_hde);
    if (present_hde)
      builder.append(hde);

    boolean present_dnle = true && (is_set_dnle());
    builder.append(present_dnle);
    if (present_dnle)
      builder.append(dnle);

    return builder.toHashCode();
  }

  // Ordering: unset fields sort before set ones, then field values are
  // compared in declaration order (success, dnfe, hde, dnle).
  public int compareTo(multiGet_result other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;
    multiGet_result typedOther = (multiGet_result)other;

    lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_success()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_dnfe()).compareTo(typedOther.is_set_dnfe());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_dnfe()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnfe, typedOther.dnfe);
      if (lastComparison
 != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_hde()).compareTo(typedOther.is_set_hde());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_hde()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hde, typedOther.hde);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_dnle()).compareTo(typedOther.is_set_dnle());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_dnle()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnle, typedOther.dnle);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  // Serialization entry points: dispatch to the scheme matching the protocol.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("multiGet_result(");
    boolean first = true;

    sb.append("success:");
    if (this.success == null) {
      sb.append("null");
    } else {
      sb.append(this.success);
    }
    first = false;
    if (!first) sb.append(", "); // note: always true here (generator artifact)
    sb.append("dnfe:");
    if (this.dnfe == null) {
      sb.append("null");
    } else {
      sb.append(this.dnfe);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("hde:");
    if (this.hde == null) {
      sb.append("null");
    } else {
      sb.append(this.hde);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("dnle:");
    if (this.dnle == null) {
      sb.append("null");
    } else {
      sb.append(this.dnle);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
  }

  // Java serialization is delegated to Thrift's compact protocol so the
  // serialized form stays consistent with the wire format.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new
 org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class multiGet_resultStandardSchemeFactory implements SchemeFactory {
    public multiGet_resultStandardScheme getScheme() {
      return new multiGet_resultStandardScheme();
    }
  }

  // Field-tagged (standard) protocol encoding: each field is framed with
  // id/type headers, and unknown fields encountered on read are skipped.
  private static class multiGet_resultStandardScheme extends StandardScheme<multiGet_result> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, multiGet_result struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 0: // SUCCESS
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list8 = iprot.readListBegin();
                struct.success = new ArrayList<elephantdb.generated.Value>(_list8.size);
                for (int _i9 = 0; _i9 < _list8.size; ++_i9)
                {
                  elephantdb.generated.Value _elem10; // required
                  _elem10 = new elephantdb.generated.Value();
                  _elem10.read(iprot);
                  struct.success.add(_elem10);
                }
                iprot.readListEnd();
              }
              struct.set_success_isSet(true);
            } else {
              // type mismatch: skip rather than fail, for forward compatibility
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 1: // DNFE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.dnfe = new elephantdb.generated.DomainNotFoundException();
              struct.dnfe.read(iprot);
              struct.set_dnfe_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // HDE
            if (schemeField.type ==
 org.apache.thrift.protocol.TType.STRUCT) {
              struct.hde = new elephantdb.generated.HostsDownException();
              struct.hde.read(iprot);
              struct.set_hde_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // DNLE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.dnle = new elephantdb.generated.DomainNotLoadedException();
              struct.dnle.read(iprot);
              struct.set_dnle_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, multiGet_result struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      // null fields are simply omitted from the output
      if (struct.success != null) {
        oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size()));
          for (elephantdb.generated.Value _iter11 : struct.success) {
            _iter11.write(oprot);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      if (struct.dnfe != null) {
        oprot.writeFieldBegin(DNFE_FIELD_DESC);
        struct.dnfe.write(oprot);
        oprot.writeFieldEnd();
      }
      if (struct.hde != null) {
        oprot.writeFieldBegin(HDE_FIELD_DESC);
        struct.hde.write(oprot);
        oprot.writeFieldEnd();
      }
      if (struct.dnle != null) {
        oprot.writeFieldBegin(DNLE_FIELD_DESC);
        struct.dnle.write(oprot);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class multiGet_resultTupleSchemeFactory implements SchemeFactory {
    public multiGet_resultTupleScheme getScheme() {
      return new multiGet_resultTupleScheme();
    }
  }

  // Compact (tuple) protocol encoding: a leading bitset records which fields
  // are present, then set fields are written back-to-back without headers.
  private static class multiGet_resultTupleScheme extends TupleScheme<multiGet_result> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, multiGet_result struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.is_set_success()) { optionals.set(0); } if (struct.is_set_dnfe()) { optionals.set(1); } if (struct.is_set_hde()) { optionals.set(2); } if (struct.is_set_dnle()) { optionals.set(3); } oprot.writeBitSet(optionals, 4); if (struct.is_set_success()) { { oprot.writeI32(struct.success.size()); for (elephantdb.generated.Value _iter12 : struct.success) { _iter12.write(oprot); } } } if (struct.is_set_dnfe()) { struct.dnfe.write(oprot); } if (struct.is_set_hde()) { struct.hde.write(oprot); } if (struct.is_set_dnle()) { struct.dnle.write(oprot); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, multiGet_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(4); if (incoming.get(0)) { { org.apache.thrift.protocol.TList _list13 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.success = new ArrayList<elephantdb.generated.Value>(_list13.size); for (int _i14 = 0; _i14 < _list13.size; ++_i14) { elephantdb.generated.Value _elem15; // required _elem15 = new elephantdb.generated.Value(); _elem15.read(iprot); struct.success.add(_elem15); } } struct.set_success_isSet(true); } if (incoming.get(1)) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } if (incoming.get(2)) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } if (incoming.get(3)) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } } } } public static class multiGetString_args implements org.apache.thrift.TBase<multiGetString_args, multiGetString_args._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct("multiGetString_args");
  // Per-field wire descriptors: IDL field name, thrift wire type, and field id.
  private static final org.apache.thrift.protocol.TField DOMAIN_FIELD_DESC = new org.apache.thrift.protocol.TField("domain", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("key", org.apache.thrift.protocol.TType.LIST, (short)2);

  // Maps each serialization scheme (standard field-tagged vs. compact tuple) to its factory;
  // read()/write() below select the entry matching the protocol in use.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new multiGetString_argsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new multiGetString_argsTupleSchemeFactory());
  }

  private String domain; // required
  private List<String> key; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    DOMAIN((short)1, "domain"),
    KEY((short)2, "key");

    // Name-to-constant lookup table, populated once at class-load time.
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // DOMAIN
          return DOMAIN;
        case 2: // KEY
          return KEY;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments

  // Reflective field metadata (requirement level and value type) registered globally
  // so generic Thrift utilities can introspect this struct.
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.DOMAIN, new org.apache.thrift.meta_data.FieldMetaData("domain", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.KEY, new org.apache.thrift.meta_data.FieldMetaData("key", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(multiGetString_args.class, metaDataMap);
  }

  public multiGetString_args() {
  }

  public multiGetString_args(
    String domain,
    List<String> key)
  {
    this();
    this.domain = domain;
    this.key = key;
  }

  /**
   * Performs a deep copy on <i>other</i>.
*/
  public multiGetString_args(multiGetString_args other) {
    if (other.is_set_domain()) {
      // String is immutable, so sharing the reference is a safe "deep" copy.
      this.domain = other.domain;
    }
    if (other.is_set_key()) {
      // Copy the list container itself (elements are immutable Strings).
      List<String> __this__key = new ArrayList<String>();
      for (String other_element : other.key) {
        __this__key.add(other_element);
      }
      this.key = __this__key;
    }
  }

  public multiGetString_args deepCopy() {
    return new multiGetString_args(this);
  }

  @Override
  public void clear() {
    this.domain = null;
    this.key = null;
  }

  public String get_domain() {
    return this.domain;
  }

  public void set_domain(String domain) {
    this.domain = domain;
  }

  public void unset_domain() {
    this.domain = null;
  }

  /** Returns true if field domain is set (has been assigned a value) and false otherwise */
  public boolean is_set_domain() {
    return this.domain != null;
  }

  public void set_domain_isSet(boolean value) {
    if (!value) {
      this.domain = null;
    }
  }

  public int get_key_size() {
    return (this.key == null) ? 0 : this.key.size();
  }

  public java.util.Iterator<String> get_key_iterator() {
    return (this.key == null) ? null : this.key.iterator();
  }

  // Lazily creates the backing list on first append.
  public void add_to_key(String elem) {
    if (this.key == null) {
      this.key = new ArrayList<String>();
    }
    this.key.add(elem);
  }

  public List<String> get_key() {
    return this.key;
  }

  public void set_key(List<String> key) {
    this.key = key;
  }

  public void unset_key() {
    this.key = null;
  }

  /** Returns true if field key is set (has been assigned a value) and false otherwise */
  public boolean is_set_key() {
    return this.key != null;
  }

  public void set_key_isSet(boolean value) {
    if (!value) {
      this.key = null;
    }
  }

  // Generic setter used by reflective Thrift utilities; null value unsets the field.
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case DOMAIN:
      if (value == null) {
        unset_domain();
      } else {
        set_domain((String)value);
      }
      break;

    case KEY:
      if (value == null) {
        unset_key();
      } else {
        set_key((List<String>)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case DOMAIN:
      return get_domain();

    case KEY:
      return get_key();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case DOMAIN:
      return is_set_domain();
    case KEY:
      return is_set_key();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof multiGetString_args)
      return this.equals((multiGetString_args)that);
    return false;
  }

  public boolean equals(multiGetString_args that) {
    if (that == null)
      return false;

    // Fields compare equal when both are unset, or both set with equal values.
    boolean this_present_domain = true && this.is_set_domain();
    boolean that_present_domain = true && that.is_set_domain();
    if (this_present_domain || that_present_domain) {
      if (!(this_present_domain && that_present_domain))
        return false;
      if (!this.domain.equals(that.domain))
        return false;
    }

    boolean this_present_key = true && this.is_set_key();
    boolean that_present_key = true && that.is_set_key();
    if (this_present_key || that_present_key) {
      if (!(this_present_key && that_present_key))
        return false;
      if (!this.key.equals(that.key))
        return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    HashCodeBuilder builder = new HashCodeBuilder();

    // Hash the "present" flag alongside each value so unset != set-to-default.
    boolean present_domain = true && (is_set_domain());
    builder.append(present_domain);
    if (present_domain)
      builder.append(domain);

    boolean present_key = true && (is_set_key());
    builder.append(present_key);
    if (present_key)
      builder.append(key);

    return builder.toHashCode();
  }

  public int compareTo(multiGetString_args other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;
    multiGetString_args typedOther = (multiGetString_args)other;

    // Per field: an unset field sorts before a set one; two set fields compare by value.
    lastComparison = Boolean.valueOf(is_set_domain()).compareTo(typedOther.is_set_domain());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_domain()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.domain, typedOther.domain);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_key()).compareTo(typedOther.is_set_key());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_key()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.key, typedOther.key);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  // Delegates wire (de)serialization to the scheme registered for the protocol in use.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("multiGetString_args(");
    boolean first = true;

    sb.append("domain:");
    if (this.domain == null) {
      sb.append("null");
    } else {
      sb.append(this.domain);
    }
    first = false;
    if (!first)
sb.append(", "); sb.append("key:"); if (this.key == null) { sb.append("null"); } else { sb.append(this.key); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class multiGetString_argsStandardSchemeFactory implements SchemeFactory { public multiGetString_argsStandardScheme getScheme() { return new multiGetString_argsStandardScheme(); } } private static class multiGetString_argsStandardScheme extends StandardScheme<multiGetString_args> { public void read(org.apache.thrift.protocol.TProtocol iprot, multiGetString_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // DOMAIN if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // KEY if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list16 = iprot.readListBegin(); struct.key = new ArrayList<String>(_list16.size); for (int _i17 = 0; _i17 < _list16.size; ++_i17) { String 
_elem18; // required _elem18 = iprot.readString(); struct.key.add(_elem18); } iprot.readListEnd(); } struct.set_key_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, multiGetString_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.domain != null) { oprot.writeFieldBegin(DOMAIN_FIELD_DESC); oprot.writeString(struct.domain); oprot.writeFieldEnd(); } if (struct.key != null) { oprot.writeFieldBegin(KEY_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.key.size())); for (String _iter19 : struct.key) { oprot.writeString(_iter19); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class multiGetString_argsTupleSchemeFactory implements SchemeFactory { public multiGetString_argsTupleScheme getScheme() { return new multiGetString_argsTupleScheme(); } } private static class multiGetString_argsTupleScheme extends TupleScheme<multiGetString_args> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, multiGetString_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.is_set_domain()) { optionals.set(0); } if (struct.is_set_key()) { optionals.set(1); } oprot.writeBitSet(optionals, 2); if (struct.is_set_domain()) { oprot.writeString(struct.domain); } if (struct.is_set_key()) { { oprot.writeI32(struct.key.size()); for (String _iter20 : struct.key) { oprot.writeString(_iter20); } } } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, multiGetString_args struct) throws 
org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } if (incoming.get(1)) { { org.apache.thrift.protocol.TList _list21 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); struct.key = new ArrayList<String>(_list21.size); for (int _i22 = 0; _i22 < _list21.size; ++_i22) { String _elem23; // required _elem23 = iprot.readString(); struct.key.add(_elem23); } } struct.set_key_isSet(true); } } } } public static class multiGetString_result implements org.apache.thrift.TBase<multiGetString_result, multiGetString_result._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("multiGetString_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0); private static final org.apache.thrift.protocol.TField DNFE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnfe", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final org.apache.thrift.protocol.TField HDE_FIELD_DESC = new org.apache.thrift.protocol.TField("hde", org.apache.thrift.protocol.TType.STRUCT, (short)2); private static final org.apache.thrift.protocol.TField DNLE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnle", org.apache.thrift.protocol.TType.STRUCT, (short)3); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new multiGetString_resultStandardSchemeFactory()); schemes.put(TupleScheme.class, new multiGetString_resultTupleSchemeFactory()); } private List<elephantdb.generated.Value> success; // required private elephantdb.generated.DomainNotFoundException dnfe; // required private elephantdb.generated.HostsDownException hde; // required private elephantdb.generated.DomainNotLoadedException dnle; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { SUCCESS((short)0, "success"), DNFE((short)1, "dnfe"), HDE((short)2, "hde"), DNLE((short)3, "dnle"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 0: // SUCCESS return SUCCESS; case 1: // DNFE return DNFE; case 2: // HDE return HDE; case 3: // DNLE return DNLE; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, elephantdb.generated.Value.class)))); tmpMap.put(_Fields.DNFE, new org.apache.thrift.meta_data.FieldMetaData("dnfe", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); tmpMap.put(_Fields.HDE, new org.apache.thrift.meta_data.FieldMetaData("hde", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); tmpMap.put(_Fields.DNLE, new org.apache.thrift.meta_data.FieldMetaData("dnle", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(multiGetString_result.class, metaDataMap); } public multiGetString_result() { } public multiGetString_result( List<elephantdb.generated.Value> success, elephantdb.generated.DomainNotFoundException dnfe, elephantdb.generated.HostsDownException hde, 
elephantdb.generated.DomainNotLoadedException dnle)
  {
    // Tail of the all-fields constructor: success list plus the three declared exceptions.
    this();
    this.success = success;
    this.dnfe = dnfe;
    this.hde = hde;
    this.dnle = dnle;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public multiGetString_result(multiGetString_result other) {
    if (other.is_set_success()) {
      // Value is mutable, so each element is cloned rather than aliased.
      List<elephantdb.generated.Value> __this__success = new ArrayList<elephantdb.generated.Value>();
      for (elephantdb.generated.Value other_element : other.success) {
        __this__success.add(new elephantdb.generated.Value(other_element));
      }
      this.success = __this__success;
    }
    if (other.is_set_dnfe()) {
      this.dnfe = new elephantdb.generated.DomainNotFoundException(other.dnfe);
    }
    if (other.is_set_hde()) {
      this.hde = new elephantdb.generated.HostsDownException(other.hde);
    }
    if (other.is_set_dnle()) {
      this.dnle = new elephantdb.generated.DomainNotLoadedException(other.dnle);
    }
  }

  public multiGetString_result deepCopy() {
    return new multiGetString_result(this);
  }

  @Override
  public void clear() {
    this.success = null;
    this.dnfe = null;
    this.hde = null;
    this.dnle = null;
  }

  public int get_success_size() {
    return (this.success == null) ? 0 : this.success.size();
  }

  public java.util.Iterator<elephantdb.generated.Value> get_success_iterator() {
    return (this.success == null) ? null : this.success.iterator();
  }

  // Lazily creates the backing list on first append.
  public void add_to_success(elephantdb.generated.Value elem) {
    if (this.success == null) {
      this.success = new ArrayList<elephantdb.generated.Value>();
    }
    this.success.add(elem);
  }

  public List<elephantdb.generated.Value> get_success() {
    return this.success;
  }

  public void set_success(List<elephantdb.generated.Value> success) {
    this.success = success;
  }

  public void unset_success() {
    this.success = null;
  }

  /** Returns true if field success is set (has been assigned a value) and false otherwise */
  public boolean is_set_success() {
    return this.success != null;
  }

  public void set_success_isSet(boolean value) {
    if (!value) {
      this.success = null;
    }
  }

  public elephantdb.generated.DomainNotFoundException get_dnfe() {
    return this.dnfe;
  }

  public void set_dnfe(elephantdb.generated.DomainNotFoundException dnfe) {
    this.dnfe = dnfe;
  }

  public void unset_dnfe() {
    this.dnfe = null;
  }

  /** Returns true if field dnfe is set (has been assigned a value) and false otherwise */
  public boolean is_set_dnfe() {
    return this.dnfe != null;
  }

  public void set_dnfe_isSet(boolean value) {
    if (!value) {
      this.dnfe = null;
    }
  }

  public elephantdb.generated.HostsDownException get_hde() {
    return this.hde;
  }

  public void set_hde(elephantdb.generated.HostsDownException hde) {
    this.hde = hde;
  }

  public void unset_hde() {
    this.hde = null;
  }

  /** Returns true if field hde is set (has been assigned a value) and false otherwise */
  public boolean is_set_hde() {
    return this.hde != null;
  }

  public void set_hde_isSet(boolean value) {
    if (!value) {
      this.hde = null;
    }
  }

  public elephantdb.generated.DomainNotLoadedException get_dnle() {
    return this.dnle;
  }

  public void set_dnle(elephantdb.generated.DomainNotLoadedException dnle) {
    this.dnle = dnle;
  }

  public void unset_dnle() {
    this.dnle = null;
  }

  /** Returns true if field dnle is set (has been assigned a value) and false otherwise */
  public boolean is_set_dnle() {
    return this.dnle != null;
  }

  public void set_dnle_isSet(boolean value) {
    if (!value) {
this.dnle = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { case SUCCESS: if (value == null) { unset_success(); } else { set_success((List<elephantdb.generated.Value>)value); } break; case DNFE: if (value == null) { unset_dnfe(); } else { set_dnfe((elephantdb.generated.DomainNotFoundException)value); } break; case HDE: if (value == null) { unset_hde(); } else { set_hde((elephantdb.generated.HostsDownException)value); } break; case DNLE: if (value == null) { unset_dnle(); } else { set_dnle((elephantdb.generated.DomainNotLoadedException)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case SUCCESS: return get_success(); case DNFE: return get_dnfe(); case HDE: return get_hde(); case DNLE: return get_dnle(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case SUCCESS: return is_set_success(); case DNFE: return is_set_dnfe(); case HDE: return is_set_hde(); case DNLE: return is_set_dnle(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof multiGetString_result) return this.equals((multiGetString_result)that); return false; } public boolean equals(multiGetString_result that) { if (that == null) return false; boolean this_present_success = true && this.is_set_success(); boolean that_present_success = true && that.is_set_success(); if (this_present_success || that_present_success) { if (!(this_present_success && that_present_success)) return false; if (!this.success.equals(that.success)) return false; } boolean this_present_dnfe = true && this.is_set_dnfe(); boolean that_present_dnfe = true && that.is_set_dnfe(); if (this_present_dnfe || that_present_dnfe) { if (!(this_present_dnfe && that_present_dnfe)) 
return false; if (!this.dnfe.equals(that.dnfe)) return false; } boolean this_present_hde = true && this.is_set_hde(); boolean that_present_hde = true && that.is_set_hde(); if (this_present_hde || that_present_hde) { if (!(this_present_hde && that_present_hde)) return false; if (!this.hde.equals(that.hde)) return false; } boolean this_present_dnle = true && this.is_set_dnle(); boolean that_present_dnle = true && that.is_set_dnle(); if (this_present_dnle || that_present_dnle) { if (!(this_present_dnle && that_present_dnle)) return false; if (!this.dnle.equals(that.dnle)) return false; } return true; } @Override public int hashCode() { HashCodeBuilder builder = new HashCodeBuilder(); boolean present_success = true && (is_set_success()); builder.append(present_success); if (present_success) builder.append(success); boolean present_dnfe = true && (is_set_dnfe()); builder.append(present_dnfe); if (present_dnfe) builder.append(dnfe); boolean present_hde = true && (is_set_hde()); builder.append(present_hde); if (present_hde) builder.append(hde); boolean present_dnle = true && (is_set_dnle()); builder.append(present_dnle); if (present_dnle) builder.append(dnle); return builder.toHashCode(); } public int compareTo(multiGetString_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; multiGetString_result typedOther = (multiGetString_result)other; lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success()); if (lastComparison != 0) { return lastComparison; } if (is_set_success()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnfe()).compareTo(typedOther.is_set_dnfe()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnfe()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnfe, 
typedOther.dnfe); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_hde()).compareTo(typedOther.is_set_hde()); if (lastComparison != 0) { return lastComparison; } if (is_set_hde()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hde, typedOther.hde); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnle()).compareTo(typedOther.is_set_dnle()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnle()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnle, typedOther.dnle); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("multiGetString_result("); boolean first = true; sb.append("success:"); if (this.success == null) { sb.append("null"); } else { sb.append(this.success); } first = false; if (!first) sb.append(", "); sb.append("dnfe:"); if (this.dnfe == null) { sb.append("null"); } else { sb.append(this.dnfe); } first = false; if (!first) sb.append(", "); sb.append("hde:"); if (this.hde == null) { sb.append("null"); } else { sb.append(this.hde); } first = false; if (!first) sb.append(", "); sb.append("dnle:"); if (this.dnle == null) { sb.append("null"); } else { sb.append(this.dnle); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new 
org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class multiGetString_resultStandardSchemeFactory implements SchemeFactory { public multiGetString_resultStandardScheme getScheme() { return new multiGetString_resultStandardScheme(); } } private static class multiGetString_resultStandardScheme extends StandardScheme<multiGetString_result> { public void read(org.apache.thrift.protocol.TProtocol iprot, multiGetString_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 0: // SUCCESS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list24 = iprot.readListBegin(); struct.success = new ArrayList<elephantdb.generated.Value>(_list24.size); for (int _i25 = 0; _i25 < _list24.size; ++_i25) { elephantdb.generated.Value _elem26; // required _elem26 = new elephantdb.generated.Value(); _elem26.read(iprot); struct.success.add(_elem26); } iprot.readListEnd(); } struct.set_success_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 1: // DNFE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, 
schemeField.type); } break; case 2: // HDE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // DNLE if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, multiGetString_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.success != null) { oprot.writeFieldBegin(SUCCESS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size())); for (elephantdb.generated.Value _iter27 : struct.success) { _iter27.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } if (struct.dnfe != null) { oprot.writeFieldBegin(DNFE_FIELD_DESC); struct.dnfe.write(oprot); oprot.writeFieldEnd(); } if (struct.hde != null) { oprot.writeFieldBegin(HDE_FIELD_DESC); struct.hde.write(oprot); oprot.writeFieldEnd(); } if (struct.dnle != null) { oprot.writeFieldBegin(DNLE_FIELD_DESC); struct.dnle.write(oprot); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class multiGetString_resultTupleSchemeFactory implements SchemeFactory { public multiGetString_resultTupleScheme getScheme() { return new multiGetString_resultTupleScheme(); } } private static class multiGetString_resultTupleScheme extends TupleScheme<multiGetString_result> { @Override public void 
write(org.apache.thrift.protocol.TProtocol prot, multiGetString_result struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.is_set_success()) { optionals.set(0); } if (struct.is_set_dnfe()) { optionals.set(1); } if (struct.is_set_hde()) { optionals.set(2); } if (struct.is_set_dnle()) { optionals.set(3); } oprot.writeBitSet(optionals, 4); if (struct.is_set_success()) { { oprot.writeI32(struct.success.size()); for (elephantdb.generated.Value _iter28 : struct.success) { _iter28.write(oprot); } } } if (struct.is_set_dnfe()) { struct.dnfe.write(oprot); } if (struct.is_set_hde()) { struct.hde.write(oprot); } if (struct.is_set_dnle()) { struct.dnle.write(oprot); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, multiGetString_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(4); if (incoming.get(0)) { { org.apache.thrift.protocol.TList _list29 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.success = new ArrayList<elephantdb.generated.Value>(_list29.size); for (int _i30 = 0; _i30 < _list29.size; ++_i30) { elephantdb.generated.Value _elem31; // required _elem31 = new elephantdb.generated.Value(); _elem31.read(iprot); struct.success.add(_elem31); } } struct.set_success_isSet(true); } if (incoming.get(1)) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } if (incoming.get(2)) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } if (incoming.get(3)) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } } } } public static class multiGetInt_args implements org.apache.thrift.TBase<multiGetInt_args, multiGetInt_args._Fields>, 
java.io.Serializable, Cloneable {
  // NOTE(review): this appears to be Apache Thrift compiler output (schemes/_Fields/
  // StandardScheme/TupleScheme boilerplate). Prefer regenerating from the .thrift IDL
  // over hand-editing. Struct: argument wrapper for the multiGetInt RPC
  // (domain: STRING id 1, key: LIST<I32> id 2).
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("multiGetInt_args");

  private static final org.apache.thrift.protocol.TField DOMAIN_FIELD_DESC = new org.apache.thrift.protocol.TField("domain", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("key", org.apache.thrift.protocol.TType.LIST, (short)2);

  // Maps each serialization scheme class to the factory producing this struct's codec.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new multiGetInt_argsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new multiGetInt_argsTupleSchemeFactory());
  }

  private String domain; // required
  private List<Integer> key; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    DOMAIN((short)1, "domain"),
    KEY((short)2, "key");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // DOMAIN
          return DOMAIN;
        case 2: // KEY
          return KEY;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments

  // Per-field metadata registered with the global Thrift metadata map.
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.DOMAIN, new org.apache.thrift.meta_data.FieldMetaData("domain", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.KEY, new org.apache.thrift.meta_data.FieldMetaData("key", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(multiGetInt_args.class, metaDataMap);
  }

  public multiGetInt_args() {
  }

  public multiGetInt_args(
    String domain,
    List<Integer> key)
  {
    this();
    this.domain = domain;
    this.key = key;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public multiGetInt_args(multiGetInt_args other) {
    if (other.is_set_domain()) {
      this.domain = other.domain;
    }
    if (other.is_set_key()) {
      // Copies the list itself; Integer elements are shared (immutable).
      List<Integer> __this__key = new ArrayList<Integer>();
      for (Integer other_element : other.key) {
        __this__key.add(other_element);
      }
      this.key = __this__key;
    }
  }

  public multiGetInt_args deepCopy() {
    return new multiGetInt_args(this);
  }

  @Override
  public void clear() {
    this.domain = null;
    this.key = null;
  }

  public String get_domain() {
    return this.domain;
  }

  public void set_domain(String domain) {
    this.domain = domain;
  }

  public void unset_domain() {
    this.domain = null;
  }

  /** Returns true if field domain is set (has been assigned a value) and false otherwise */
  public boolean is_set_domain() {
    return this.domain != null;
  }

  public void set_domain_isSet(boolean value) {
    if (!value) {
      this.domain = null;
    }
  }

  public int get_key_size() {
    return (this.key == null) ? 0 : this.key.size();
  }

  public java.util.Iterator<Integer> get_key_iterator() {
    return (this.key == null) ? null : this.key.iterator();
  }

  public void add_to_key(int elem) {
    // Lazily creates the backing list on first append.
    if (this.key == null) {
      this.key = new ArrayList<Integer>();
    }
    this.key.add(elem);
  }

  public List<Integer> get_key() {
    return this.key;
  }

  public void set_key(List<Integer> key) {
    this.key = key;
  }

  public void unset_key() {
    this.key = null;
  }

  /** Returns true if field key is set (has been assigned a value) and false otherwise */
  public boolean is_set_key() {
    return this.key != null;
  }

  public void set_key_isSet(boolean value) {
    if (!value) {
      this.key = null;
    }
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case DOMAIN:
      if (value == null) {
        unset_domain();
      } else {
        set_domain((String)value);
      }
      break;

    case KEY:
      if (value == null) {
        unset_key();
      } else {
        set_key((List<Integer>)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case DOMAIN:
      return get_domain();

    case KEY:
      return get_key();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case DOMAIN:
      return is_set_domain();
    case KEY:
      return is_set_key();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof multiGetInt_args)
      return this.equals((multiGetInt_args)that);
    return false;
  }

  // Field-by-field equality; two unset (null) fields compare as equal.
  public boolean equals(multiGetInt_args that) {
    if (that == null)
      return false;

    boolean this_present_domain = true && this.is_set_domain();
    boolean that_present_domain = true && that.is_set_domain();
    if (this_present_domain || that_present_domain) {
      if (!(this_present_domain && that_present_domain))
        return false;
      if (!this.domain.equals(that.domain))
        return false;
    }

    boolean this_present_key = true && this.is_set_key();
    boolean that_present_key = true && that.is_set_key();
    if (this_present_key || that_present_key) {
      if (!(this_present_key && that_present_key))
        return false;
      if (!this.key.equals(that.key))
        return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    // Folds a presence flag plus the value for each field, mirroring equals().
    HashCodeBuilder builder = new HashCodeBuilder();

    boolean present_domain = true && (is_set_domain());
    builder.append(present_domain);
    if (present_domain)
      builder.append(domain);

    boolean present_key = true && (is_set_key());
    builder.append(present_key);
    if (present_key)
      builder.append(key);

    return builder.toHashCode();
  }

  // Orders first by field presence, then by field value, in field-id order.
  public int compareTo(multiGetInt_args other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;
    multiGetInt_args typedOther = (multiGetInt_args)other;

    lastComparison = Boolean.valueOf(is_set_domain()).compareTo(typedOther.is_set_domain());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_domain()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.domain, typedOther.domain);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_key()).compareTo(typedOther.is_set_key());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_key()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.key, typedOther.key);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  // Delegates (de)serialization to the scheme registered for the protocol in use.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("multiGetInt_args(");
    boolean first = true;

    sb.append("domain:");
    if (this.domain == null) {
      sb.append("null");
    } else {
      sb.append(this.domain);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("key:");
    if (this.key == null) {
      sb.append("null");
    } else {
      sb.append(this.key);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
  }

  // Java serialization is bridged through the Thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class multiGetInt_argsStandardSchemeFactory implements SchemeFactory {
    public multiGetInt_argsStandardScheme getScheme() {
      return new multiGetInt_argsStandardScheme();
    }
  }

  // Field-tagged encoding: reads/writes explicit field headers; unknown or
  // type-mismatched fields are skipped rather than failing.
  private static class multiGetInt_argsStandardScheme extends StandardScheme<multiGetInt_args> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, multiGetInt_args struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // DOMAIN
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.domain = iprot.readString();
              struct.set_domain_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // KEY
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list32 = iprot.readListBegin();
                struct.key = new ArrayList<Integer>(_list32.size);
                for (int _i33 = 0; _i33 < _list32.size; ++_i33)
                {
                  int _elem34; // required
                  _elem34 = iprot.readI32();
                  struct.key.add(_elem34);
                }
                iprot.readListEnd();
              }
              struct.set_key_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, multiGetInt_args struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.domain != null) {
        oprot.writeFieldBegin(DOMAIN_FIELD_DESC);
        oprot.writeString(struct.domain);
        oprot.writeFieldEnd();
      }
      if (struct.key != null) {
        oprot.writeFieldBegin(KEY_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, struct.key.size()));
          for (int _iter35 : struct.key)
          {
            oprot.writeI32(_iter35);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class multiGetInt_argsTupleSchemeFactory implements SchemeFactory {
    public multiGetInt_argsTupleScheme getScheme() {
      return new multiGetInt_argsTupleScheme();
    }
  }

  // Compact tuple encoding: a leading bitset records which fields are set,
  // then only the set fields are written, in field order.
  private static class multiGetInt_argsTupleScheme extends TupleScheme<multiGetInt_args> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, multiGetInt_args struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.is_set_domain()) {
        optionals.set(0);
      }
      if (struct.is_set_key()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.is_set_domain()) {
        oprot.writeString(struct.domain);
      }
      if (struct.is_set_key()) {
        {
          oprot.writeI32(struct.key.size());
          for (int _iter36 : struct.key)
          {
            oprot.writeI32(_iter36);
          }
        }
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, multiGetInt_args struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming =
iprot.readBitSet(2);
      // Mirrors TupleScheme.write: consult the bitset, then read only present fields.
      if (incoming.get(0)) {
        struct.domain = iprot.readString();
        struct.set_domain_isSet(true);
      }
      if (incoming.get(1)) {
        {
          org.apache.thrift.protocol.TList _list37 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, iprot.readI32());
          struct.key = new ArrayList<Integer>(_list37.size);
          for (int _i38 = 0; _i38 < _list37.size; ++_i38)
          {
            int _elem39; // required
            _elem39 = iprot.readI32();
            struct.key.add(_elem39);
          }
        }
        struct.set_key_isSet(true);
      }
    }
  }

}

// NOTE(review): this appears to be Apache Thrift compiler output; prefer
// regenerating from the .thrift IDL over hand-editing. Struct: result wrapper
// for the multiGetInt RPC — success (LIST<Value>, id 0) plus the three
// declared exceptions dnfe/hde/dnle (ids 1-3).
public static class multiGetInt_result implements org.apache.thrift.TBase<multiGetInt_result, multiGetInt_result._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("multiGetInt_result");

  private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0);
  private static final org.apache.thrift.protocol.TField DNFE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnfe", org.apache.thrift.protocol.TType.STRUCT, (short)1);
  private static final org.apache.thrift.protocol.TField HDE_FIELD_DESC = new org.apache.thrift.protocol.TField("hde", org.apache.thrift.protocol.TType.STRUCT, (short)2);
  private static final org.apache.thrift.protocol.TField DNLE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnle", org.apache.thrift.protocol.TType.STRUCT, (short)3);

  // Maps each serialization scheme class to the factory producing this struct's codec.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new multiGetInt_resultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new multiGetInt_resultTupleSchemeFactory());
  }

  private List<elephantdb.generated.Value> success; // required
  private elephantdb.generated.DomainNotFoundException dnfe; // required
  private elephantdb.generated.HostsDownException hde; // required
  private elephantdb.generated.DomainNotLoadedException dnle; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    SUCCESS((short)0, "success"),
    DNFE((short)1, "dnfe"),
    HDE((short)2, "hde"),
    DNLE((short)3, "dnle");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 0: // SUCCESS
          return SUCCESS;
        case 1: // DNFE
          return DNFE;
        case 2: // HDE
          return HDE;
        case 3: // DNLE
          return DNLE;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments

  // Per-field metadata registered with the global Thrift metadata map.
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, elephantdb.generated.Value.class))));
    tmpMap.put(_Fields.DNFE, new org.apache.thrift.meta_data.FieldMetaData("dnfe", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    tmpMap.put(_Fields.HDE, new org.apache.thrift.meta_data.FieldMetaData("hde", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    tmpMap.put(_Fields.DNLE, new org.apache.thrift.meta_data.FieldMetaData("dnle", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(multiGetInt_result.class, metaDataMap);
  }

  public multiGetInt_result() {
  }

  public multiGetInt_result(
    List<elephantdb.generated.Value> success,
    elephantdb.generated.DomainNotFoundException dnfe,
    elephantdb.generated.HostsDownException hde,
    elephantdb.generated.DomainNotLoadedException dnle)
  {
    this();
    this.success = success;
    this.dnfe = dnfe;
    this.hde = hde;
    this.dnle = dnle;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public multiGetInt_result(multiGetInt_result other) {
    if (other.is_set_success()) {
      // Deep-copies each Value element via its copy constructor.
      List<elephantdb.generated.Value> __this__success = new ArrayList<elephantdb.generated.Value>();
      for (elephantdb.generated.Value other_element : other.success) {
        __this__success.add(new elephantdb.generated.Value(other_element));
      }
      this.success = __this__success;
    }
    if (other.is_set_dnfe()) {
      this.dnfe = new elephantdb.generated.DomainNotFoundException(other.dnfe);
    }
    if (other.is_set_hde()) {
      this.hde = new elephantdb.generated.HostsDownException(other.hde);
    }
    if (other.is_set_dnle()) {
      this.dnle = new elephantdb.generated.DomainNotLoadedException(other.dnle);
    }
  }

  public multiGetInt_result deepCopy() {
    return new multiGetInt_result(this);
  }

  @Override
  public void clear() {
    this.success = null;
    this.dnfe = null;
    this.hde = null;
    this.dnle = null;
  }

  public int get_success_size() {
    return (this.success == null) ? 0 : this.success.size();
  }

  public java.util.Iterator<elephantdb.generated.Value> get_success_iterator() {
    return (this.success == null) ? null : this.success.iterator();
  }

  public void add_to_success(elephantdb.generated.Value elem) {
    // Lazily creates the backing list on first append.
    if (this.success == null) {
      this.success = new ArrayList<elephantdb.generated.Value>();
    }
    this.success.add(elem);
  }

  public List<elephantdb.generated.Value> get_success() {
    return this.success;
  }

  public void set_success(List<elephantdb.generated.Value> success) {
    this.success = success;
  }

  public void unset_success() {
    this.success = null;
  }

  /** Returns true if field success is set (has been assigned a value) and false otherwise */
  public boolean is_set_success() {
    return this.success != null;
  }

  public void set_success_isSet(boolean value) {
    if (!value) {
      this.success = null;
    }
  }

  public elephantdb.generated.DomainNotFoundException get_dnfe() {
    return this.dnfe;
  }

  public void set_dnfe(elephantdb.generated.DomainNotFoundException dnfe) {
    this.dnfe = dnfe;
  }

  public void unset_dnfe() {
    this.dnfe = null;
  }

  /** Returns true if field dnfe is set (has been assigned a value) and false otherwise */
  public boolean is_set_dnfe() {
    return this.dnfe != null;
  }

  public void set_dnfe_isSet(boolean value) {
    if (!value) {
      this.dnfe = null;
    }
  }

  public elephantdb.generated.HostsDownException get_hde() {
    return this.hde;
  }

  public void set_hde(elephantdb.generated.HostsDownException hde) {
    this.hde = hde;
  }

  public void unset_hde() {
    this.hde = null;
  }

  /** Returns true if field hde is set (has been assigned a value) and false otherwise */
  public boolean is_set_hde() {
    return this.hde != null;
  }

  public void set_hde_isSet(boolean value) {
    if (!value) {
      this.hde = null;
    }
  }

  public elephantdb.generated.DomainNotLoadedException get_dnle() {
    return this.dnle;
  }

  public void set_dnle(elephantdb.generated.DomainNotLoadedException dnle) {
    this.dnle = dnle;
  }

  public void unset_dnle() {
    this.dnle = null;
  }

  /** Returns true if field dnle is set (has been assigned a value) and false otherwise */
  public boolean is_set_dnle() {
    return this.dnle != null;
  }

  public void set_dnle_isSet(boolean value) {
    if (!value) {
      this.dnle = null;
    }
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case SUCCESS:
      if (value == null) {
        unset_success();
      } else {
        set_success((List<elephantdb.generated.Value>)value);
      }
      break;

    case DNFE:
      if (value == null) {
        unset_dnfe();
      } else {
        set_dnfe((elephantdb.generated.DomainNotFoundException)value);
      }
      break;

    case HDE:
      if (value == null) {
        unset_hde();
      } else {
        set_hde((elephantdb.generated.HostsDownException)value);
      }
      break;

    case DNLE:
      if (value == null) {
        unset_dnle();
      } else {
        set_dnle((elephantdb.generated.DomainNotLoadedException)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case SUCCESS:
      return get_success();

    case DNFE:
      return get_dnfe();

    case HDE:
      return get_hde();

    case DNLE:
      return get_dnle();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case SUCCESS:
      return is_set_success();
    case DNFE:
      return is_set_dnfe();
    case HDE:
      return is_set_hde();
    case DNLE:
      return is_set_dnle();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof multiGetInt_result)
      return this.equals((multiGetInt_result)that);
    return false;
  }

  // Field-by-field equality; two unset (null) fields compare as equal.
  public boolean equals(multiGetInt_result that) {
    if (that == null)
      return false;

    boolean this_present_success = true && this.is_set_success();
    boolean that_present_success = true && that.is_set_success();
    if (this_present_success || that_present_success) {
      if (!(this_present_success && that_present_success))
        return false;
      if (!this.success.equals(that.success))
        return false;
    }

    boolean this_present_dnfe = true && this.is_set_dnfe();
    boolean that_present_dnfe = true && that.is_set_dnfe();
    if (this_present_dnfe || that_present_dnfe) {
      if (!(this_present_dnfe && that_present_dnfe))
        return false;
      if (!this.dnfe.equals(that.dnfe))
        return false;
    }

    boolean this_present_hde = true && this.is_set_hde();
    boolean that_present_hde = true && that.is_set_hde();
    if (this_present_hde || that_present_hde) {
      if (!(this_present_hde && that_present_hde))
        return false;
      if (!this.hde.equals(that.hde))
        return false;
    }

    boolean this_present_dnle = true && this.is_set_dnle();
    boolean that_present_dnle = true && that.is_set_dnle();
    if (this_present_dnle || that_present_dnle) {
      if (!(this_present_dnle && that_present_dnle))
        return false;
      if (!this.dnle.equals(that.dnle))
        return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    // Folds a presence flag plus the value for each field, mirroring equals().
    HashCodeBuilder builder = new HashCodeBuilder();

    boolean present_success = true && (is_set_success());
    builder.append(present_success);
    if (present_success)
      builder.append(success);

    boolean present_dnfe = true && (is_set_dnfe());
    builder.append(present_dnfe);
    if (present_dnfe)
      builder.append(dnfe);

    boolean present_hde = true && (is_set_hde());
    builder.append(present_hde);
    if (present_hde)
      builder.append(hde);

    boolean present_dnle = true && (is_set_dnle());
    builder.append(present_dnle);
    if (present_dnle)
      builder.append(dnle);

    return builder.toHashCode();
  }

  // Orders first by field presence, then by field value, in field-id order.
  public int compareTo(multiGetInt_result other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;
    multiGetInt_result typedOther = (multiGetInt_result)other;

    lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_success()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_dnfe()).compareTo(typedOther.is_set_dnfe());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_dnfe()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnfe, typedOther.dnfe);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_hde()).compareTo(typedOther.is_set_hde());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_hde()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hde, typedOther.hde);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_dnle()).compareTo(typedOther.is_set_dnle());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_dnle()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnle, typedOther.dnle);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  // Delegates (de)serialization to the scheme registered for the protocol in use.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("multiGetInt_result(");
    boolean first = true;

    sb.append("success:");
    if (this.success == null) {
      sb.append("null");
    } else {
      sb.append(this.success);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("dnfe:");
    if (this.dnfe == null) {
      sb.append("null");
    } else {
      sb.append(this.dnfe);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("hde:");
    if (this.hde == null) {
      sb.append("null");
    } else {
      sb.append(this.hde);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("dnle:");
    if (this.dnle == null) {
      sb.append("null");
    } else {
      sb.append(this.dnle);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
  }

  // Java serialization is bridged through the Thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class multiGetInt_resultStandardSchemeFactory implements SchemeFactory {
    public multiGetInt_resultStandardScheme getScheme() {
      return new multiGetInt_resultStandardScheme();
    }
  }

  // Field-tagged encoding: reads/writes explicit field headers; unknown or
  // type-mismatched fields are skipped rather than failing.
  private static class multiGetInt_resultStandardScheme extends StandardScheme<multiGetInt_result> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, multiGetInt_result struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 0: // SUCCESS
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list40 = iprot.readListBegin();
                struct.success = new ArrayList<elephantdb.generated.Value>(_list40.size);
                for (int _i41 = 0; _i41 < _list40.size; ++_i41)
                {
                  elephantdb.generated.Value _elem42; // required
                  _elem42 = new elephantdb.generated.Value();
                  _elem42.read(iprot);
                  struct.success.add(_elem42);
                }
                iprot.readListEnd();
              }
              struct.set_success_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 1: // DNFE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.dnfe = new elephantdb.generated.DomainNotFoundException();
              struct.dnfe.read(iprot);
              struct.set_dnfe_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // HDE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.hde = new elephantdb.generated.HostsDownException();
              struct.hde.read(iprot);
              struct.set_hde_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // DNLE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.dnle = new elephantdb.generated.DomainNotLoadedException();
              struct.dnle.read(iprot);
              struct.set_dnle_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, multiGetInt_result struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.success != null) {
        oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size()));
          for (elephantdb.generated.Value _iter43 : struct.success)
          {
            _iter43.write(oprot);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      if (struct.dnfe != null) {
        oprot.writeFieldBegin(DNFE_FIELD_DESC);
        struct.dnfe.write(oprot);
        oprot.writeFieldEnd();
      }
      if (struct.hde != null) {
        oprot.writeFieldBegin(HDE_FIELD_DESC);
        struct.hde.write(oprot);
        oprot.writeFieldEnd();
      }
      if (struct.dnle != null) {
        oprot.writeFieldBegin(DNLE_FIELD_DESC);
        struct.dnle.write(oprot);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class multiGetInt_resultTupleSchemeFactory implements SchemeFactory {
    public multiGetInt_resultTupleScheme getScheme() {
      return new multiGetInt_resultTupleScheme();
    }
  }

  // Compact tuple encoding: a leading bitset records which fields are set,
  // then only the set fields are written, in field order.
  private static class multiGetInt_resultTupleScheme extends TupleScheme<multiGetInt_result> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, multiGetInt_result struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.is_set_success()) {
        optionals.set(0);
      }
      if (struct.is_set_dnfe()) {
        optionals.set(1);
      }
      if (struct.is_set_hde()) {
        optionals.set(2);
      }
      if (struct.is_set_dnle()) {
        optionals.set(3);
      }
      oprot.writeBitSet(optionals, 4);
      if (struct.is_set_success()) {
        {
          oprot.writeI32(struct.success.size());
          for (elephantdb.generated.Value _iter44 : struct.success)
          {
            _iter44.write(oprot);
          }
        }
      }
      if (struct.is_set_dnfe()) {
        struct.dnfe.write(oprot);
      }
      if (struct.is_set_hde()) {
        struct.hde.write(oprot);
      }
      if (struct.is_set_dnle()) {
        struct.dnle.write(oprot);
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, multiGetInt_result struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(4);
      // Mirrors write(): consult the bitset, then read only present fields.
      if (incoming.get(0)) {
        {
          org.apache.thrift.protocol.TList _list45 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
          struct.success = new ArrayList<elephantdb.generated.Value>(_list45.size);
          for (int _i46 = 0; _i46 < _list45.size; ++_i46)
          {
            elephantdb.generated.Value _elem47; // required
            _elem47 = new elephantdb.generated.Value();
            _elem47.read(iprot);
            struct.success.add(_elem47);
          }
        }
        struct.set_success_isSet(true);
      }
      if (incoming.get(1)) {
        struct.dnfe = new elephantdb.generated.DomainNotFoundException();
        struct.dnfe.read(iprot);
        struct.set_dnfe_isSet(true);
      }
      if (incoming.get(2)) {
        struct.hde = new elephantdb.generated.HostsDownException();
        struct.hde.read(iprot);
        struct.set_hde_isSet(true);
      }
      if (incoming.get(3)) {
        struct.dnle = new elephantdb.generated.DomainNotLoadedException();
        struct.dnle.read(iprot);
        struct.set_dnle_isSet(true);
      }
    }
  }

}

public static class multiGetLong_args implements org.apache.thrift.TBase<multiGetLong_args, multiGetLong_args._Fields>, java.io.Serializable, Cloneable {
  private static final
org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("multiGetLong_args"); private static final org.apache.thrift.protocol.TField DOMAIN_FIELD_DESC = new org.apache.thrift.protocol.TField("domain", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("key", org.apache.thrift.protocol.TType.LIST, (short)2); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new multiGetLong_argsStandardSchemeFactory()); schemes.put(TupleScheme.class, new multiGetLong_argsTupleSchemeFactory()); } private String domain; // required private List<Long> key; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { DOMAIN((short)1, "domain"), KEY((short)2, "key"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // DOMAIN return DOMAIN; case 2: // KEY return KEY; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.DOMAIN, new org.apache.thrift.meta_data.FieldMetaData("domain", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.KEY, new org.apache.thrift.meta_data.FieldMetaData("key", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(multiGetLong_args.class, metaDataMap); } public multiGetLong_args() { } public multiGetLong_args( String domain, List<Long> key) { this(); this.domain = domain; this.key = key; } /** * Performs a deep copy on <i>other</i>. 
*/ public multiGetLong_args(multiGetLong_args other) { if (other.is_set_domain()) { this.domain = other.domain; } if (other.is_set_key()) { List<Long> __this__key = new ArrayList<Long>(); for (Long other_element : other.key) { __this__key.add(other_element); } this.key = __this__key; } } public multiGetLong_args deepCopy() { return new multiGetLong_args(this); } @Override public void clear() { this.domain = null; this.key = null; } public String get_domain() { return this.domain; } public void set_domain(String domain) { this.domain = domain; } public void unset_domain() { this.domain = null; } /** Returns true if field domain is set (has been assigned a value) and false otherwise */ public boolean is_set_domain() { return this.domain != null; } public void set_domain_isSet(boolean value) { if (!value) { this.domain = null; } } public int get_key_size() { return (this.key == null) ? 0 : this.key.size(); } public java.util.Iterator<Long> get_key_iterator() { return (this.key == null) ? null : this.key.iterator(); } public void add_to_key(long elem) { if (this.key == null) { this.key = new ArrayList<Long>(); } this.key.add(elem); } public List<Long> get_key() { return this.key; } public void set_key(List<Long> key) { this.key = key; } public void unset_key() { this.key = null; } /** Returns true if field key is set (has been assigned a value) and false otherwise */ public boolean is_set_key() { return this.key != null; } public void set_key_isSet(boolean value) { if (!value) { this.key = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { case DOMAIN: if (value == null) { unset_domain(); } else { set_domain((String)value); } break; case KEY: if (value == null) { unset_key(); } else { set_key((List<Long>)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case DOMAIN: return get_domain(); case KEY: return get_key(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID 
is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case DOMAIN: return is_set_domain(); case KEY: return is_set_key(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof multiGetLong_args) return this.equals((multiGetLong_args)that); return false; } public boolean equals(multiGetLong_args that) { if (that == null) return false; boolean this_present_domain = true && this.is_set_domain(); boolean that_present_domain = true && that.is_set_domain(); if (this_present_domain || that_present_domain) { if (!(this_present_domain && that_present_domain)) return false; if (!this.domain.equals(that.domain)) return false; } boolean this_present_key = true && this.is_set_key(); boolean that_present_key = true && that.is_set_key(); if (this_present_key || that_present_key) { if (!(this_present_key && that_present_key)) return false; if (!this.key.equals(that.key)) return false; } return true; } @Override public int hashCode() { HashCodeBuilder builder = new HashCodeBuilder(); boolean present_domain = true && (is_set_domain()); builder.append(present_domain); if (present_domain) builder.append(domain); boolean present_key = true && (is_set_key()); builder.append(present_key); if (present_key) builder.append(key); return builder.toHashCode(); } public int compareTo(multiGetLong_args other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; multiGetLong_args typedOther = (multiGetLong_args)other; lastComparison = Boolean.valueOf(is_set_domain()).compareTo(typedOther.is_set_domain()); if (lastComparison != 0) { return lastComparison; } if (is_set_domain()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.domain, typedOther.domain); if (lastComparison != 0) { return 
lastComparison; } } lastComparison = Boolean.valueOf(is_set_key()).compareTo(typedOther.is_set_key()); if (lastComparison != 0) { return lastComparison; } if (is_set_key()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.key, typedOther.key); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("multiGetLong_args("); boolean first = true; sb.append("domain:"); if (this.domain == null) { sb.append("null"); } else { sb.append(this.domain); } first = false; if (!first) sb.append(", "); sb.append("key:"); if (this.key == null) { sb.append("null"); } else { sb.append(this.key); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class multiGetLong_argsStandardSchemeFactory implements SchemeFactory { public multiGetLong_argsStandardScheme getScheme() { return new 
multiGetLong_argsStandardScheme(); } } private static class multiGetLong_argsStandardScheme extends StandardScheme<multiGetLong_args> { public void read(org.apache.thrift.protocol.TProtocol iprot, multiGetLong_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // DOMAIN if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // KEY if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list48 = iprot.readListBegin(); struct.key = new ArrayList<Long>(_list48.size); for (int _i49 = 0; _i49 < _list48.size; ++_i49) { long _elem50; // required _elem50 = iprot.readI64(); struct.key.add(_elem50); } iprot.readListEnd(); } struct.set_key_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, multiGetLong_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.domain != null) { oprot.writeFieldBegin(DOMAIN_FIELD_DESC); oprot.writeString(struct.domain); oprot.writeFieldEnd(); } if (struct.key != null) { oprot.writeFieldBegin(KEY_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, struct.key.size())); for (long _iter51 : struct.key) { oprot.writeI64(_iter51); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); 
  }
}

// NOTE(review): auto-generated Thrift scheme classes — regenerate from the IDL instead of hand-editing.
private static class multiGetLong_argsTupleSchemeFactory implements SchemeFactory {
  public multiGetLong_argsTupleScheme getScheme() {
    return new multiGetLong_argsTupleScheme();
  }
}

// Tuple scheme: compact encoding that writes a BitSet of which fields are set,
// followed only by the set fields' values (no per-field headers).
private static class multiGetLong_argsTupleScheme extends TupleScheme<multiGetLong_args> {

  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, multiGetLong_args struct) throws org.apache.thrift.TException {
    TTupleProtocol oprot = (TTupleProtocol) prot;
    BitSet optionals = new BitSet();
    if (struct.is_set_domain()) {
      optionals.set(0);
    }
    if (struct.is_set_key()) {
      optionals.set(1);
    }
    oprot.writeBitSet(optionals, 2);
    if (struct.is_set_domain()) {
      oprot.writeString(struct.domain);
    }
    if (struct.is_set_key()) {
      {
        oprot.writeI32(struct.key.size());
        for (long _iter52 : struct.key) {
          oprot.writeI64(_iter52);
        }
      }
    }
  }

  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot, multiGetLong_args struct) throws org.apache.thrift.TException {
    TTupleProtocol iprot = (TTupleProtocol) prot;
    BitSet incoming = iprot.readBitSet(2);
    if (incoming.get(0)) {
      struct.domain = iprot.readString();
      struct.set_domain_isSet(true);
    }
    if (incoming.get(1)) {
      {
        org.apache.thrift.protocol.TList _list53 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, iprot.readI32());
        struct.key = new ArrayList<Long>(_list53.size);
        for (int _i54 = 0; _i54 < _list53.size; ++_i54) {
          long _elem55; // required
          _elem55 = iprot.readI64();
          struct.key.add(_elem55);
        }
      }
      struct.set_key_isSet(true);
    }
  }
}

}

// NOTE(review): Thrift-generated result struct for multiGetLong — carries either the
// success value list (field id 0) or one of the declared exceptions (dnfe/hde/dnle).
public static class multiGetLong_result implements org.apache.thrift.TBase<multiGetLong_result, multiGetLong_result._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("multiGetLong_result");

  private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0);
  private static final org.apache.thrift.protocol.TField DNFE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnfe", org.apache.thrift.protocol.TType.STRUCT, (short)1);
  private static final org.apache.thrift.protocol.TField HDE_FIELD_DESC = new org.apache.thrift.protocol.TField("hde", org.apache.thrift.protocol.TType.STRUCT, (short)2);
  private static final org.apache.thrift.protocol.TField DNLE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnle", org.apache.thrift.protocol.TType.STRUCT, (short)3);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new multiGetLong_resultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new multiGetLong_resultTupleSchemeFactory());
  }

  private List<elephantdb.generated.Value> success; // required
  private elephantdb.generated.DomainNotFoundException dnfe; // required
  private elephantdb.generated.HostsDownException hde; // required
  private elephantdb.generated.DomainNotLoadedException dnle; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    SUCCESS((short)0, "success"),
    DNFE((short)1, "dnfe"),
    HDE((short)2, "hde"),
    DNLE((short)3, "dnle");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 0: // SUCCESS
          return SUCCESS;
        case 1: // DNFE
          return DNFE;
        case 2: // HDE
          return HDE;
        case 3: // DNLE
          return DNLE;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments

  // Runtime metadata describing each field's wire type; registered globally below.
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, elephantdb.generated.Value.class))));
    tmpMap.put(_Fields.DNFE, new org.apache.thrift.meta_data.FieldMetaData("dnfe", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    tmpMap.put(_Fields.HDE, new org.apache.thrift.meta_data.FieldMetaData("hde", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    tmpMap.put(_Fields.DNLE, new org.apache.thrift.meta_data.FieldMetaData("dnle", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(multiGetLong_result.class, metaDataMap);
  }

  public multiGetLong_result() {
  }

  public multiGetLong_result(
    List<elephantdb.generated.Value> success,
    elephantdb.generated.DomainNotFoundException dnfe,
    elephantdb.generated.HostsDownException hde,
    elephantdb.generated.DomainNotLoadedException dnle)
  {
    this();
    this.success = success;
    this.dnfe = dnfe;
    this.hde = hde;
    this.dnle = dnle;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public multiGetLong_result(multiGetLong_result other) {
    if (other.is_set_success()) {
      List<elephantdb.generated.Value> __this__success = new ArrayList<elephantdb.generated.Value>();
      for (elephantdb.generated.Value other_element : other.success) {
        __this__success.add(new elephantdb.generated.Value(other_element));
      }
      this.success = __this__success;
    }
    if (other.is_set_dnfe()) {
      this.dnfe = new elephantdb.generated.DomainNotFoundException(other.dnfe);
    }
    if (other.is_set_hde()) {
      this.hde = new elephantdb.generated.HostsDownException(other.hde);
    }
    if (other.is_set_dnle()) {
      this.dnle = new elephantdb.generated.DomainNotLoadedException(other.dnle);
    }
  }

  public multiGetLong_result deepCopy() {
    return new multiGetLong_result(this);
  }

  @Override
  public void clear() {
    this.success = null;
    this.dnfe = null;
    this.hde = null;
    this.dnle = null;
  }

  public int get_success_size() {
    return (this.success == null) ? 0 : this.success.size();
  }

  public java.util.Iterator<elephantdb.generated.Value> get_success_iterator() {
    return (this.success == null) ? null : this.success.iterator();
  }

  public void add_to_success(elephantdb.generated.Value elem) {
    if (this.success == null) {
      this.success = new ArrayList<elephantdb.generated.Value>();
    }
    this.success.add(elem);
  }

  public List<elephantdb.generated.Value> get_success() {
    return this.success;
  }

  public void set_success(List<elephantdb.generated.Value> success) {
    this.success = success;
  }

  public void unset_success() {
    this.success = null;
  }

  /** Returns true if field success is set (has been assigned a value) and false otherwise */
  public boolean is_set_success() {
    return this.success != null;
  }

  public void set_success_isSet(boolean value) {
    if (!value) {
      this.success = null;
    }
  }

  public elephantdb.generated.DomainNotFoundException get_dnfe() {
    return this.dnfe;
  }

  public void set_dnfe(elephantdb.generated.DomainNotFoundException dnfe) {
    this.dnfe = dnfe;
  }

  public void unset_dnfe() {
    this.dnfe = null;
  }

  /** Returns true if field dnfe is set (has been assigned a value) and false otherwise */
  public boolean is_set_dnfe() {
    return this.dnfe != null;
  }

  public void set_dnfe_isSet(boolean value) {
    if (!value) {
      this.dnfe = null;
    }
  }

  public elephantdb.generated.HostsDownException get_hde() {
    return this.hde;
  }

  public void set_hde(elephantdb.generated.HostsDownException hde) {
    this.hde = hde;
  }

  public void unset_hde() {
    this.hde = null;
  }

  /** Returns true if field hde is set (has been assigned a value) and false otherwise */
  public boolean is_set_hde() {
    return this.hde != null;
  }

  public void set_hde_isSet(boolean value) {
    if (!value) {
      this.hde = null;
    }
  }

  public elephantdb.generated.DomainNotLoadedException get_dnle() {
    return this.dnle;
  }

  public void set_dnle(elephantdb.generated.DomainNotLoadedException dnle) {
    this.dnle = dnle;
  }

  public void unset_dnle() {
    this.dnle = null;
  }

  /** Returns true if field dnle is set (has been assigned a value) and false otherwise */
  public boolean is_set_dnle() {
    return this.dnle != null;
  }

  public void set_dnle_isSet(boolean value) {
    if (!value) {
      this.dnle = null;
    }
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case SUCCESS:
      if (value == null) {
        unset_success();
      } else {
        set_success((List<elephantdb.generated.Value>)value);
      }
      break;

    case DNFE:
      if (value == null) {
        unset_dnfe();
      } else {
        set_dnfe((elephantdb.generated.DomainNotFoundException)value);
      }
      break;

    case HDE:
      if (value == null) {
        unset_hde();
      } else {
        set_hde((elephantdb.generated.HostsDownException)value);
      }
      break;

    case DNLE:
      if (value == null) {
        unset_dnle();
      } else {
        set_dnle((elephantdb.generated.DomainNotLoadedException)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case SUCCESS:
      return get_success();

    case DNFE:
      return get_dnfe();

    case HDE:
      return get_hde();

    case DNLE:
      return get_dnle();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case SUCCESS:
      return is_set_success();
    case DNFE:
      return is_set_dnfe();
    case HDE:
      return is_set_hde();
    case DNLE:
      return is_set_dnle();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof multiGetLong_result)
      return this.equals((multiGetLong_result)that);
    return false;
  }

  // Field-by-field equality: both sides must agree on set/unset status and value.
  public boolean equals(multiGetLong_result that) {
    if (that == null)
      return false;

    boolean this_present_success = true && this.is_set_success();
    boolean that_present_success = true && that.is_set_success();
    if (this_present_success || that_present_success) {
      if (!(this_present_success && that_present_success))
        return false;
      if (!this.success.equals(that.success))
        return false;
    }

    boolean this_present_dnfe = true && this.is_set_dnfe();
    boolean that_present_dnfe = true && that.is_set_dnfe();
    if (this_present_dnfe || that_present_dnfe) {
      if (!(this_present_dnfe && that_present_dnfe))
        return false;
      if (!this.dnfe.equals(that.dnfe))
        return false;
    }

    boolean this_present_hde = true && this.is_set_hde();
    boolean that_present_hde = true && that.is_set_hde();
    if (this_present_hde || that_present_hde) {
      if (!(this_present_hde && that_present_hde))
        return false;
      if (!this.hde.equals(that.hde))
        return false;
    }

    boolean this_present_dnle = true && this.is_set_dnle();
    boolean that_present_dnle = true && that.is_set_dnle();
    if (this_present_dnle || that_present_dnle) {
      if (!(this_present_dnle && that_present_dnle))
        return false;
      if (!this.dnle.equals(that.dnle))
        return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    HashCodeBuilder builder = new HashCodeBuilder();

    boolean present_success = true && (is_set_success());
    builder.append(present_success);
    if (present_success)
      builder.append(success);

    boolean present_dnfe = true && (is_set_dnfe());
    builder.append(present_dnfe);
    if (present_dnfe)
      builder.append(dnfe);

    boolean present_hde = true && (is_set_hde());
    builder.append(present_hde);
    if (present_hde)
      builder.append(hde);

    boolean present_dnle = true && (is_set_dnle());
    builder.append(present_dnle);
    if (present_dnle)
      builder.append(dnle);

    return builder.toHashCode();
  }

  // Orders by each field's set-flag first, then by the field value, in field-id order.
  public int compareTo(multiGetLong_result other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;
    multiGetLong_result typedOther = (multiGetLong_result)other;

    lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_success()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_dnfe()).compareTo(typedOther.is_set_dnfe());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_dnfe()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnfe, typedOther.dnfe);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_hde()).compareTo(typedOther.is_set_hde());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_hde()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hde, typedOther.hde);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_dnle()).compareTo(typedOther.is_set_dnle());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_dnle()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnle, typedOther.dnle);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  // Dispatch to the scheme (standard or tuple) matching the protocol in use.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("multiGetLong_result(");
    boolean first = true;

    sb.append("success:");
    if (this.success == null) {
      sb.append("null");
    } else {
      sb.append(this.success);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("dnfe:");
    if (this.dnfe == null) {
      sb.append("null");
    } else {
      sb.append(this.dnfe);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("hde:");
    if (this.hde == null) {
      sb.append("null");
    } else {
      sb.append(this.hde);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("dnle:");
    if (this.dnle == null) {
      sb.append("null");
    } else {
      sb.append(this.dnle);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
  }

  // Java serialization is delegated to Thrift's compact protocol encoding.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class multiGetLong_resultStandardSchemeFactory implements SchemeFactory {
    public multiGetLong_resultStandardScheme getScheme() {
      return new multiGetLong_resultStandardScheme();
    }
  }

  private static class multiGetLong_resultStandardScheme extends StandardScheme<multiGetLong_result> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, multiGetLong_result struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true) {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 0: // SUCCESS
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list56 = iprot.readListBegin();
                struct.success = new ArrayList<elephantdb.generated.Value>(_list56.size);
                for (int _i57 = 0; _i57 < _list56.size; ++_i57) {
                  elephantdb.generated.Value _elem58; // required
                  _elem58 = new elephantdb.generated.Value();
                  _elem58.read(iprot);
                  struct.success.add(_elem58);
                }
                iprot.readListEnd();
              }
              struct.set_success_isSet(true);
            } else {
              // Unexpected wire type: skip the value for forward compatibility.
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 1: // DNFE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.dnfe = new elephantdb.generated.DomainNotFoundException();
              struct.dnfe.read(iprot);
              struct.set_dnfe_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // HDE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.hde = new elephantdb.generated.HostsDownException();
              struct.hde.read(iprot);
              struct.set_hde_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // DNLE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.dnle = new elephantdb.generated.DomainNotLoadedException();
              struct.dnle.read(iprot);
              struct.set_dnle_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            // Unknown field id: skip so newer peers can add fields safely.
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, multiGetLong_result struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.success != null) {
        oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size()));
          for (elephantdb.generated.Value _iter59 : struct.success) {
            _iter59.write(oprot);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      if (struct.dnfe != null) {
        oprot.writeFieldBegin(DNFE_FIELD_DESC);
        struct.dnfe.write(oprot);
        oprot.writeFieldEnd();
      }
      if (struct.hde != null) {
        oprot.writeFieldBegin(HDE_FIELD_DESC);
        struct.hde.write(oprot);
        oprot.writeFieldEnd();
      }
      if (struct.dnle != null) {
        oprot.writeFieldBegin(DNLE_FIELD_DESC);
        struct.dnle.write(oprot);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }

  private static class multiGetLong_resultTupleSchemeFactory implements SchemeFactory {
    public multiGetLong_resultTupleScheme getScheme() {
      return new multiGetLong_resultTupleScheme();
    }
  }

  // Tuple scheme: compact encoding that writes a BitSet of which fields are set,
  // followed only by the set fields' values (no per-field headers).
  private static class multiGetLong_resultTupleScheme extends TupleScheme<multiGetLong_result> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, multiGetLong_result struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.is_set_success()) {
        optionals.set(0);
      }
      if (struct.is_set_dnfe()) {
        optionals.set(1);
      }
      if (struct.is_set_hde()) {
        optionals.set(2);
      }
      if (struct.is_set_dnle()) {
        optionals.set(3);
      }
      oprot.writeBitSet(optionals, 4);
      if (struct.is_set_success()) {
        {
          oprot.writeI32(struct.success.size());
          for (elephantdb.generated.Value _iter60 : struct.success) {
            _iter60.write(oprot);
          }
        }
      }
      if (struct.is_set_dnfe()) {
        struct.dnfe.write(oprot);
      }
      if (struct.is_set_hde()) {
        struct.hde.write(oprot);
      }
      if (struct.is_set_dnle()) {
        struct.dnle.write(oprot);
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, multiGetLong_result struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(4);
      if (incoming.get(0)) {
        {
          org.apache.thrift.protocol.TList _list61 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
          struct.success = new ArrayList<elephantdb.generated.Value>(_list61.size);
          for (int _i62 = 0; _i62 < _list61.size; ++_i62) {
            elephantdb.generated.Value _elem63; // required
            _elem63 = new elephantdb.generated.Value();
            _elem63.read(iprot);
            struct.success.add(_elem63);
          }
        }
        struct.set_success_isSet(true);
      }
      if (incoming.get(1)) {
        struct.dnfe = new elephantdb.generated.DomainNotFoundException();
        struct.dnfe.read(iprot);
        struct.set_dnfe_isSet(true);
      }
      if (incoming.get(2)) {
        struct.hde = new elephantdb.generated.HostsDownException();
        struct.hde.read(iprot);
        struct.set_hde_isSet(true);
      }
      if (incoming.get(3)) {
        struct.dnle = new elephantdb.generated.DomainNotLoadedException();
        struct.dnle.read(iprot);
        struct.set_dnle_isSet(true);
      }
    }
  }

}

public static class directMultiGet_args implements org.apache.thrift.TBase<directMultiGet_args, directMultiGet_args._Fields>, java.io.Serializable, Cloneable {
  private
// --- Thrift-generated internals of directMultiGet_args (request struct for directMultiGet) ---
// NOTE(review): this is Apache Thrift compiler output; regenerate from the .thrift IDL
// rather than hand-editing — manual changes will be lost on the next codegen run.

// Struct/field wire descriptors used by the protocol schemes below.
static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("directMultiGet_args");

private static final org.apache.thrift.protocol.TField DOMAIN_FIELD_DESC = new org.apache.thrift.protocol.TField("domain", org.apache.thrift.protocol.TType.STRING, (short)1);
private static final org.apache.thrift.protocol.TField KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("key", org.apache.thrift.protocol.TType.LIST, (short)2);

// Scheme registry: selects the standard (field-tagged) or tuple (compact) codec per protocol.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
  schemes.put(StandardScheme.class, new directMultiGet_argsStandardSchemeFactory());
  schemes.put(TupleScheme.class, new directMultiGet_argsTupleSchemeFactory());
}

private String domain; // required
private List<ByteBuffer> key; // required

/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
  DOMAIN((short)1, "domain"),
  KEY((short)2, "key");

  private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

  static {
    for (_Fields field : EnumSet.allOf(_Fields.class)) {
      byName.put(field.getFieldName(), field);
    }
  }

  /**
   * Find the _Fields constant that matches fieldId, or null if its not found.
   */
  public static _Fields findByThriftId(int fieldId) {
    switch(fieldId) {
      case 1: // DOMAIN
        return DOMAIN;
      case 2: // KEY
        return KEY;
      default:
        return null;
    }
  }

  /**
   * Find the _Fields constant that matches fieldId, throwing an exception
   * if it is not found.
   */
  public static _Fields findByThriftIdOrThrow(int fieldId) {
    _Fields fields = findByThriftId(fieldId);
    if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
    return fields;
  }

  /**
   * Find the _Fields constant that matches name, or null if its not found.
   */
  public static _Fields findByName(String name) {
    return byName.get(name);
  }

  private final short _thriftId;
  private final String _fieldName;

  _Fields(short thriftId, String fieldName) {
    _thriftId = thriftId;
    _fieldName = fieldName;
  }

  public short getThriftFieldId() {
    return _thriftId;
  }

  public String getFieldName() {
    return _fieldName;
  }
}

// isset id assignments

// Per-field metadata, registered globally so reflective Thrift utilities can inspect this struct.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
  Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
  tmpMap.put(_Fields.DOMAIN, new org.apache.thrift.meta_data.FieldMetaData("domain", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.KEY, new org.apache.thrift.meta_data.FieldMetaData("key", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING, true))));
  metaDataMap = Collections.unmodifiableMap(tmpMap);
  org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(directMultiGet_args.class, metaDataMap);
}

public directMultiGet_args() {
}

public directMultiGet_args(
  String domain,
  List<ByteBuffer> key)
{
  this();
  this.domain = domain;
  this.key = key;
}

/**
 * Performs a deep copy on <i>other</i>.
 */
public directMultiGet_args(directMultiGet_args other) {
  if (other.is_set_domain()) {
    this.domain = other.domain;
  }
  if (other.is_set_key()) {
    List<ByteBuffer> __this__key = new ArrayList<ByteBuffer>();
    for (ByteBuffer other_element : other.key) {
      // copyBinary duplicates the buffer contents so the copy is independent of other's buffers
      ByteBuffer temp_binary_element = org.apache.thrift.TBaseHelper.copyBinary(other_element);
      ; // generator artifact: empty statement emitted by the Thrift compiler
      __this__key.add(temp_binary_element);
    }
    this.key = __this__key;
  }
}

public directMultiGet_args deepCopy() {
  return new directMultiGet_args(this);
}

// Resets both fields to their unset (null) state.
@Override
public void clear() {
  this.domain = null;
  this.key = null;
}

public String get_domain() {
  return this.domain;
}

public void set_domain(String domain) {
  this.domain = domain;
}

public void unset_domain() {
  this.domain = null;
}

/** Returns true if field domain is set (has been assigned a value) and false otherwise */
public boolean is_set_domain() {
  return this.domain != null;
}

public void set_domain_isSet(boolean value) {
  if (!value) {
    this.domain = null;
  }
}

public int get_key_size() {
  return (this.key == null) ? 0 : this.key.size();
}

public java.util.Iterator<ByteBuffer> get_key_iterator() {
  return (this.key == null) ? null : this.key.iterator();
}

// Lazily creates the backing list on first append.
public void add_to_key(ByteBuffer elem) {
  if (this.key == null) {
    this.key = new ArrayList<ByteBuffer>();
  }
  this.key.add(elem);
}

public List<ByteBuffer> get_key() {
  return this.key;
}

public void set_key(List<ByteBuffer> key) {
  this.key = key;
}

public void unset_key() {
  this.key = null;
}

/** Returns true if field key is set (has been assigned a value) and false otherwise */
public boolean is_set_key() {
  return this.key != null;
}

public void set_key_isSet(boolean value) {
  if (!value) {
    this.key = null;
  }
}

// Generic, reflective field setter used by Thrift utilities; a null value unsets the field.
public void setFieldValue(_Fields field, Object value) {
  switch (field) {
  case DOMAIN:
    if (value == null) {
      unset_domain();
    } else {
      set_domain((String)value);
    }
    break;

  case KEY:
    if (value == null) {
      unset_key();
    } else {
      set_key((List<ByteBuffer>)value);
    }
    break;

  }
}

public Object getFieldValue(_Fields field) {
  switch (field) {
  case DOMAIN:
    return get_domain();

  case KEY:
    return get_key();

  }
  throw new IllegalStateException();
}

/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new IllegalArgumentException();
  }

  switch (field) {
  case DOMAIN:
    return is_set_domain();
  case KEY:
    return is_set_key();
  }
  throw new IllegalStateException();
}

@Override
public boolean equals(Object that) {
  if (that == null)
    return false;
  if (that instanceof directMultiGet_args)
    return this.equals((directMultiGet_args)that);
  return false;
}

// Field-by-field equality: two corresponding unset fields compare equal.
public boolean equals(directMultiGet_args that) {
  if (that == null)
    return false;

  boolean this_present_domain = true && this.is_set_domain();
  boolean that_present_domain = true && that.is_set_domain();
  if (this_present_domain || that_present_domain) {
    if (!(this_present_domain && that_present_domain))
      return false;
    if (!this.domain.equals(that.domain))
      return false;
  }

  boolean this_present_key = true && this.is_set_key();
  boolean that_present_key = true && that.is_set_key();
  if (this_present_key || that_present_key) {
    if (!(this_present_key && that_present_key))
      return false;
    if (!this.key.equals(that.key))
      return false;
  }

  return true;
}

// Kept consistent with equals(): folds in a presence flag plus the value of each set field.
@Override
public int hashCode() {
  HashCodeBuilder builder = new HashCodeBuilder();

  boolean present_domain = true && (is_set_domain());
  builder.append(present_domain);
  if (present_domain)
    builder.append(domain);

  boolean present_key = true && (is_set_key());
  builder.append(present_key);
  if (present_key)
    builder.append(key);

  return builder.toHashCode();
}

// Orders by class name first, then by field presence and value in field-id order.
public int compareTo(directMultiGet_args other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }

  int lastComparison = 0;
  directMultiGet_args typedOther = (directMultiGet_args)other;

  lastComparison = Boolean.valueOf(is_set_domain()).compareTo(typedOther.is_set_domain());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_domain()) {
    lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.domain, typedOther.domain);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = Boolean.valueOf(is_set_key()).compareTo(typedOther.is_set_key());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_key()) {
    lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.key, typedOther.key);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  return 0;
}

public _Fields fieldForId(int fieldId) {
  return _Fields.findByThriftId(fieldId);
}

// Delegates (de)serialization to the scheme registered for the protocol in use.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
  schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}

public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
  schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}

@Override
public String toString() {
  StringBuilder sb = new StringBuilder("directMultiGet_args(");
  boolean first = true;

  sb.append("domain:");
  if (this.domain == null) {
    sb.append("null");
  } else {
    sb.append(this.domain);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("key:");
  if (this.key == null) {
    sb.append("null");
  } else {
    sb.append(this.key);
  }
  first = false;
  sb.append(")");
  return sb.toString();
}

public void validate() throws org.apache.thrift.TException {
  // check for required fields
}

// Java serialization bridges: encode/decode via Thrift's compact protocol so the
// standard java.io.Serializable machinery reuses the Thrift wire format.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}

private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
  try {
    read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}

private static class directMultiGet_argsStandardSchemeFactory implements SchemeFactory {
  public directMultiGet_argsStandardScheme getScheme() {
    return new directMultiGet_argsStandardScheme();
  }
}

// Field-tagged wire format: tolerates fields arriving in any order and skips
// unknown or mistyped fields for forward/backward compatibility.
private static class directMultiGet_argsStandardScheme extends StandardScheme<directMultiGet_args> {

  public void read(org.apache.thrift.protocol.TProtocol iprot, directMultiGet_args struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TField schemeField;
    iprot.readStructBegin();
    while (true)
    {
      schemeField = iprot.readFieldBegin();
      if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
        break;
      }
      switch (schemeField.id) {
        case 1: // DOMAIN
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.domain = iprot.readString();
            struct.set_domain_isSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 2: // KEY
          if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
            {
              org.apache.thrift.protocol.TList _list64 = iprot.readListBegin();
              struct.key = new ArrayList<ByteBuffer>(_list64.size);
              for (int _i65 = 0; _i65 < _list64.size; ++_i65)
              {
                ByteBuffer _elem66; // required
                _elem66 = iprot.readBinary();
                struct.key.add(_elem66);
              }
              iprot.readListEnd();
            }
            struct.set_key_isSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
      }
      iprot.readFieldEnd();
    }
    iprot.readStructEnd();
    struct.validate();
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot, directMultiGet_args struct) throws org.apache.thrift.TException {
    struct.validate();

    oprot.writeStructBegin(STRUCT_DESC);
    if (struct.domain != null) {
      oprot.writeFieldBegin(DOMAIN_FIELD_DESC);
      oprot.writeString(struct.domain);
      oprot.writeFieldEnd();
    }
    if (struct.key != null) {
      oprot.writeFieldBegin(KEY_FIELD_DESC);
      {
        oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.key.size()));
        for (ByteBuffer _iter67 : struct.key)
        {
          oprot.writeBinary(_iter67);
        }
        oprot.writeListEnd();
      }
      oprot.writeFieldEnd();
    }
    oprot.writeFieldStop();
    oprot.writeStructEnd();
  }

}

private static class directMultiGet_argsTupleSchemeFactory implements SchemeFactory {
  public directMultiGet_argsTupleScheme getScheme() {
    return new directMultiGet_argsTupleScheme();
  }
}

// Compact wire format: a bitset of present fields followed by the set field values,
// written in a fixed order both sides must agree on.
private static class directMultiGet_argsTupleScheme extends TupleScheme<directMultiGet_args> {

  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, directMultiGet_args struct) throws org.apache.thrift.TException {
    TTupleProtocol oprot = (TTupleProtocol) prot;
    BitSet optionals = new BitSet();
    if (struct.is_set_domain()) {
      optionals.set(0);
    }
    if (struct.is_set_key()) {
      optionals.set(1);
    }
    oprot.writeBitSet(optionals, 2);
    if (struct.is_set_domain()) {
      oprot.writeString(struct.domain);
    }
    if (struct.is_set_key()) {
      {
        oprot.writeI32(struct.key.size());
        for (ByteBuffer _iter68 : struct.key)
        {
          oprot.writeBinary(_iter68);
        }
      }
    }
  }

  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot,
directMultiGet_args struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } if (incoming.get(1)) { { org.apache.thrift.protocol.TList _list69 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); struct.key = new ArrayList<ByteBuffer>(_list69.size); for (int _i70 = 0; _i70 < _list69.size; ++_i70) { ByteBuffer _elem71; // required _elem71 = iprot.readBinary(); struct.key.add(_elem71); } } struct.set_key_isSet(true); } } } } public static class directMultiGet_result implements org.apache.thrift.TBase<directMultiGet_result, directMultiGet_result._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("directMultiGet_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0); private static final org.apache.thrift.protocol.TField DNFE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnfe", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final org.apache.thrift.protocol.TField HDE_FIELD_DESC = new org.apache.thrift.protocol.TField("hde", org.apache.thrift.protocol.TType.STRUCT, (short)2); private static final org.apache.thrift.protocol.TField DNLE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnle", org.apache.thrift.protocol.TType.STRUCT, (short)3); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>();
// Scheme registry for directMultiGet_result: standard vs. tuple codec, keyed by protocol scheme.
// NOTE(review): Thrift compiler output — regenerate from the .thrift IDL rather than hand-editing.
static {
  schemes.put(StandardScheme.class, new directMultiGet_resultStandardSchemeFactory());
  schemes.put(TupleScheme.class, new directMultiGet_resultTupleSchemeFactory());
}

// Return value (field id 0) and the three declared exceptions of directMultiGet.
private List<elephantdb.generated.Value> success; // required
private elephantdb.generated.DomainNotFoundException dnfe; // required
private elephantdb.generated.HostsDownException hde; // required
private elephantdb.generated.DomainNotLoadedException dnle; // required

/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
  SUCCESS((short)0, "success"),
  DNFE((short)1, "dnfe"),
  HDE((short)2, "hde"),
  DNLE((short)3, "dnle");

  private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

  static {
    for (_Fields field : EnumSet.allOf(_Fields.class)) {
      byName.put(field.getFieldName(), field);
    }
  }

  /**
   * Find the _Fields constant that matches fieldId, or null if its not found.
   */
  public static _Fields findByThriftId(int fieldId) {
    switch(fieldId) {
      case 0: // SUCCESS
        return SUCCESS;
      case 1: // DNFE
        return DNFE;
      case 2: // HDE
        return HDE;
      case 3: // DNLE
        return DNLE;
      default:
        return null;
    }
  }

  /**
   * Find the _Fields constant that matches fieldId, throwing an exception
   * if it is not found.
   */
  public static _Fields findByThriftIdOrThrow(int fieldId) {
    _Fields fields = findByThriftId(fieldId);
    if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
    return fields;
  }

  /**
   * Find the _Fields constant that matches name, or null if its not found.
   */
  public static _Fields findByName(String name) {
    return byName.get(name);
  }

  private final short _thriftId;
  private final String _fieldName;

  _Fields(short thriftId, String fieldName) {
    _thriftId = thriftId;
    _fieldName = fieldName;
  }

  public short getThriftFieldId() {
    return _thriftId;
  }

  public String getFieldName() {
    return _fieldName;
  }
}

// isset id assignments

// Per-field metadata, registered globally for reflective Thrift utilities.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
  Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
  tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, elephantdb.generated.Value.class))));
  tmpMap.put(_Fields.DNFE, new org.apache.thrift.meta_data.FieldMetaData("dnfe", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
  tmpMap.put(_Fields.HDE, new org.apache.thrift.meta_data.FieldMetaData("hde", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
  tmpMap.put(_Fields.DNLE, new org.apache.thrift.meta_data.FieldMetaData("dnle", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
  metaDataMap = Collections.unmodifiableMap(tmpMap);
  org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(directMultiGet_result.class, metaDataMap);
}

public directMultiGet_result() {
}

public directMultiGet_result(
  List<elephantdb.generated.Value> success,
  elephantdb.generated.DomainNotFoundException dnfe,
  elephantdb.generated.HostsDownException hde,
  elephantdb.generated.DomainNotLoadedException dnle)
{
  this();
  this.success = success;
  this.dnfe = dnfe;
  this.hde = hde;
  this.dnle = dnle;
}

/**
 * Performs a deep copy on <i>other</i>.
 */
public directMultiGet_result(directMultiGet_result other) {
  if (other.is_set_success()) {
    List<elephantdb.generated.Value> __this__success = new ArrayList<elephantdb.generated.Value>();
    for (elephantdb.generated.Value other_element : other.success) {
      __this__success.add(new elephantdb.generated.Value(other_element));
    }
    this.success = __this__success;
  }
  if (other.is_set_dnfe()) {
    this.dnfe = new elephantdb.generated.DomainNotFoundException(other.dnfe);
  }
  if (other.is_set_hde()) {
    this.hde = new elephantdb.generated.HostsDownException(other.hde);
  }
  if (other.is_set_dnle()) {
    this.dnle = new elephantdb.generated.DomainNotLoadedException(other.dnle);
  }
}

public directMultiGet_result deepCopy() {
  return new directMultiGet_result(this);
}

// Resets all four fields to their unset (null) state.
@Override
public void clear() {
  this.success = null;
  this.dnfe = null;
  this.hde = null;
  this.dnle = null;
}

public int get_success_size() {
  return (this.success == null) ? 0 : this.success.size();
}

public java.util.Iterator<elephantdb.generated.Value> get_success_iterator() {
  return (this.success == null) ? null : this.success.iterator();
}

// Lazily creates the backing list on first append.
public void add_to_success(elephantdb.generated.Value elem) {
  if (this.success == null) {
    this.success = new ArrayList<elephantdb.generated.Value>();
  }
  this.success.add(elem);
}

public List<elephantdb.generated.Value> get_success() {
  return this.success;
}

public void set_success(List<elephantdb.generated.Value> success) {
  this.success = success;
}

public void unset_success() {
  this.success = null;
}

/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean is_set_success() {
  return this.success != null;
}

public void set_success_isSet(boolean value) {
  if (!value) {
    this.success = null;
  }
}

public elephantdb.generated.DomainNotFoundException get_dnfe() {
  return this.dnfe;
}

public void set_dnfe(elephantdb.generated.DomainNotFoundException dnfe) {
  this.dnfe = dnfe;
}

public void unset_dnfe() {
  this.dnfe = null;
}

/** Returns true if field dnfe is set (has been assigned a value) and false otherwise */
public boolean is_set_dnfe() {
  return this.dnfe != null;
}

public void set_dnfe_isSet(boolean value) {
  if (!value) {
    this.dnfe = null;
  }
}

public elephantdb.generated.HostsDownException get_hde() {
  return this.hde;
}

public void set_hde(elephantdb.generated.HostsDownException hde) {
  this.hde = hde;
}

public void unset_hde() {
  this.hde = null;
}

/** Returns true if field hde is set (has been assigned a value) and false otherwise */
public boolean is_set_hde() {
  return this.hde != null;
}

public void set_hde_isSet(boolean value) {
  if (!value) {
    this.hde = null;
  }
}

public elephantdb.generated.DomainNotLoadedException get_dnle() {
  return this.dnle;
}

public void set_dnle(elephantdb.generated.DomainNotLoadedException dnle) {
  this.dnle = dnle;
}

public void unset_dnle() {
  this.dnle = null;
}

/** Returns true if field dnle is set (has been assigned a value) and false otherwise */
public boolean is_set_dnle() {
  return this.dnle != null;
}

public void set_dnle_isSet(boolean value) {
  if (!value) {
    this.dnle = null;
  }
}

// Generic, reflective field setter used by Thrift utilities; a null value unsets the field.
public void setFieldValue(_Fields field, Object value) {
  switch (field) {
  case SUCCESS:
    if (value == null) {
      unset_success();
    } else {
      set_success((List<elephantdb.generated.Value>)value);
    }
    break;

  case DNFE:
    if (value == null) {
      unset_dnfe();
    } else {
      set_dnfe((elephantdb.generated.DomainNotFoundException)value);
    }
    break;

  case HDE:
    if (value == null) {
      unset_hde();
    } else {
      set_hde((elephantdb.generated.HostsDownException)value);
    }
    break;

  case DNLE:
    if (value == null) {
      unset_dnle();
    } else {
      set_dnle((elephantdb.generated.DomainNotLoadedException)value);
    }
    break;

  }
}

public Object getFieldValue(_Fields field) {
  switch (field) {
  case SUCCESS:
    return get_success();

  case DNFE:
    return get_dnfe();

  case HDE:
    return get_hde();

  case DNLE:
    return get_dnle();

  }
  throw new IllegalStateException();
}

/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new IllegalArgumentException();
  }

  switch (field) {
  case SUCCESS:
    return is_set_success();
  case DNFE:
    return is_set_dnfe();
  case HDE:
    return is_set_hde();
  case DNLE:
    return is_set_dnle();
  }
  throw new IllegalStateException();
}

@Override
public boolean equals(Object that) {
  if (that == null)
    return false;
  if (that instanceof directMultiGet_result)
    return this.equals((directMultiGet_result)that);
  return false;
}

// Field-by-field equality: two corresponding unset fields compare equal.
public boolean equals(directMultiGet_result that) {
  if (that == null)
    return false;

  boolean this_present_success = true && this.is_set_success();
  boolean that_present_success = true && that.is_set_success();
  if (this_present_success || that_present_success) {
    if (!(this_present_success && that_present_success))
      return false;
    if (!this.success.equals(that.success))
      return false;
  }

  boolean this_present_dnfe = true && this.is_set_dnfe();
  boolean that_present_dnfe = true && that.is_set_dnfe();
  if (this_present_dnfe || that_present_dnfe) {
    if (!(this_present_dnfe && that_present_dnfe))
      return false;
    if (!this.dnfe.equals(that.dnfe))
      return false;
  }

  boolean this_present_hde = true && this.is_set_hde();
  boolean that_present_hde = true && that.is_set_hde();
  if (this_present_hde || that_present_hde) {
    if (!(this_present_hde && that_present_hde))
      return false;
    if (!this.hde.equals(that.hde))
      return false;
  }

  boolean this_present_dnle = true && this.is_set_dnle();
  boolean that_present_dnle = true && that.is_set_dnle();
  if (this_present_dnle || that_present_dnle) {
    if (!(this_present_dnle && that_present_dnle))
      return false;
    if (!this.dnle.equals(that.dnle))
      return false;
  }

  return true;
}

// Kept consistent with equals(): folds in a presence flag plus the value of each set field.
@Override
public int hashCode() {
  HashCodeBuilder builder = new HashCodeBuilder();

  boolean present_success = true && (is_set_success());
  builder.append(present_success);
  if (present_success)
    builder.append(success);

  boolean present_dnfe = true && (is_set_dnfe());
  builder.append(present_dnfe);
  if (present_dnfe)
    builder.append(dnfe);

  boolean present_hde = true && (is_set_hde());
  builder.append(present_hde);
  if (present_hde)
    builder.append(hde);

  boolean present_dnle = true && (is_set_dnle());
  builder.append(present_dnle);
  if (present_dnle)
    builder.append(dnle);

  return builder.toHashCode();
}

// Orders by class name first, then by field presence and value in field-id order.
public int compareTo(directMultiGet_result other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }

  int lastComparison = 0;
  directMultiGet_result typedOther = (directMultiGet_result)other;

  lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_success()) {
    lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = Boolean.valueOf(is_set_dnfe()).compareTo(typedOther.is_set_dnfe());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_dnfe()) {
    lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnfe, typedOther.dnfe);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = Boolean.valueOf(is_set_hde()).compareTo(typedOther.is_set_hde());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_hde()) {
    lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hde, typedOther.hde);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = Boolean.valueOf(is_set_dnle()).compareTo(typedOther.is_set_dnle());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_dnle()) {
    lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnle, typedOther.dnle);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  return 0;
}

public _Fields fieldForId(int fieldId) {
  return _Fields.findByThriftId(fieldId);
}

// Delegates (de)serialization to the scheme registered for the protocol in use.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
  schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}

public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
  schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}

@Override
public String toString() {
  StringBuilder sb = new StringBuilder("directMultiGet_result(");
  boolean first = true;

  sb.append("success:");
  if (this.success == null) {
    sb.append("null");
  } else {
    sb.append(this.success);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("dnfe:");
  if (this.dnfe == null) {
    sb.append("null");
  } else {
    sb.append(this.dnfe);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("hde:");
  if (this.hde == null) {
    sb.append("null");
  } else {
    sb.append(this.hde);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("dnle:");
  if (this.dnle == null) {
    sb.append("null");
  } else {
    sb.append(this.dnle);
  }
  first = false;
  sb.append(")");
  return sb.toString();
}

public void validate() throws org.apache.thrift.TException {
  // check for required fields
}

// Java serialization bridges: encode/decode via Thrift's compact protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}

private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
  try {
    read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}

private static class directMultiGet_resultStandardSchemeFactory implements SchemeFactory {
  public directMultiGet_resultStandardScheme getScheme() {
    return new directMultiGet_resultStandardScheme();
  }
}

// Field-tagged wire format: tolerates fields in any order, skipping unknown/mistyped ones.
private static class directMultiGet_resultStandardScheme extends StandardScheme<directMultiGet_result> {

  public void read(org.apache.thrift.protocol.TProtocol iprot, directMultiGet_result struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TField schemeField;
    iprot.readStructBegin();
    while (true)
    {
      schemeField = iprot.readFieldBegin();
      if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
        break;
      }
      switch (schemeField.id) {
        case 0: // SUCCESS
          if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
            {
              org.apache.thrift.protocol.TList _list72 = iprot.readListBegin();
              struct.success = new ArrayList<elephantdb.generated.Value>(_list72.size);
              for (int _i73 = 0; _i73 < _list72.size; ++_i73)
              {
                elephantdb.generated.Value _elem74; // required
                _elem74 = new elephantdb.generated.Value();
                _elem74.read(iprot);
                struct.success.add(_elem74);
              }
              iprot.readListEnd();
            }
            struct.set_success_isSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 1: // DNFE
          if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
            struct.dnfe = new elephantdb.generated.DomainNotFoundException();
            struct.dnfe.read(iprot);
            struct.set_dnfe_isSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 2: // HDE
          if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
            struct.hde = new elephantdb.generated.HostsDownException();
            struct.hde.read(iprot);
            struct.set_hde_isSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 3: // DNLE
          if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
            struct.dnle = new elephantdb.generated.DomainNotLoadedException();
            struct.dnle.read(iprot);
            struct.set_dnle_isSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
      }
      iprot.readFieldEnd();
    }
    iprot.readStructEnd();
    struct.validate();
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot, directMultiGet_result struct) throws org.apache.thrift.TException {
    struct.validate();

    oprot.writeStructBegin(STRUCT_DESC);
    if (struct.success != null) {
      oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
      {
        oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size()));
        for (elephantdb.generated.Value _iter75 : struct.success)
        {
          _iter75.write(oprot);
        }
        oprot.writeListEnd();
      }
      oprot.writeFieldEnd();
    }
    if (struct.dnfe != null) {
      oprot.writeFieldBegin(DNFE_FIELD_DESC);
      struct.dnfe.write(oprot);
      oprot.writeFieldEnd();
    }
    if (struct.hde != null) {
      oprot.writeFieldBegin(HDE_FIELD_DESC);
      struct.hde.write(oprot);
      oprot.writeFieldEnd();
    }
    if (struct.dnle != null) {
      oprot.writeFieldBegin(DNLE_FIELD_DESC);
      struct.dnle.write(oprot);
      oprot.writeFieldEnd();
    }
    oprot.writeFieldStop();
    oprot.writeStructEnd();
  }

}

private static class directMultiGet_resultTupleSchemeFactory implements SchemeFactory {
  public directMultiGet_resultTupleScheme getScheme() {
    return new directMultiGet_resultTupleScheme();
  }
}

// Compact wire format: a 4-entry bitset of present fields followed by the set field values.
private static class directMultiGet_resultTupleScheme extends TupleScheme<directMultiGet_result> {

  @Override
  public void
write(org.apache.thrift.protocol.TProtocol prot, directMultiGet_result struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.is_set_success()) { optionals.set(0); } if (struct.is_set_dnfe()) { optionals.set(1); } if (struct.is_set_hde()) { optionals.set(2); } if (struct.is_set_dnle()) { optionals.set(3); } oprot.writeBitSet(optionals, 4); if (struct.is_set_success()) { { oprot.writeI32(struct.success.size()); for (elephantdb.generated.Value _iter76 : struct.success) { _iter76.write(oprot); } } } if (struct.is_set_dnfe()) { struct.dnfe.write(oprot); } if (struct.is_set_hde()) { struct.hde.write(oprot); } if (struct.is_set_dnle()) { struct.dnle.write(oprot); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, directMultiGet_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(4); if (incoming.get(0)) { { org.apache.thrift.protocol.TList _list77 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.success = new ArrayList<elephantdb.generated.Value>(_list77.size); for (int _i78 = 0; _i78 < _list77.size; ++_i78) { elephantdb.generated.Value _elem79; // required _elem79 = new elephantdb.generated.Value(); _elem79.read(iprot); struct.success.add(_elem79); } } struct.set_success_isSet(true); } if (incoming.get(1)) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } if (incoming.get(2)) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } if (incoming.get(3)) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } } } } public static class directKryoMultiGet_args implements org.apache.thrift.TBase<directKryoMultiGet_args, 
directKryoMultiGet_args._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("directKryoMultiGet_args"); private static final org.apache.thrift.protocol.TField DOMAIN_FIELD_DESC = new org.apache.thrift.protocol.TField("domain", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("key", org.apache.thrift.protocol.TType.LIST, (short)2); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new directKryoMultiGet_argsStandardSchemeFactory()); schemes.put(TupleScheme.class, new directKryoMultiGet_argsTupleSchemeFactory()); } private String domain; // required private List<ByteBuffer> key; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { DOMAIN((short)1, "domain"), KEY((short)2, "key"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // DOMAIN return DOMAIN; case 2: // KEY return KEY; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.DOMAIN, new org.apache.thrift.meta_data.FieldMetaData("domain", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.KEY, new org.apache.thrift.meta_data.FieldMetaData("key", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(directKryoMultiGet_args.class, metaDataMap); } public directKryoMultiGet_args() { } public directKryoMultiGet_args( String domain, List<ByteBuffer> key) { this(); this.domain = domain; this.key = key; } /** * Performs a deep copy on <i>other</i>. 
*/ public directKryoMultiGet_args(directKryoMultiGet_args other) { if (other.is_set_domain()) { this.domain = other.domain; } if (other.is_set_key()) { List<ByteBuffer> __this__key = new ArrayList<ByteBuffer>(); for (ByteBuffer other_element : other.key) { ByteBuffer temp_binary_element = org.apache.thrift.TBaseHelper.copyBinary(other_element); ; __this__key.add(temp_binary_element); } this.key = __this__key; } } public directKryoMultiGet_args deepCopy() { return new directKryoMultiGet_args(this); } @Override public void clear() { this.domain = null; this.key = null; } public String get_domain() { return this.domain; } public void set_domain(String domain) { this.domain = domain; } public void unset_domain() { this.domain = null; } /** Returns true if field domain is set (has been assigned a value) and false otherwise */ public boolean is_set_domain() { return this.domain != null; } public void set_domain_isSet(boolean value) { if (!value) { this.domain = null; } } public int get_key_size() { return (this.key == null) ? 0 : this.key.size(); } public java.util.Iterator<ByteBuffer> get_key_iterator() { return (this.key == null) ? 
null : this.key.iterator(); } public void add_to_key(ByteBuffer elem) { if (this.key == null) { this.key = new ArrayList<ByteBuffer>(); } this.key.add(elem); } public List<ByteBuffer> get_key() { return this.key; } public void set_key(List<ByteBuffer> key) { this.key = key; } public void unset_key() { this.key = null; } /** Returns true if field key is set (has been assigned a value) and false otherwise */ public boolean is_set_key() { return this.key != null; } public void set_key_isSet(boolean value) { if (!value) { this.key = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { case DOMAIN: if (value == null) { unset_domain(); } else { set_domain((String)value); } break; case KEY: if (value == null) { unset_key(); } else { set_key((List<ByteBuffer>)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case DOMAIN: return get_domain(); case KEY: return get_key(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case DOMAIN: return is_set_domain(); case KEY: return is_set_key(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof directKryoMultiGet_args) return this.equals((directKryoMultiGet_args)that); return false; } public boolean equals(directKryoMultiGet_args that) { if (that == null) return false; boolean this_present_domain = true && this.is_set_domain(); boolean that_present_domain = true && that.is_set_domain(); if (this_present_domain || that_present_domain) { if (!(this_present_domain && that_present_domain)) return false; if (!this.domain.equals(that.domain)) return false; } boolean this_present_key = true && this.is_set_key(); boolean that_present_key = true && that.is_set_key(); if 
(this_present_key || that_present_key) { if (!(this_present_key && that_present_key)) return false; if (!this.key.equals(that.key)) return false; } return true; } @Override public int hashCode() { HashCodeBuilder builder = new HashCodeBuilder(); boolean present_domain = true && (is_set_domain()); builder.append(present_domain); if (present_domain) builder.append(domain); boolean present_key = true && (is_set_key()); builder.append(present_key); if (present_key) builder.append(key); return builder.toHashCode(); } public int compareTo(directKryoMultiGet_args other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; directKryoMultiGet_args typedOther = (directKryoMultiGet_args)other; lastComparison = Boolean.valueOf(is_set_domain()).compareTo(typedOther.is_set_domain()); if (lastComparison != 0) { return lastComparison; } if (is_set_domain()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.domain, typedOther.domain); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_key()).compareTo(typedOther.is_set_key()); if (lastComparison != 0) { return lastComparison; } if (is_set_key()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.key, typedOther.key); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("directKryoMultiGet_args("); boolean first = true; sb.append("domain:"); if (this.domain == null) { sb.append("null"); } 
else { sb.append(this.domain); } first = false; if (!first) sb.append(", "); sb.append("key:"); if (this.key == null) { sb.append("null"); } else { sb.append(this.key); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class directKryoMultiGet_argsStandardSchemeFactory implements SchemeFactory { public directKryoMultiGet_argsStandardScheme getScheme() { return new directKryoMultiGet_argsStandardScheme(); } } private static class directKryoMultiGet_argsStandardScheme extends StandardScheme<directKryoMultiGet_args> { public void read(org.apache.thrift.protocol.TProtocol iprot, directKryoMultiGet_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // DOMAIN if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // KEY if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list80 = iprot.readListBegin(); struct.key = new 
ArrayList<ByteBuffer>(_list80.size); for (int _i81 = 0; _i81 < _list80.size; ++_i81) { ByteBuffer _elem82; // required _elem82 = iprot.readBinary(); struct.key.add(_elem82); } iprot.readListEnd(); } struct.set_key_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, directKryoMultiGet_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.domain != null) { oprot.writeFieldBegin(DOMAIN_FIELD_DESC); oprot.writeString(struct.domain); oprot.writeFieldEnd(); } if (struct.key != null) { oprot.writeFieldBegin(KEY_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.key.size())); for (ByteBuffer _iter83 : struct.key) { oprot.writeBinary(_iter83); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class directKryoMultiGet_argsTupleSchemeFactory implements SchemeFactory { public directKryoMultiGet_argsTupleScheme getScheme() { return new directKryoMultiGet_argsTupleScheme(); } } private static class directKryoMultiGet_argsTupleScheme extends TupleScheme<directKryoMultiGet_args> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, directKryoMultiGet_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.is_set_domain()) { optionals.set(0); } if (struct.is_set_key()) { optionals.set(1); } oprot.writeBitSet(optionals, 2); if (struct.is_set_domain()) { oprot.writeString(struct.domain); } if (struct.is_set_key()) { { oprot.writeI32(struct.key.size()); for (ByteBuffer _iter84 : struct.key) { oprot.writeBinary(_iter84); } } } 
} @Override public void read(org.apache.thrift.protocol.TProtocol prot, directKryoMultiGet_args struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { struct.domain = iprot.readString(); struct.set_domain_isSet(true); } if (incoming.get(1)) { { org.apache.thrift.protocol.TList _list85 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); struct.key = new ArrayList<ByteBuffer>(_list85.size); for (int _i86 = 0; _i86 < _list85.size; ++_i86) { ByteBuffer _elem87; // required _elem87 = iprot.readBinary(); struct.key.add(_elem87); } } struct.set_key_isSet(true); } } } } public static class directKryoMultiGet_result implements org.apache.thrift.TBase<directKryoMultiGet_result, directKryoMultiGet_result._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("directKryoMultiGet_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0); private static final org.apache.thrift.protocol.TField DNFE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnfe", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final org.apache.thrift.protocol.TField HDE_FIELD_DESC = new org.apache.thrift.protocol.TField("hde", org.apache.thrift.protocol.TType.STRUCT, (short)2); private static final org.apache.thrift.protocol.TField DNLE_FIELD_DESC = new org.apache.thrift.protocol.TField("dnle", org.apache.thrift.protocol.TType.STRUCT, (short)3); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
// directKryoMultiGet_result: generated Thrift result struct for the directKryoMultiGet RPC
// (success: List<Value> @ id 0, plus dnfe/hde/dnle exception fields @ ids 1-3).
// NOTE(review): Thrift-compiler output with mangled formatting — newlines after inline // comments
// are restored below so the comments no longer swallow code; regenerate from the IDL, do not hand-edit.
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new directKryoMultiGet_resultStandardSchemeFactory()); schemes.put(TupleScheme.class, new directKryoMultiGet_resultTupleSchemeFactory()); } private List<elephantdb.generated.Value> success; // required
private elephantdb.generated.DomainNotFoundException dnfe; // required
private elephantdb.generated.HostsDownException hde; // required
private elephantdb.generated.DomainNotLoadedException dnle; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { SUCCESS((short)0, "success"), DNFE((short)1, "dnfe"), HDE((short)2, "hde"), DNLE((short)3, "dnle"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 0: // SUCCESS
return SUCCESS; case 1: // DNFE
return DNFE; case 2: // HDE
return HDE; case 3: // DNLE
return DNLE; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found.
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, elephantdb.generated.Value.class)))); tmpMap.put(_Fields.DNFE, new org.apache.thrift.meta_data.FieldMetaData("dnfe", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); tmpMap.put(_Fields.HDE, new org.apache.thrift.meta_data.FieldMetaData("hde", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); tmpMap.put(_Fields.DNLE, new org.apache.thrift.meta_data.FieldMetaData("dnle", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(directKryoMultiGet_result.class, metaDataMap); } public directKryoMultiGet_result() { } public directKryoMultiGet_result( List<elephantdb.generated.Value> success, elephantdb.generated.DomainNotFoundException dnfe, elephantdb.generated.HostsDownException hde,
elephantdb.generated.DomainNotLoadedException dnle) { this(); this.success = success; this.dnfe = dnfe; this.hde = hde; this.dnle = dnle; } /** * Performs a deep copy on <i>other</i>. */ public directKryoMultiGet_result(directKryoMultiGet_result other) { if (other.is_set_success()) { List<elephantdb.generated.Value> __this__success = new ArrayList<elephantdb.generated.Value>(); for (elephantdb.generated.Value other_element : other.success) { __this__success.add(new elephantdb.generated.Value(other_element)); } this.success = __this__success; } if (other.is_set_dnfe()) { this.dnfe = new elephantdb.generated.DomainNotFoundException(other.dnfe); } if (other.is_set_hde()) { this.hde = new elephantdb.generated.HostsDownException(other.hde); } if (other.is_set_dnle()) { this.dnle = new elephantdb.generated.DomainNotLoadedException(other.dnle); } } public directKryoMultiGet_result deepCopy() { return new directKryoMultiGet_result(this); } @Override public void clear() { this.success = null; this.dnfe = null; this.hde = null; this.dnle = null; } public int get_success_size() { return (this.success == null) ? 0 : this.success.size(); } public java.util.Iterator<elephantdb.generated.Value> get_success_iterator() { return (this.success == null) ?
null : this.success.iterator(); } public void add_to_success(elephantdb.generated.Value elem) { if (this.success == null) { this.success = new ArrayList<elephantdb.generated.Value>(); } this.success.add(elem); } public List<elephantdb.generated.Value> get_success() { return this.success; } public void set_success(List<elephantdb.generated.Value> success) { this.success = success; } public void unset_success() { this.success = null; } /** Returns true if field success is set (has been assigned a value) and false otherwise */ public boolean is_set_success() { return this.success != null; } public void set_success_isSet(boolean value) { if (!value) { this.success = null; } } public elephantdb.generated.DomainNotFoundException get_dnfe() { return this.dnfe; } public void set_dnfe(elephantdb.generated.DomainNotFoundException dnfe) { this.dnfe = dnfe; } public void unset_dnfe() { this.dnfe = null; } /** Returns true if field dnfe is set (has been assigned a value) and false otherwise */ public boolean is_set_dnfe() { return this.dnfe != null; } public void set_dnfe_isSet(boolean value) { if (!value) { this.dnfe = null; } } public elephantdb.generated.HostsDownException get_hde() { return this.hde; } public void set_hde(elephantdb.generated.HostsDownException hde) { this.hde = hde; } public void unset_hde() { this.hde = null; } /** Returns true if field hde is set (has been assigned a value) and false otherwise */ public boolean is_set_hde() { return this.hde != null; } public void set_hde_isSet(boolean value) { if (!value) { this.hde = null; } } public elephantdb.generated.DomainNotLoadedException get_dnle() { return this.dnle; } public void set_dnle(elephantdb.generated.DomainNotLoadedException dnle) { this.dnle = dnle; } public void unset_dnle() { this.dnle = null; } /** Returns true if field dnle is set (has been assigned a value) and false otherwise */ public boolean is_set_dnle() { return this.dnle != null; } public void set_dnle_isSet(boolean value) { if (!value) {
this.dnle = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { case SUCCESS: if (value == null) { unset_success(); } else { set_success((List<elephantdb.generated.Value>)value); } break; case DNFE: if (value == null) { unset_dnfe(); } else { set_dnfe((elephantdb.generated.DomainNotFoundException)value); } break; case HDE: if (value == null) { unset_hde(); } else { set_hde((elephantdb.generated.HostsDownException)value); } break; case DNLE: if (value == null) { unset_dnle(); } else { set_dnle((elephantdb.generated.DomainNotLoadedException)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case SUCCESS: return get_success(); case DNFE: return get_dnfe(); case HDE: return get_hde(); case DNLE: return get_dnle(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case SUCCESS: return is_set_success(); case DNFE: return is_set_dnfe(); case HDE: return is_set_hde(); case DNLE: return is_set_dnle(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof directKryoMultiGet_result) return this.equals((directKryoMultiGet_result)that); return false; } public boolean equals(directKryoMultiGet_result that) { if (that == null) return false; boolean this_present_success = true && this.is_set_success(); boolean that_present_success = true && that.is_set_success(); if (this_present_success || that_present_success) { if (!(this_present_success && that_present_success)) return false; if (!this.success.equals(that.success)) return false; } boolean this_present_dnfe = true && this.is_set_dnfe(); boolean that_present_dnfe = true && that.is_set_dnfe(); if (this_present_dnfe || that_present_dnfe) { if (!(this_present_dnfe &&
that_present_dnfe)) return false; if (!this.dnfe.equals(that.dnfe)) return false; } boolean this_present_hde = true && this.is_set_hde(); boolean that_present_hde = true && that.is_set_hde(); if (this_present_hde || that_present_hde) { if (!(this_present_hde && that_present_hde)) return false; if (!this.hde.equals(that.hde)) return false; } boolean this_present_dnle = true && this.is_set_dnle(); boolean that_present_dnle = true && that.is_set_dnle(); if (this_present_dnle || that_present_dnle) { if (!(this_present_dnle && that_present_dnle)) return false; if (!this.dnle.equals(that.dnle)) return false; } return true; } @Override public int hashCode() { HashCodeBuilder builder = new HashCodeBuilder(); boolean present_success = true && (is_set_success()); builder.append(present_success); if (present_success) builder.append(success); boolean present_dnfe = true && (is_set_dnfe()); builder.append(present_dnfe); if (present_dnfe) builder.append(dnfe); boolean present_hde = true && (is_set_hde()); builder.append(present_hde); if (present_hde) builder.append(hde); boolean present_dnle = true && (is_set_dnle()); builder.append(present_dnle); if (present_dnle) builder.append(dnle); return builder.toHashCode(); } public int compareTo(directKryoMultiGet_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; directKryoMultiGet_result typedOther = (directKryoMultiGet_result)other; lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success()); if (lastComparison != 0) { return lastComparison; } if (is_set_success()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnfe()).compareTo(typedOther.is_set_dnfe()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnfe()) { lastComparison =
org.apache.thrift.TBaseHelper.compareTo(this.dnfe, typedOther.dnfe); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_hde()).compareTo(typedOther.is_set_hde()); if (lastComparison != 0) { return lastComparison; } if (is_set_hde()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hde, typedOther.hde); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_dnle()).compareTo(typedOther.is_set_dnle()); if (lastComparison != 0) { return lastComparison; } if (is_set_dnle()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dnle, typedOther.dnle); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("directKryoMultiGet_result("); boolean first = true; sb.append("success:"); if (this.success == null) { sb.append("null"); } else { sb.append(this.success); } first = false; if (!first) sb.append(", "); sb.append("dnfe:"); if (this.dnfe == null) { sb.append("null"); } else { sb.append(this.dnfe); } first = false; if (!first) sb.append(", "); sb.append("hde:"); if (this.hde == null) { sb.append("null"); } else { sb.append(this.hde); } first = false; if (!first) sb.append(", "); sb.append("dnle:"); if (this.dnle == null) { sb.append("null"); } else { sb.append(this.dnle); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields
} private void writeObject(java.io.ObjectOutputStream out)
throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class directKryoMultiGet_resultStandardSchemeFactory implements SchemeFactory { public directKryoMultiGet_resultStandardScheme getScheme() { return new directKryoMultiGet_resultStandardScheme(); } } private static class directKryoMultiGet_resultStandardScheme extends StandardScheme<directKryoMultiGet_result> { public void read(org.apache.thrift.protocol.TProtocol iprot, directKryoMultiGet_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list88 = iprot.readListBegin(); struct.success = new ArrayList<elephantdb.generated.Value>(_list88.size); for (int _i89 = 0; _i89 < _list88.size; ++_i89) { elephantdb.generated.Value _elem90; // required
_elem90 = new elephantdb.generated.Value(); _elem90.read(iprot); struct.success.add(_elem90); } iprot.readListEnd(); } struct.set_success_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 1: // DNFE
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // HDE
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // DNLE
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, directKryoMultiGet_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.success != null) { oprot.writeFieldBegin(SUCCESS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size())); for (elephantdb.generated.Value _iter91 : struct.success) { _iter91.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } if (struct.dnfe != null) { oprot.writeFieldBegin(DNFE_FIELD_DESC); struct.dnfe.write(oprot); oprot.writeFieldEnd(); } if (struct.hde != null) { oprot.writeFieldBegin(HDE_FIELD_DESC); struct.hde.write(oprot); oprot.writeFieldEnd(); } if (struct.dnle != null) { oprot.writeFieldBegin(DNLE_FIELD_DESC); struct.dnle.write(oprot); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class directKryoMultiGet_resultTupleSchemeFactory implements SchemeFactory { public directKryoMultiGet_resultTupleScheme getScheme() { return new directKryoMultiGet_resultTupleScheme(); } } private static class directKryoMultiGet_resultTupleScheme
extends TupleScheme<directKryoMultiGet_result> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, directKryoMultiGet_result struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.is_set_success()) { optionals.set(0); } if (struct.is_set_dnfe()) { optionals.set(1); } if (struct.is_set_hde()) { optionals.set(2); } if (struct.is_set_dnle()) { optionals.set(3); } oprot.writeBitSet(optionals, 4); if (struct.is_set_success()) { { oprot.writeI32(struct.success.size()); for (elephantdb.generated.Value _iter92 : struct.success) { _iter92.write(oprot); } } } if (struct.is_set_dnfe()) { struct.dnfe.write(oprot); } if (struct.is_set_hde()) { struct.hde.write(oprot); } if (struct.is_set_dnle()) { struct.dnle.write(oprot); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, directKryoMultiGet_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(4); if (incoming.get(0)) { { org.apache.thrift.protocol.TList _list93 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.success = new ArrayList<elephantdb.generated.Value>(_list93.size); for (int _i94 = 0; _i94 < _list93.size; ++_i94) { elephantdb.generated.Value _elem95; // required
_elem95 = new elephantdb.generated.Value(); _elem95.read(iprot); struct.success.add(_elem95); } } struct.set_success_isSet(true); } if (incoming.get(1)) { struct.dnfe = new elephantdb.generated.DomainNotFoundException(); struct.dnfe.read(iprot); struct.set_dnfe_isSet(true); } if (incoming.get(2)) { struct.hde = new elephantdb.generated.HostsDownException(); struct.hde.read(iprot); struct.set_hde_isSet(true); } if (incoming.get(3)) { struct.dnle = new elephantdb.generated.DomainNotLoadedException(); struct.dnle.read(iprot); struct.set_dnle_isSet(true); } } } } }
/*
 *
 *
 *    Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements.
 *    See the NOTICE file distributed with this work for additional information regarding copyright ownership.
 *    The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use
 *    this file except in compliance with the License. You may obtain a copy of the License at
 *
 *            http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software distributed under the License is
 *    distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and limitations under the License.
 *
 *
 */

package eu.amidst.core.datastream.filereaders.arffFileReader;

import eu.amidst.core.datastream.Attribute;
import eu.amidst.core.datastream.Attributes;
import eu.amidst.core.datastream.filereaders.DataFileReader;
import eu.amidst.core.datastream.filereaders.DataRow;
import eu.amidst.core.variables.StateSpaceTypeEnum;
import eu.amidst.core.variables.stateSpaceTypes.FiniteStateSpace;
import eu.amidst.core.variables.stateSpaceTypes.SparseFiniteStateSpace;
import eu.amidst.core.variables.stateSpaceTypes.RealStateSpace;

import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

/**
 * This class implements the interface {@link DataFileReader} and defines an ARFF
 * (Weka Attribute-Relation File Format) data reader.
 *
 * <p>The header of the file (everything up to and including the {@code @data} line)
 * is parsed eagerly by {@link #loadFromFile(String)}; the data section is exposed
 * lazily as a {@link Stream} of {@link DataRow} by {@link #stream()}.</p>
 */
public class ARFFDataReader implements DataFileReader {

    /** Represents the relation name (taken from the {@code @relation} header line). */
    String relationName;

    /** Represents the list of {@link Attributes} declared by the {@code @attribute} lines. */
    private Attributes attributes;

    /** Number of non-empty, non-comment header lines up to and including {@code @data};
     *  used by {@link #stream()} to skip the header when reading data rows. */
    private int dataLineCount;

    /** Represents the path of the ARFF file to be read. */
    private Path pathFile;

    /** Represents an array of {@link StateSpaceTypeEnum} for the corresponding list of
     *  {@link Attributes}, indexed by attribute index. */
    private StateSpaceTypeEnum[] stateSpace;

    /** Represents a {@code Stream} of {@code DataRow} over the data section. */
    private Stream<DataRow> streamString;

    /**
     * Creates an {@link Attribute} from a given index and header line.
     *
     * <p>Supported state-space declarations: {@code real}/{@code numeric} (optionally
     * with a {@code [min,max]} range), a finite set of states in braces
     * (e.g. {@code {a,b,c}}), and {@code SparseMultinomial N}.</p>
     *
     * @param index an {@code int} that represents the index of column to which the Attribute refers.
     * @param line a {@code String} starting with "@attribute" and including the name of the
     *             Attribute and its state space type.
     * @return an {@link Attribute} object.
     * @throws IllegalArgumentException if the line does not start with {@code @attribute}.
     * @throws UnsupportedOperationException if the state-space declaration is not recognized.
     */
    public static Attribute createAttributeFromLine(int index, String line){
        String[] parts = line.split("\\s+|\t+");

        if (!parts[0].trim().startsWith("@attribute"))
            throw new IllegalArgumentException("Attribute line does not start with @attribute");

        String name = parts[1].trim();
        // Strip surrounding single quotes (Weka allows quoted attribute names).
        name = name.replaceAll("^'+", "");
        name = name.replaceAll("'+$", "");

        parts[2]=parts[2].trim();

        if (parts[2].equals("real") || parts[2].equals("numeric")){
            if(parts.length>3 && parts[3].startsWith("[")){
                // Bounded real attribute: re-extract the "[min,max]" token from the raw
                // line because the earlier split may have cut it at a whitespace.
                parts[3]=line.substring(line.indexOf("[")).replaceAll("\t", "");
                double min = Double.parseDouble(parts[3].substring(parts[3].indexOf("[")+1,parts[3].indexOf(",")));
                double max = Double.parseDouble(parts[3].substring(parts[3].indexOf(",")+1,parts[3].indexOf("]")));
                return new Attribute(index, name, new RealStateSpace(min,max));
            }else
                return new Attribute(index, name, new RealStateSpace());
        }else if (parts[2].startsWith("{")){
            // Finite states: re-extract the full "{...}" token from the raw line.
            parts[2]=line.substring(line.indexOf("{")).replaceAll("\t", "");
            String[] states = parts[2].substring(1,parts[2].length()-1).split(",");
            List<String> statesNames = Arrays.stream(states).map(String::trim).collect(Collectors.toList());
            return new Attribute(index, name, new FiniteStateSpace(statesNames));
        }else if (parts[2].equals("SparseMultinomial")) {
            return new Attribute(index, name, new SparseFiniteStateSpace(Integer.parseInt(parts[3])));
        }else{
            throw new UnsupportedOperationException("We can not create an attribute from this line: "+line);
        }
    }

    /**
     * {@inheritDoc}
     *
     * <p>Parses the ARFF header in three passes: the {@code @relation} line, the
     * position of the {@code @data} line, and the {@code @attribute} lines.
     * Each pass opens its own {@code Files.lines} stream and closes it with
     * try-with-resources (the stream holds an open file handle).</p>
     *
     * @throws IllegalArgumentException if the file lacks a {@code @relation} or {@code @data} line.
     * @throws UncheckedIOException on I/O failure.
     */
    @Override
    public void loadFromFile(String pathString) {
        pathFile = Paths.get(pathString);
        try {
            Optional<String> atRelation;
            try (Stream<String> lines = Files.lines(pathFile)) {
                atRelation = lines
                        .map(String::trim)
                        .filter(w -> !w.isEmpty())
                        .filter(w -> !w.startsWith("%"))
                        .limit(1)
                        .filter(line -> line.startsWith("@relation"))
                        .findFirst();
            }

            if (!atRelation.isPresent())
                throw new IllegalArgumentException("ARFF file does not start with a @relation line.");

            relationName = atRelation.get().split(" ")[1];

            // Count non-empty, non-comment lines up to and including "@data"
            // (peek runs before the @data filter, so the @data line is counted too).
            final int[] count = {0};
            Optional<String> atData;
            try (Stream<String> lines = Files.lines(pathFile)) {
                atData = lines
                        .map(String::trim)
                        .filter(w -> !w.isEmpty())
                        .filter(w -> !w.startsWith("%"))
                        .peek(line -> count[0]++)
                        .filter(line -> line.startsWith("@data"))
                        .findFirst();
            }

            if (!atData.isPresent())
                throw new IllegalArgumentException("ARFF file does not contain @data line.");

            dataLineCount = count[0];

            List<String> attLines;
            try (Stream<String> lines = Files.lines(pathFile)) {
                attLines = lines
                        .map(String::trim)
                        .filter(w -> !w.isEmpty())
                        .filter(w -> !w.startsWith("%"))
                        .limit(dataLineCount)
                        .filter(line -> line.startsWith("@attribute"))
                        .collect(Collectors.toList());
            }

            List<Attribute> atts = IntStream.range(0,attLines.size())
                    .mapToObj( i -> createAttributeFromLine(i, attLines.get(i)))
                    .collect(Collectors.toList());
            this.attributes = new Attributes(atts);

            // BUG FIX: this allocation was commented out, leaving stateSpace null and
            // causing a guaranteed NullPointerException in the loop below.
            stateSpace = new StateSpaceTypeEnum[atts.size()];
            for (Attribute att: atts){
                stateSpace[att.getIndex()] = att.getStateSpaceType().getStateSpaceTypeEnum();
            }
        }catch (IOException ex){
            throw new UncheckedIOException(ex);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Attributes getAttributes() {
        return this.attributes;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Accepts any non-directory path whose last dot-separated segment is
     * exactly {@code "arff"}.</p>
     */
    @Override
    public boolean doesItReadThisFile(String fileName) {
        if (new File(fileName).isDirectory())
            return false;
        String[] parts = fileName.split("\\.");
        return parts[parts.length-1].equals("arff");
    }

    /**
     * {@inheritDoc}
     *
     * <p>The returned stream wraps an open file handle; the caller is responsible
     * for closing it. Note that {@code dataLineCount} counts trimmed lines while
     * this pipeline filters untrimmed ones — the counts agree only when header
     * lines carry no leading whitespace (TODO confirm against the file format
     * actually produced upstream).</p>
     */
    //TODO: In principle the "if" should be there to enforce a reset of the stream. But there is a bug.
    @Override
    public Stream<DataRow> stream() {
        //if (streamString ==null) {
            try {
                streamString = Files.lines(pathFile)
                        .filter(w -> !w.isEmpty())
                        .filter(w -> !w.startsWith("%"))
                        .skip(this.dataLineCount)
                        .filter(w -> !w.isEmpty())
                        .map(line -> new DataRowWeka(this.attributes, line));
            } catch (IOException ex) {
                throw new UncheckedIOException(ex);
            }
        //}
        return streamString;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void restart(){
        streamString = null;
    }
}
package br.jus.tst.esocialjt.dominio;

import java.io.Serializable;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;

import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;

import com.fasterxml.jackson.annotation.JsonIgnore;
import org.hibernate.annotations.Type;

/**
 * JPA entity mapping the {@code EST_ENVIO_EVENTO} table: one submission attempt of an
 * eSocial event, carrying its batch ({@code Lote}), processing response code, processing
 * errors and any internal error text.
 *
 * <p>Field names are part of the persistence contract (column mapping and the
 * {@code EnvioEvento.findAll} named query) — do not rename them.</p>
 */
@Entity
@Table(name = "EST_ENVIO_EVENTO")
@NamedQuery(name = "EnvioEvento.findAll", query = "SELECT e FROM EnvioEvento e")
public class EnvioEvento implements Serializable {
	private static final long serialVersionUID = 1L;

	// Primary key, assigned from the SEQ_ENV_EVT_ID database sequence.
	@Id
	@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQ_ENV_EVT_ID")
	@SequenceGenerator(name = "SEQ_ENV_EVT_ID", sequenceName = "SEQ_ENV_EVT_ID", allocationSize = 1)
	@Column(name = "COD_ENVIO_EVENTO")
	private Long id;

	// Owning event; hidden from JSON serialization to avoid a back-reference cycle.
	// NOTE(review): CascadeType.ALL on a @ManyToOne propagates removes to the parent
	// Evento — confirm this is intentional.
	@JsonIgnore
	@ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
	@JoinColumn(name = "COD_EVENTO", referencedColumnName = "COD_EVENTO")
	private Evento evento;

	// Batch this submission belongs to; eagerly loaded.
	// NOTE(review): same CascadeType.ALL caveat as above.
	@ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.EAGER)
	@JoinColumn(name = "COD_LOTE", referencedColumnName = "COD_LOTE")
	private Lote lote;

	// Processing response code returned for this submission.
	@ManyToOne
	@JoinColumn(name = "COD_RESPOSTA", referencedColumnName = "COD_IDENTIFICADOR")
	private CodigoResposta codRespostaProcessamento;

	// Date the event XML was generated (date precision only).
	@Temporal(TemporalType.DATE)
	@Column(name = "DTA_GERACAO_EVENTO")
	private Date dtaGeracaoEvento;

	// Layout/schema version of the submitted event.
	@Column(name = "NUM_VERSAO")
	private String versao;

	// Free-text internal error description, stored as a CLOB/TEXT column.
	@Lob
	@Type(type = "org.hibernate.type.TextType")
	@Column(name = "TXT_ERRO_INTERNO")
	private String erroInterno;

	// Event XML payload; not persisted and not serialized to JSON.
	@Transient
	@JsonIgnore
	private String xmlEvento;

	// Processing errors reported for this submission; cascade ALL so they are
	// saved/removed together with the submission record.
	@OneToMany(mappedBy = "envioEvento", fetch = FetchType.EAGER, cascade = CascadeType.ALL)
	private Set<ErroProcessamento> errosProcessamento = new HashSet<>();

	/** No-arg constructor required by JPA. */
	public EnvioEvento() {
	}

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	public Set<ErroProcessamento> getErrosProcessamento() {
		return errosProcessamento;
	}

	public void setErrosProcessamento(Set<ErroProcessamento> errosProcessamento) {
		this.errosProcessamento = errosProcessamento;
	}

	public Evento getEvento() {
		return evento;
	}

	public void setEvento(Evento evento) {
		this.evento = evento;
	}

	public Lote getLote() {
		return lote;
	}

	public void setLote(Lote lote) {
		this.lote = lote;
	}

	public CodigoResposta getCodRespostaProcessamento() {
		return codRespostaProcessamento;
	}

	public void setCodRespostaProcessamento(CodigoResposta codRespostaProcessamento) {
		this.codRespostaProcessamento = codRespostaProcessamento;
	}

	public Date getDtaGeracaoEvento() {
		return dtaGeracaoEvento;
	}

	public void setDtaGeracaoEvento(Date dtaGeracaoEvento) {
		this.dtaGeracaoEvento = dtaGeracaoEvento;
	}

	public String getVersao() {
		return versao;
	}

	public void setVersao(String versao) {
		this.versao = versao;
	}

	public String getErroInterno() {
		return erroInterno;
	}

	public void setErroInterno(String erroInterno) {
		this.erroInterno = erroInterno;
	}

	public String getXmlEvento() {
		return xmlEvento;
	}

	public void setXmlEvento(String xmlEvento) {
		this.xmlEvento = xmlEvento;
	}
}
package com.karumien.cloud.ais.api;

import java.io.IOException;
import java.time.DayOfWeek;
import java.time.LocalDate;
import java.time.temporal.TemporalAdjusters;
import java.util.Arrays;
import java.util.List;

import javax.servlet.http.HttpServletResponse;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;

import org.modelmapper.ModelMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import com.karumien.client.adochazka.schemas.Uzivatel;
import com.karumien.cloud.ais.api.handler.WorkApi;
import com.karumien.cloud.ais.api.model.PassDTO;
import com.karumien.cloud.ais.api.model.UserInfoDTO;
import com.karumien.cloud.ais.api.model.WorkDTO;
import com.karumien.cloud.ais.api.model.WorkDayDTO;
import com.karumien.cloud.ais.api.model.WorkDayTypeDTO;
import com.karumien.cloud.ais.api.model.WorkHourDTO;
import com.karumien.cloud.ais.api.model.WorkMonthDTO;
import com.karumien.cloud.ais.api.model.WorkTypeDTO;
import com.karumien.cloud.ais.service.AISService;

/**
 * REST API for AIS Services.
 *
 * @author <a href="miroslav.svoboda@karumien.com">Miroslav Svoboda</a>
 * @since 1.0, 15. 4. 2019 18:54:23
 */
@RestController
@RequestMapping(path = "/api")
public class AISWorkRestController implements WorkApi {

    private static final String ATTACHMENT_FILENAME = "attachment; filename=";
    private static final String CONTENT_DISPOSITION = "Content-disposition";

    /** MediaType Application Excel Openformat */
    private static final String APPLICATION_EXCEL_VALUE = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet";

    @Autowired
    private ModelMapper mapper;

    @Autowired
    private AISService aisService;

    /** When true, generated links are relative (app served behind the JSP front); otherwise absolute. */
    @Value(value = "${jsp.redirect:false}")
    private Boolean redirect;

    /**
     * {@inheritDoc}
     */
    @Override
    public ResponseEntity<WorkMonthDTO> getWorkDays(@NotNull @Valid String username, @Valid Integer year,
            @Valid Integer month) {
        return new ResponseEntity<>(
                mapper.map(aisService.getWorkDays(year, month, username), WorkMonthDTO.class), HttpStatus.OK);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ResponseEntity<List<UserInfoDTO>> getWorkUsers(@Valid String username) {
        return new ResponseEntity<>(aisService.getWorkUsers(username), HttpStatus.OK);
    }

    /**
     * GET /work/export/xls : Generate export workdays.
     *
     * <p>Year/month default to the current date; username defaults to the role.</p>
     *
     * @param response
     *            {@link HttpServletResponse}
     * @throws IOException
     *             on IO error
     */
    @RequestMapping(value = "/work/export", produces = { APPLICATION_EXCEL_VALUE }, method = RequestMethod.POST)
    public void exportWorkDays(@NotNull @Valid @RequestParam(value = "role", required = true) String role,
            @Valid @RequestParam(value = "username", required = false) String username,
            @Valid @RequestParam(value = "month", required = false) Integer month,
            @Valid @RequestParam(value = "year", required = false) Integer year,
            HttpServletResponse response) throws IOException {

        if (year == null) {
            year = LocalDate.now().getYear();
        }
        if (month == null) {
            month = LocalDate.now().getMonthValue();
        }
        if (username == null) {
            username = role;
        }

        String yearmonth = year + "." + (month < 10 ? "0" : "") + month;
        response.setHeader(CONTENT_DISPOSITION, ATTACHMENT_FILENAME + yearmonth + "-" + username + ".xlsx");
        aisService.exportWorkDays(year, month, username, response.getOutputStream());
    }

    /**
     * HTML formated Users on site.
     *
     * @deprecated will be replaced by UI application
     * @return HTML table of Users on site
     */
    @RequestMapping(value = "/work/html", produces = { "text/html" }, method = RequestMethod.GET)
    @Deprecated
    public String getUserMonthHTML(@NotNull @Valid @RequestParam(value = "role", required = true) String role,
            @Valid @RequestParam(value = "username", required = false) String username,
            @Valid @RequestParam(value = "month", required = false) Integer month,
            @Valid @RequestParam(value = "year", required = false) Integer year,
            @Valid @RequestParam(value = "day", required = false) Integer day) {

        if (year == null) {
            year = LocalDate.now().getYear();
        }
        if (month == null) {
            month = LocalDate.now().getMonthValue();
        }
        if (username == null) {
            username = role;
        }

        // NOTE(review): presumably December is shown against the previous year until
        // December actually starts — confirm the intended fiscal-year behavior.
        if (month > 11 && LocalDate.now().getMonthValue() <= 11) {
            year = LocalDate.now().getYear() - 1;
        }

        LocalDate actualMonthDay = LocalDate.now();
        LocalDate previousMonthDay = LocalDate.now().withDayOfMonth(1).minusMonths(1);
        LocalDate selectedMonthDay = LocalDate.of(year, month, 1);

        // Editing is allowed only for the current and the immediately previous month.
        boolean currentMonth = (actualMonthDay.getYear() == year && actualMonthDay.getMonthValue() == month);
        boolean previousMonth = (previousMonthDay.getYear() == year && previousMonthDay.getMonthValue() == month);
        boolean readonly = !(currentMonth || previousMonth);

        UserInfoDTO selectedUser = mapper.map(aisService.getUser(username), UserInfoDTO.class);
        UserInfoDTO roleUser = mapper.map(aisService.getUser(role), UserInfoDTO.class);
        if (roleUser == null) {
            roleUser = selectedUser;
        }

        Uzivatel uzivatel = aisService.getUzivatel(username);
        String baseUrl = (Boolean.TRUE.equals(redirect) ? "" : "http://192.168.2.222:2222");

        // Day-detail view: table of turnstile accesses for one day.
        if (day != null && uzivatel != null) {
            selectedMonthDay = LocalDate.of(year, month, day);
            StringBuilder sb = new StringBuilder("<b>" + aisService.date(selectedMonthDay) + "</b> " + selectedUser.getName());

            List<PassDTO> accesses = aisService.getAccesses(selectedMonthDay, username);
            sb.append("<table cellspacing=\"5\" class=\"aditus\" border=\"0\">");
            sb.append("<tr><td class=\"i24_tableHead menuline\">ID</td>")
              .append("<td class=\"i24_tableHead menuline\">Čas</td>")
              .append("<td class=\"i24_tableHead menuline\">Typ</td>")
              .append("<td class=\"i24_tableHead menuline\">Klávesa</td>")
              .append("<td class=\"i24_tableHead menuline\">Činnost</td>")
              .append("</tr>");

            for (PassDTO access : accesses) {
                sb.append("<tr><td class=\"i24_tableItem\">").append(access.getId()).append("</td>");
                sb.append("<td class=\"i24_tableItem\">").append(aisService.time(access.getDate())).append("</td>");
                sb.append("<td class=\"i24_tableItem\">").append(access.getChip()).append("</td>");
                sb.append("<td class=\"i24_tableItem\">").append(access.getCategoryId()).append("</td>");
                sb.append("<td class=\"i24_tableItem\">").append(access.getCategory()).append("</td>");
                sb.append("</tr>");
            }
            sb.append("</table>");
            return sb.toString();
        }

        // Month view: inline JS that POSTs a single work row back to /api/work/update.
        StringBuilder sb = new StringBuilder("<script type=\"text/javascript\">"
                + "function updateWork(form, username) {"
                + " var xhttp = new XMLHttpRequest();"
                + " xhttp.open(\"POST\", \"" + (Boolean.TRUE.equals(redirect) ? "" : "http://192.168.2.222:2222")
                + "/api/work/update?username=" + "\"+username, true);"
                + " xhttp.setRequestHeader(\"Content-type\", \"application/json\");"
                + " if (form.hours.value && form.workType.value != 'NONE' && form.hours2.value && form.workType2.value != 'NONE'"
                + " || !form.hours.value && form.workType.value == 'NONE' && form.hours2.value && form.workType2.value != 'NONE'"
                + " || form.hours.value && form.workType.value != 'NONE' && !form.hours2.value && form.workType2.value == 'NONE'"
                + " || !form.hours.value && form.workType.value == 'NONE' && !form.hours2.value && form.workType2.value == 'NONE'"
                + " || form.description.value != form.originalDescription.value"
                + ") {"
                + " xhttp.send('{ \"id\": ' + form.id.value + ',"
                + " \"hoursText\": ' + (!form.hours.value ? null : '\"' + form.hours.value + '\"') + ',"
                + " \"hours2Text\": ' + (!form.hours2.value ? null : '\"' + form.hours2.value + '\"') + ',"
                + " \"workType\": \"' + form.workType.value + '\","
                + " \"workType2\": \"' + form.workType2.value + '\","
                + " \"description\": \"' + form.description.value + '\" }');"
                + " }}</script>");

        sb.append("<table cellspacing=\"5\" class=\"aditus\" border=\"0\"><form action=\""
                + (Boolean.TRUE.equals(redirect) ? "/api/work/html" : "/ais.jsp") + "\" method=\"get\">");

        // Month selector.
        sb.append("<tr><td colspan=\"6\"><select name=\"month\" class=\"unvisiblelines\" onchange=\"this.form.submit()\">");
        List<String> months = Arrays.asList("leden", "únor", "březen", "duben", "květen", "červen", "červenec",
                "srpen", "září", "říjen", "listopad", "prosinec");
        for (int i = 0; i < 12; i++) {
            sb.append("<option value=\"").append(i + 1).append("\"").append(month.equals(i + 1) ? " selected" : "");
            sb.append(">").append(months.get(i)).append("</option>");
        }

        // Year selector (display only — the submitted year comes from the hidden input below).
        sb.append("</select><select class=\"unvisiblelines\" onchange=\"this.form.submit()\">");
        for (int i = 2020; i <= LocalDate.now().getYear(); i++) {
            // BUG FIX: compare the option's own year (was LocalDate.now().getYear() == year,
            // which marked every option "selected" whenever the current year was displayed).
            sb.append("<option" + (i == year ? " selected" : "") + ">" + i + "</option>");
        }
        sb.append("</select>");
        sb.append("<input type=\"hidden\" name=\"year\" value=\"").append(year).append("\">");
        sb.append("<input type=\"hidden\" name=\"role\" value=\"").append(role).append("\">");
        sb.append("</td>");

        // User selector.
        sb.append("<td align=\"right\"><select class=\"unvisiblelines\" name=\"username\" onchange=\"this.form.submit()\">");
        for (UserInfoDTO user : aisService.getWorkUsers(role)) {
            sb.append("<option value=\"").append(user.getUsername()).append("\"").append(username.equals(user.getUsername()) ? " selected" : "");
            sb.append(">").append(user.getName()).append("</option>");
        }

        // Approval is visible to admins and HIP roles only.
        boolean schval = Boolean.TRUE.equals(roleUser.isRoleAdmin()) || Boolean.TRUE.equals(roleUser.isRoleHip());

        sb.append("</select></td><td></td><td align=\"right\">");
        if (!readonly && schval && !currentMonth && selectedMonthDay.isBefore(actualMonthDay)) {
            sb.append("<a href=\"#\" class=\"buttonSubmit\" title=\"Schválit vybraný měsíc dané osobě\">&nbsp; Schválit</a>");
        }
        sb.append("&nbsp;<a href=\"/works.do?action=list&object=native_works&clear=1\" target=\"_parent\" class=\"buttonSubmit\">&nbsp; Výkazy zakázky</a></td></tr></form>");

        // Export form + table header.
        sb.append("<form id=\"exportForm\" action=\"" + (Boolean.TRUE.equals(redirect) ? "" : "http://192.168.2.222:2222")
                + "/api/work/export?username=" + username + "&role=" + role + "&year=" + year + "&month=" + month
                + "\" method=\"post\">");
        sb.append("<tr>");
        sb.append("<td class=\"i24_tableHead menuline\" align=\"right\">Datum</td>"
                + "<td class=\"i24_tableHead menuline\">Příchod</td>"
                + "<td class=\"i24_tableHead menuline\" align=\"center\">Oběd/přest.</td>"
                + "<td class=\"i24_tableHead menuline\">Odchod</td>"
                + "<td class=\"i24_tableHead menuline\" align=\"left\">Celkem</td>"
                + "<td class=\"i24_tableHead menuline\" align=\"right\">Saldo</td>"
                + "<td class=\"i24_tableHead menuline\" style=\"text-align: right\">Výkazy (").append(username).append(")");
        sb.append("<a href=\"#\" onclick=\"document.getElementById('exportForm').submit();\">");
        sb.append("<img onclick=\"this.form.submit();\" src=\"/img/printer.gif\" style=\"position: relative; top: 4px; margin-left: 6px; width: 15px; height: 16px;\" border=\"0\"/></a>");
        sb.append("</td><td></td><td class=\"i24_tableHead menuline\">&nbsp; Poznámka (hodiny/zakázka)</td></tr></form>");

        // Fond is stored as a percentage (e.g. 100 = full-time).
        double fond = selectedUser.getFond() != null ? selectedUser.getFond() / 100d : 1d;
        double saldo = 0;
        double unpaid = 0;

        WorkMonthDTO workMonthDTO = aisService.getWorkDays(year, month, username);
        for (WorkDayDTO workDay : workMonthDTO.getWorkDays()) {

            WorkDTO work = workDay.getWork();
            sb.append("<tr>");
            if (work != null) {
                sb.append("<form name=\"form" + work.getId() + "\">");
            }

            // Past workdays link to the day-detail (access list) view.
            String link = "";
            String linkEnd = "";
            if (workDay.getWorkDayType() == WorkDayTypeDTO.WORKDAY && isPast(actualMonthDay, workDay.getDate())) {
                link = "<a href=\"" + baseUrl + "/api/work/html?month=" + month + "&year=" + year + "&day="
                        + workDay.getDate().getDayOfMonth() + "&role=" + role + "&username=" + username
                        + "\" target=\"workday\">";
                linkEnd = "</a>";
            }

            sb.append("<td class=\"i24_tableItem\"><i>" + link).append(aisService.date(workDay.getDate()))
                    .append("</i>" + linkEnd + "</td>");

            if (workDay.getWorkDayType() == WorkDayTypeDTO.NATIONAL_HOLIDAY) {
                sb.append("<td class=\"i24_tableItem\" colspan=\"8\">").append(getDescription(workDay.getWorkDayType())).append("</td>");
                continue;
            }

            sb.append("<td class=\"i24_tableItem\"><b>").append(hoursOnly(workDay.getWorkStart())).append("</b></td>");
            if (workDay.getWorkDayType() == WorkDayTypeDTO.WORKDAY && isPast(actualMonthDay, workDay.getDate())) {
                sb.append("<td class=\"i24_tableItem\" align=\"center\">")
                        .append(corrected(workDay.getLunch(), workDay.getOriginalLunch())).append("</td>");
            }
            sb.append("<td class=\"i24_tableItem\"><b>").append(hoursOnly(workDay.getWorkEnd())).append("</b></td>");

            if (workDay.getDate().getDayOfWeek() != DayOfWeek.SATURDAY
                    && workDay.getDate().getDayOfWeek() != DayOfWeek.SUNDAY
                    && workDay.getWorkDayType() != WorkDayTypeDTO.NATIONAL_HOLIDAY) {

                if (workDay.getWork() == null) {
                    continue;
                }

                if (workDay.getWorkDayType() == WorkDayTypeDTO.WORKDAY && isPast(actualMonthDay, workDay.getDate())) {

                    // Tooltip with trip/sickness/paid-leave/unapproved-overtime breakdown.
                    String adv = "";
                    if (workDay.getTrip() != null && workDay.getTrip() > 0) {
                        adv += "Služební cesta: " + aisService.hours(workDay.getTrip(), false) + "\n";
                    }
                    if (workDay.getSick() != null && workDay.getSick() > 0) {
                        adv += "Lékař/Nemoc : " + aisService.hours(workDay.getSick(), false) + "\n";
                    }
                    if (workDay.getPayed() != null && workDay.getPayed() > 0) {
                        adv += "Placené volno : " + aisService.hours(workDay.getPayed(), false) + "\n";
                    }
                    if (workDay.getUnpaid() != null && workDay.getUnpaid() > 0 && schval) {
                        adv += "Neuznaný přesčas : " + aisService.hours(workDay.getUnpaid(), false) + "\n";
                        unpaid += workDay.getUnpaid();
                    }

                    sb.append("<td class=\"i24_tableItem\" align=\"left\"><div "
                            + (adv.length() > 0 ? "title =\"" + adv + "\"" : "") + "><b>")
                            .append(aisService.hours(workDay.getWorkedHours()))
                            .append("</b>" + (adv.length() > 0 ? "<span class=\"i24_tableHead menuline\"> (?)</span>" : "")
                                    + "</div></td>");

                    double actualSaldo = workDay.getSaldo() != null ? workDay.getSaldo() : 0;

                    // Holiday correction
                    if (work.getWorkType() == WorkTypeDTO.HOLIDAY || work.getWorkType() == WorkTypeDTO.PAID_LEAVE) {
                        actualSaldo += work.getHours() != null ? work.getHours() : 0;
                    }
                    if (work.getWorkType2() == WorkTypeDTO.HOLIDAY || work.getWorkType2() == WorkTypeDTO.PAID_LEAVE) {
                        actualSaldo += work.getHours2() != null ? work.getHours2() : 0;
                    }

                    sb.append("<td class=\"i24_tableItem\" align=\"right\">").append(work != null ? saldo(actualSaldo) : "").append("</td>");
                    saldo += actualSaldo;

                } else {
                    sb.append("<td class=\"i24_tableItem\" align=\"right\">").append("</td>");
                    // BUG FIX: closing tag was emitted as "<td>" instead of "</td>".
                    sb.append("<td class=\"i24_tableItem\" align=\"right\">").append("</td>");
                }

                if (readonly) {
                    sb.append("<td class=\"i24_tableItem\"><input class=\"unvisiblelines\" type=\"text\" readonly=\"readonly\" name=\"hours\" style=\"width: 35px; margin-left:10px\" value=\"")
                            .append(work != null ? aisService.hours(work.getHours()) : "")
                            .append("\"><input class=\"unvisiblelines\" name=\"workType\" readonly=\"readonly\" value=\""
                                    + (work.getWorkType() != null ? aisService.getDescription(work.getWorkType()) : "") + "\">");
                    sb.append("</td>");
                    // BUG FIX: second read-only field duplicated name="workType" and
                    // null-checked getWorkType() while rendering getWorkType2().
                    sb.append("<td class=\"i24_tableItem\"><input class=\"unvisiblelines\" type=\"text\" readonly=\"readonly\" name=\"hours2\" style=\"width: 35px; margin-left:10px\" value=\"")
                            .append(work != null ? aisService.hours(work.getHours2()) : "")
                            .append("\"><input class=\"unvisiblelines\" name=\"workType2\" readonly=\"readonly\" value=\""
                                    + (work.getWorkType2() != null ? aisService.getDescription(work.getWorkType2()) : "") + "\">");
                    sb.append("</td>");
                    sb.append("<td class=\"i24_tableItem\"><input class=\"unvisiblelines\" name=\"description\" type=\"text\" readonly=\"readonly\" style=\"width: 350px; margin-left:10px\" value=\"")
                            .append(work != null && work.getDescription() != null ? work.getDescription() : "")
                            .append("\"></td>");
                } else {
                    sb.append("<td class=\"i24_tableItem\"><input type=\"hidden\" name=\"id\" value=\"" + work.getId() + "\">"
                            + "<input class=\"unvisiblelines\" onChange=\"updateWork(this.form, '" + username + "')\" type=\"text\" name=\"hours\" style=\"width: 35px; margin-left:10px\" value=\"")
                            .append(work != null ? aisService.hours(work.getHours()) : "")
                            .append("\"><select class=\"unvisiblelines\" name=\"workType\" onChange=\"updateWork(this.form, '" + username + "')\">");
                    for (WorkTypeDTO type : WorkTypeDTO.values()) {
                        sb.append("<option value=\"").append(type.name()).append("\"").append(work != null && work.getWorkType() == type ? " selected" : "");
                        sb.append(">").append(aisService.getDescription(type)).append("</option>");
                    }
                    sb.append("</select></td>");

                    sb.append("<td class=\"i24_tableItem\"><input class=\"unvisiblelines\" onChange=\"updateWork(this.form, '" + username + "')\" name=\"hours2\" type=\"text\" style=\"width: 35px; margin-left:10px\" value=\"")
                            .append(work != null ? aisService.hours(work.getHours2()) : "")
                            .append("\"><select class=\"unvisiblelines\" name=\"workType2\" onChange=\"updateWork(this.form, '" + username + "')\">");
                    for (WorkTypeDTO type : WorkTypeDTO.values()) {
                        sb.append("<option value=\"").append(type.name()).append("\"").append(work != null && work.getWorkType2() == type ? " selected" : "");
                        sb.append(">").append(aisService.getDescription(type)).append("</option>");
                    }
                    sb.append("</select></td>");

                    sb.append("<td class=\"i24_tableItem\"><input type=\"hidden\" name=\"originalDescription\" value=\"")
                            .append(work != null && work.getDescription() != null ? work.getDescription() : "")
                            .append("\"><input class=\"unvisiblelines\" onChange=\"updateWork(this.form, '" + username + "')\" name=\"description\" type=\"text\" style=\"width: 350px; margin-left:10px\" value=\"")
                            .append(work != null && work.getDescription() != null ? work.getDescription() : "")
                            .append("\"></td>");
                }
            }

            if (work != null) {
                sb.append("</form>");
            }
            sb.append("</tr>");

            // Horizontal rule after each week (except when the month ends on Sunday).
            if (workDay.getDate().getDayOfWeek() == DayOfWeek.SUNDAY
                    && !workDay.getDate().isEqual(workDay.getDate().with(TemporalAdjusters.lastDayOfMonth()))) {
                sb.append("<tr><td colspan=\"9\"><hr/></td></tr>");
            }
        }
        sb.append("</table>");

        // Summary footer: labels in sb1, values in sb2.
        StringBuilder sb1 = new StringBuilder("<table cellspacing=\"0\" cellpadding=\"5\" class=\"aditus\"><tr><td colspan=\"5\"><hr/></td></tr><tr>");
        StringBuilder sb2 = new StringBuilder("<tr>");

        sb1.append("<td class=\"i24_tableItem\"><i>").append("Svátky").append("</i></td>");
        sb2.append("<td class=\"i24_tableItem\"><b>").append(aisService.days(workMonthDTO.getSumHolidays())).append("</b></td>");

        sb1.append("<td class=\"i24_tableItem\"><i><b>").append("Fond").append("</b></i></td>");
        sb2.append("<td class=\"i24_tableItem\"><b>").append(
                selectedUser.getFond() == null ? aisService.days(workMonthDTO.getSumWorkDays())
                        : aisService.days(workMonthDTO.getSumWorkDays() * fond) + "</b> / "
                                + aisService.days(workMonthDTO.getSumWorkDays())
        ).append("</b></td>");

        sb1.append("<td class=\"i24_tableItem\" style=\"background-color: #EFEFEF\"><i>").append("Celkem").append("</i></td>");
        sb2.append("<td class=\"i24_tableItem\" style=\"background-color: #EFEFEF\"><b>").append(aisService.days(workMonthDTO.getSumHolidays() + workMonthDTO.getSumWorkDays())).append("</b></td>");

        if (uzivatel != null) {
            sb1.append("<td class=\"i24_tableItem\" style=\"#888888\"><i title=\"Saldo ke konci včerejšího dne\">").append("ADocházka (?)").append("</i></td>");
            sb2.append("<td class=\"i24_tableItem\"><b>").append(aisService.hours(workMonthDTO.getSumOnSiteDays())).append("</b> (" + saldo(saldo)).append(")</td>");

            if (unpaid > 0 && schval) {
                sb1.append("<td class=\"i24_tableItem\" style=\"#888888\"><i title=\"Neuznaný přesčas\">").append("Neuznáno (?)").append("</i></td>");
                sb2.append("<td class=\"i24_tableItem\"><b>").append(aisService.hours(unpaid)).append("</td>");
            }
        }

        double worked = 0;

        // work types
        for (WorkDTO work : workMonthDTO.getSums()) {
            if (aisService.isWorkingType(work.getWorkType())) {
                worked += work.getHours() == null ? 0 : work.getHours() / AISService.HOURS_IN_DAY;
                sb1.append("<td class=\"i24_tableItem\" style=\"background-color: #7FDBFF\"><i>").append(aisService.getDescription(work.getWorkType())).append("</i></td>");
                sb2.append("<td class=\"i24_tableItem\" style=\"background-color: #7FDBFF\"><b>")
                        .append(aisService.days(work.getHours() == null ? null : work.getHours() / AISService.HOURS_IN_DAY)).append("</b></td>");
            }
        }

        sb1.append("<td class=\"i24_tableItem\"><i><b>").append("Odpracováno").append("</b></i></td>");
        sb2.append("<td class=\"i24_tableItem\"><b>").append(aisService.days(worked)).append("</b></td>");

        double sum = workMonthDTO.getSumHolidays() + worked;

        // non-work types
        for (WorkDTO work : workMonthDTO.getSums()) {
            if (!aisService.isWorkingType(work.getWorkType())) {
                // BUG FIX: was "? null :" — auto-unboxing null into double threw a
                // NullPointerException whenever a non-working sum had null hours.
                double val = work.getHours() == null ? 0 : work.getHours() / AISService.HOURS_IN_DAY;
                sb1.append("<td class=\"i24_tableItem\"><i>").append(aisService.getDescription(work.getWorkType())).append("</i></td>");
                sb2.append("<td class=\"i24_tableItem\"><b>")
                        .append(aisService.days(val)).append("</b></td>");
                sum += val;
            }
        }

        sb1.append("<td class=\"i24_tableItem\" style=\"background-color: #EFEFEF\"><i>").append("Celkem").append("</i></td>");
        sb2.append("<td class=\"i24_tableItem\" style=\"background-color: #EFEFEF\"><b>").append(aisService.days(sum)).append("</b></td>");

        if (!readonly) {
            sb2.append("<td><form action=\"" + (Boolean.TRUE.equals(redirect) ? "/api/work/html" : "/ais.jsp") + "\" method=\"get\">")
                    .append("<input type=\"hidden\" name=\"year\" value=\"").append(year).append("\">")
                    .append("<input type=\"hidden\" name=\"role\" value=\"").append(role).append("\">")
                    .append("<input type=\"hidden\" name=\"month\" value=\"").append(month).append("\">")
                    .append("<input type=\"hidden\" name=\"username\" value=\"").append(username).append("\">")
                    .append("<input type=\"submit\" class=\"buttonSubmit\" value=\"&nbsp; Přepočítat\"/></form></td>");
        }

        sb2.append("</tr>");
        sb1.append(sb2);
        if (!readonly) {
            sb1.append("<td></td>");
        }
        sb1.append("</tr></table>");
        sb.append(sb1);

        return sb.toString();
    }

    /** Returns true when {@code date} is today or earlier than {@code actualMonthDay}. */
    private boolean isPast(LocalDate actualMonthDay, LocalDate date) {
        return actualMonthDay.isAfter(date) || actualMonthDay.equals(date);
    }

    /** Renders a signed time balance, red for negative, green otherwise; "-0:00" is normalized. */
    private String saldo(Double value) {
        if (value == null) {
            return "";
        }
        String time = aisService.formatAsTime(value);
        if ("-0:00".equals(time)) {
            time = "0:00";
        }
        return "<span style=\"color:" + (time.startsWith("-") ? "#FF4136" : "#2ECC40") + "\">" + time + "</span>";
    }

    /** Renders a work-hour value, greyed with the original value as tooltip when corrected. */
    private String hoursOnly(@Valid WorkHourDTO work) {
        if (work == null || work.getDate() == null) {
            return "";
        }
        return "<span " + (work.isCorrected() ? " title =\"" + aisService.hoursOriginalOnly(work) + "\"" : "")
                + "style=\"color:" + (work.isCorrected() ? "#888888" : "#000") + "\">" + aisService.hoursOnly(work) + "</span>";
    }

    /** Renders a lunch value, greyed with the original value as tooltip when it was corrected. */
    private String corrected(Double lunch, Double originalLunch) {
        if (lunch == null) {
            return "";
        }
        return "<span " + (originalLunch != null ? " title =\"" + aisService.hours(originalLunch) + "\"" : "")
                + "style=\"color:" + (originalLunch != null ? "#888888" : "#000") + "\">" + aisService.hours(lunch) + "</span>";
    }

    /** Human-readable (Czech) label for a day type; empty for types without a label. */
    private String getDescription(@Valid WorkDayTypeDTO workDayType) {
        switch (workDayType) {
        case NATIONAL_HOLIDAY:
            return "<b>Státní svátek</b>";
        case WORKDAY:
            return "Pracovní den";
        default:
            return "";
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    @CrossOrigin(origins = "*")
    public ResponseEntity<Void> setWork(@Valid WorkDTO work, @NotNull @Valid String username) {
        aisService.setWork(work, username);
        return new ResponseEntity<>(HttpStatus.OK);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.pdmodel.interactive.digitalsignature.visible; import java.awt.geom.AffineTransform; import java.awt.image.BufferedImage; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import javax.imageio.ImageIO; import org.apache.pdfbox.io.IOUtils; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.PDPage; import org.apache.pdfbox.pdmodel.common.PDRectangle; /** * Class for visible signature design properties. Setters use param() instead of setParam() to allow * chaining. * * @author Vakhtang Koroghlishvili */ public class PDVisibleSignDesigner { private Float imageWidth; private Float imageHeight; private float xAxis; private float yAxis; private float pageHeight; private float pageWidth; private BufferedImage image; private String signatureFieldName = "sig"; private byte[] formatterRectangleParams = { 0, 0, 100, 50 }; private AffineTransform affineTransform = new AffineTransform(); private float imageSizeInPercents; private int rotation = 0; /** * Constructor. 
* * @param filename Path of the PDF file * @param imageStream image as a stream * @param page The 1-based page number for which the page size should be calculated. * @throws IOException */ public PDVisibleSignDesigner(String filename, InputStream imageStream, int page) throws IOException { // set visible signature image Input stream readImageStream(imageStream); // calculate height and width of document page calculatePageSizeFromFile(filename, page); } /** * Constructor. * * @param documentStream Original PDF document as stream * @param imageStream Image as a stream * @param page The 1-based page number for which the page size should be calculated. * @throws IOException */ public PDVisibleSignDesigner(InputStream documentStream, InputStream imageStream, int page) throws IOException { // set visible signature image Input stream readImageStream(imageStream); // calculate height and width of document page calculatePageSizeFromStream(documentStream, page); } /** * Constructor. * * @param document Already created PDDocument of your PDF document. * @param imageStream Image as a stream. * @param page The 1-based page number for which the page size should be calculated. * @throws IOException If we can't read, flush, or can't close stream. */ public PDVisibleSignDesigner(PDDocument document, InputStream imageStream, int page) throws IOException { readImageStream(imageStream); calculatePageSize(document, page); } /** * Constructor. * * @param filename Path of the PDF file * @param image * @param page The 1-based page number for which the page size should be calculated. * @throws IOException */ public PDVisibleSignDesigner(String filename, BufferedImage image, int page) throws IOException { // set visible signature image setImage(image); // calculate height and width of document page calculatePageSizeFromFile(filename, page); } /** * Constructor. 
* * @param documentStream Original PDF document as stream * @param image * @param page The 1-based page number for which the page size should be calculated. * @throws IOException */ public PDVisibleSignDesigner(InputStream documentStream, BufferedImage image, int page) throws IOException { // set visible signature image setImage(image); // calculate height and width of document page calculatePageSizeFromStream(documentStream, page); } /** * Constructor. * * @param document Already created PDDocument of your PDF document. * @param image * @param page The 1-based page number for which the page size should be calculated. */ public PDVisibleSignDesigner(PDDocument document, BufferedImage image, int page) { setImage(image); calculatePageSize(document, page); } /** * Constructor usable for signing existing signature fields. * * @param imageStream image as a stream * @throws IOException */ public PDVisibleSignDesigner(InputStream imageStream) throws IOException { // set visible signature image Input stream readImageStream(imageStream); } private void calculatePageSizeFromFile(String filename, int page) throws IOException { // create PD document PDDocument document = PDDocument.load(new File(filename)); // calculate height and width of document page calculatePageSize(document, page); document.close(); } private void calculatePageSizeFromStream(InputStream documentStream, int page) throws IOException { // create PD document PDDocument document = PDDocument.load(documentStream); // calculate height and width of document page calculatePageSize(document, page); document.close(); } /** * Each page of document can be different sizes. This method calculates the page size based on * the page media box. * * @param document * @param page The 1-based page number for which the page size should be calculated. * @throws IllegalArgumentException if the page argument is lower than 0. 
*/ private void calculatePageSize(PDDocument document, int page) { if (page < 1) { throw new IllegalArgumentException("First page of pdf is 1, not " + page); } PDPage firstPage = document.getPage(page - 1); PDRectangle mediaBox = firstPage.getMediaBox(); pageHeight(mediaBox.getHeight()); pageWidth = mediaBox.getWidth(); imageSizeInPercents = 100; rotation = firstPage.getRotation() % 360; } /** * Adjust signature for page rotation. This is optional, call this after all x and y coordinates * have been set if you want the signature to be postioned regardless of page orientation. * * @return Visible Signature Configuration Object */ public PDVisibleSignDesigner adjustForRotation() { switch (rotation) { case 90: float temp = yAxis; yAxis = pageHeight - xAxis - imageWidth; xAxis = temp; temp = imageHeight; imageHeight = imageWidth; imageWidth = temp; affineTransform = new AffineTransform(0, 0.5, -2, 0, 100, 0); break; case 180: float newX = pageWidth - xAxis - imageWidth; float newY = pageHeight - yAxis - imageHeight; xAxis = newX; yAxis = newY; affineTransform = new AffineTransform(-1, 0, 0, -1, 100, 50); break; case 270: temp = xAxis; xAxis = pageWidth - yAxis - imageHeight; yAxis = temp; temp = imageHeight; imageHeight = imageWidth; imageWidth = temp; affineTransform = new AffineTransform(0, -0.5, 2, 0, 0, 50); break; case 0: default: break; } return this; } /** * Set the image for the signature. * * @param path Path of the image file. * @return Visible Signature Configuration Object * @throws IOException */ public PDVisibleSignDesigner signatureImage(String path) throws IOException { InputStream in = null; try { in = new BufferedInputStream(new FileInputStream(path)); readImageStream(in); } finally { IOUtils.closeQuietly(in); } return this; } /** * Zoom signature image with some percent. * * @param percent increase image with x percent. 
* @return Visible Signature Configuration Object */ public PDVisibleSignDesigner zoom(float percent) { imageHeight += (imageHeight * percent) / 100; imageWidth += (imageWidth * percent) / 100; return this; } /** * * @param x - x coordinate * @param y - y coordinate * @return Visible Signature Configuration Object */ public PDVisibleSignDesigner coordinates(float x, float y) { xAxis(x); yAxis(y); return this; } /** * * @return xAxis - gets x coordinates */ public float getxAxis() { return xAxis; } /** * * @param xAxis - x coordinate * @return Visible Signature Configuration Object */ public PDVisibleSignDesigner xAxis(float xAxis) { this.xAxis = xAxis; return this; } /** * * @return yAxis */ public float getyAxis() { return yAxis; } /** * * @param yAxis * @return Visible Signature Configuration Object */ public PDVisibleSignDesigner yAxis(float yAxis) { this.yAxis = yAxis; return this; } /** * * @return signature image width */ public float getWidth() { return imageWidth; } /** * * @param width signature image width * @return Visible Signature Configuration Object */ public PDVisibleSignDesigner width(float width) { this.imageWidth = width; return this; } /** * * @return signature image height */ public float getHeight() { return imageHeight; } /** * * @param height signature image height * @return Visible Signature Configuration Object */ public PDVisibleSignDesigner height(float height) { this.imageHeight = height; return this; } /** * * @return template height */ protected float getTemplateHeight() { return getPageHeight(); } /** * * @param templateHeight * @return Visible Signature Configuration Object */ private PDVisibleSignDesigner pageHeight(float templateHeight) { this.pageHeight = templateHeight; return this; } /** * * @return signature field name */ public String getSignatureFieldName() { return signatureFieldName; } /** * * @param signatureFieldName * @return Visible Signature Configuration Object */ public PDVisibleSignDesigner signatureFieldName(String 
signatureFieldName) { this.signatureFieldName = signatureFieldName; return this; } /** * * @return image Image */ public BufferedImage getImage() { return image; } /** * Read the image stream of the signature and set height and width. * * @param stream stream of your visible signature image * @throws IOException If we can't read, flush, or close stream of image */ private void readImageStream(InputStream stream) throws IOException { ImageIO.setUseCache(false); setImage(ImageIO.read(stream)); } /** * Set image and its height and width. * * @param image */ private void setImage(BufferedImage image) { this.image = image; imageHeight = (float) image.getHeight(); imageWidth = (float) image.getWidth(); } /** * @return Affine Transform parameters for PDF Matrix * * @deprecated use {@link #getTransform() }. */ @Deprecated public byte[] getAffineTransformParams() { return new byte[] { (byte) affineTransform.getScaleX(), (byte) affineTransform.getShearY(), (byte) affineTransform.getShearX(), (byte) affineTransform.getScaleY(), (byte) affineTransform.getTranslateX(), (byte) affineTransform.getTranslateY() }; } /** * @return Affine Transform parameters for PDF Matrix */ public AffineTransform getTransform() { return affineTransform; } /** * * @param affineTransformParams * @return Visible Signature Configuration Object * @deprecated use {@link #transform}. 
*/ @Deprecated public PDVisibleSignDesigner affineTransformParams(byte[] affineTransformParams) { affineTransform = new AffineTransform(affineTransformParams[0], affineTransformParams[1], affineTransformParams[2], affineTransformParams[3], affineTransformParams[4], affineTransformParams[5]); return this; } /** * * @param affineTransform * @return Visible Signature Configuration Object */ public PDVisibleSignDesigner transform(AffineTransform affineTransform) { this.affineTransform = new AffineTransform(affineTransform); return this; } /** * * @return formatter PDRectanle parameters */ public byte[] getFormatterRectangleParams() { return formatterRectangleParams; } /** * Sets formatter PDRectangle * * @param formatterRectangleParams * @return Visible Signature Configuration Object */ public PDVisibleSignDesigner formatterRectangleParams(byte[] formatterRectangleParams) { this.formatterRectangleParams = formatterRectangleParams; return this; } /** * * @return page width */ public float getPageWidth() { return pageWidth; } /** * * @param pageWidth pageWidth * @return Visible Signature Configuration Object */ public PDVisibleSignDesigner pageWidth(float pageWidth) { this.pageWidth = pageWidth; return this; } /** * * @return page height */ public float getPageHeight() { return pageHeight; } /** * get image size in percents * @return the image size in percent */ public float getImageSizeInPercents() { return imageSizeInPercents; } /** * * @param imageSizeInPercents */ public void imageSizeInPercents(float imageSizeInPercents) { this.imageSizeInPercents = imageSizeInPercents; } /** * returns visible signature text * @return the visible signature's text */ public String getSignatureText() { throw new UnsupportedOperationException("That method is not yet implemented"); } /** * * @param signatureText - adds the text on visible signature * @return the signature design */ public PDVisibleSignDesigner signatureText(String signatureText) { throw new 
UnsupportedOperationException("That method is not yet implemented"); } }
/* * Copyright (c) 2012, 2014, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The Universal Permissive License (UPL), Version 1.0 * * Subject to the condition set forth below, permission is hereby granted to any * person obtaining a copy of this software, associated documentation and/or * data (collectively the "Software"), free of charge and under any and all * copyright rights in the Software, and any and all patent rights owned or * freely licensable by each licensor hereunder covering either (i) the * unmodified Software as contributed to or provided by such licensor, or (ii) * the Larger Works (as defined below), to deal in both * * (a) the Software, and * * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if * one is included with the Software each a "Larger Work" to which the Software * is contributed by such licensors), * * without restriction, including without limitation the rights to copy, create * derivative works of, display, perform, and distribute the Software and make, * use, sell, offer for sale, import, export, have made, and have sold the * Software and the Larger Work(s), and to sublicense the foregoing rights on * either these or other terms. * * This license is subject to the following condition: * * The above copyright notice and either this complete permission notice or at a * minimum a reference to the UPL must be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.oracle.truffle.sl.nodes;

import java.math.BigInteger;

import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.dsl.ImplicitCast;
import com.oracle.truffle.api.dsl.TypeCast;
import com.oracle.truffle.api.dsl.TypeCheck;
import com.oracle.truffle.api.dsl.TypeSystem;
import com.oracle.truffle.sl.SLLanguage;
import com.oracle.truffle.sl.runtime.SLBigNumber;
import com.oracle.truffle.sl.runtime.SLFunction;
import com.oracle.truffle.sl.runtime.SLNull;

/**
 * The type system of SL, as explained in {@link SLLanguage}. Based on the {@link TypeSystem}
 * annotation, the Truffle DSL generates the subclass {@link SLTypesGen} with type test and type
 * conversion methods for all types. In this class, we only cover types where the automatically
 * generated ones would not be sufficient.
 */
@TypeSystem({long.class, SLBigNumber.class, boolean.class, String.class, SLFunction.class, SLNull.class})
public abstract class SLTypes {

    /**
     * Example of a manually specified type check that replaces the automatically generated type
     * check that the Truffle DSL would generate. For {@link SLNull}, we do not need an
     * {@code instanceof} check, because we know that there is only a {@link SLNull#SINGLETON
     * singleton} instance.
     */
    @TypeCheck(SLNull.class)
    public static boolean isSLNull(Object value) {
        // Identity comparison is sufficient (and faster than instanceof)
        // because SLNull is a singleton.
        return value == SLNull.SINGLETON;
    }

    /**
     * Example of a manually specified type cast that replaces the automatically generated type cast
     * that the Truffle DSL would generate. For {@link SLNull}, we do not need an actual cast,
     * because we know that there is only a {@link SLNull#SINGLETON singleton} instance.
     */
    @TypeCast(SLNull.class)
    public static SLNull asSLNull(Object value) {
        assert isSLNull(value);
        return SLNull.SINGLETON;
    }

    /**
     * Informs the Truffle DSL that a primitive {@code long} value can be used in all
     * specializations where a {@link SLBigNumber} is expected. This models the semantic of SL: It
     * only has an arbitrary precision Number type (implemented as {@link SLBigNumber}), and
     * {@code long} is only used as a performance optimization to avoid the costly
     * {@link SLBigNumber} arithmetic for values that fit into a 64-bit primitive value.
     */
    @ImplicitCast
    @TruffleBoundary
    public static SLBigNumber castBigNumber(long value) {
        // @TruffleBoundary keeps the BigInteger allocation out of compiled code.
        return new SLBigNumber(BigInteger.valueOf(value));
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.medialive.model;

import javax.annotation.Generated;

/**
 * H265 Level
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public enum H265Level {

    H265_LEVEL_1("H265_LEVEL_1"),
    H265_LEVEL_2("H265_LEVEL_2"),
    H265_LEVEL_2_1("H265_LEVEL_2_1"),
    H265_LEVEL_3("H265_LEVEL_3"),
    H265_LEVEL_3_1("H265_LEVEL_3_1"),
    H265_LEVEL_4("H265_LEVEL_4"),
    H265_LEVEL_4_1("H265_LEVEL_4_1"),
    H265_LEVEL_5("H265_LEVEL_5"),
    H265_LEVEL_5_1("H265_LEVEL_5_1"),
    H265_LEVEL_5_2("H265_LEVEL_5_2"),
    H265_LEVEL_6("H265_LEVEL_6"),
    H265_LEVEL_6_1("H265_LEVEL_6_1"),
    H265_LEVEL_6_2("H265_LEVEL_6_2"),
    H265_LEVEL_AUTO("H265_LEVEL_AUTO");

    /** Wire representation of this level; identical to the constant name. */
    private final String value;

    H265Level(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return value;
    }

    /**
     * Use this in place of valueOf.
     *
     * @param value
     *        real value
     * @return H265Level corresponding to the value
     *
     * @throws IllegalArgumentException
     *         If the specified value does not map to one of the known values in this enum.
     */
    public static H265Level fromValue(String value) {
        if (value == null || value.isEmpty()) {
            throw new IllegalArgumentException("Value cannot be null or empty!");
        }
        for (H265Level candidate : values()) {
            if (candidate.value.equals(value)) {
                return candidate;
            }
        }
        throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
    }
}
package ru.newTestPackage.sandbox; import java.awt.*; import java.util.*; import java.util.List; public class Collection { public static void main(String[] args) { String[] langs = {"Java", "C#", "Python", "PHP"}; List<String> lang = new ArrayList<String>(); lang.add("Java"); lang.add("C#"); lang.add("Python"); lang.add("C++"); for(String l : lang){ System.out.println("Я хочу выучить " + l); } } }
package com.elong.pb.newdda.client.jdbc.adapter; import com.elong.pb.newdda.client.jdbc.operation.AbstractUnsupportedOperationResultSet; import com.google.common.base.Preconditions; import java.sql.ResultSet; import java.sql.SQLException; import java.util.List; public abstract class AbstractResultSetAdapter extends AbstractUnsupportedOperationResultSet { private final List<ResultSet> resultSets; private boolean closed; public AbstractResultSetAdapter(final List<ResultSet> resultSets) throws SQLException { Preconditions.checkArgument(!resultSets.isEmpty()); this.resultSets = resultSets; } @Override public final void close() throws SQLException { for (ResultSet each : resultSets) { each.close(); } closed = true; } @Override public final boolean isClosed() throws SQLException { return closed; } @Override public final void setFetchDirection(final int direction) throws SQLException { for (ResultSet each : resultSets) { each.setFetchDirection(direction); } } @Override public final void setFetchSize(final int rows) throws SQLException { for (ResultSet each : resultSets) { each.setFetchSize(rows); } } //============================================================================================= set get method start ========================================================================= public List<ResultSet> getResultSets() { return resultSets; } //============================================================================================= set get method end ========================================================================= }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gemstone.gemfire.management.internal.cli.commands; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.springframework.shell.core.CommandMarker; import org.springframework.shell.core.annotation.CliAvailabilityIndicator; import org.springframework.shell.core.annotation.CliCommand; import org.springframework.shell.core.annotation.CliOption; import com.gemstone.gemfire.SystemFailure; import com.gemstone.gemfire.cache.Cache; import com.gemstone.gemfire.cache.CacheFactory; import com.gemstone.gemfire.cache.Region; import com.gemstone.gemfire.cache.execute.Execution; import com.gemstone.gemfire.cache.execute.Function; import com.gemstone.gemfire.cache.execute.FunctionException; import com.gemstone.gemfire.cache.execute.FunctionService; import com.gemstone.gemfire.cache.execute.ResultCollector; import com.gemstone.gemfire.distributed.DistributedMember; import com.gemstone.gemfire.internal.ClassPathLoader; import com.gemstone.gemfire.internal.cache.GemFireCacheImpl; import com.gemstone.gemfire.management.DistributedRegionMXBean; import 
com.gemstone.gemfire.management.ManagementService; import com.gemstone.gemfire.management.cli.CliMetaData; import com.gemstone.gemfire.management.cli.ConverterHint; import com.gemstone.gemfire.management.cli.Result; import com.gemstone.gemfire.management.cli.Result.Status; import com.gemstone.gemfire.management.internal.MBeanJMXAdapter; import com.gemstone.gemfire.management.internal.cli.AbstractCliAroundInterceptor; import com.gemstone.gemfire.management.internal.cli.CliUtil; import com.gemstone.gemfire.management.internal.cli.GfshParseResult; import com.gemstone.gemfire.management.internal.cli.LogWrapper; import com.gemstone.gemfire.management.internal.cli.functions.CliFunctionResult; import com.gemstone.gemfire.management.internal.cli.functions.ListFunctionFunction; import com.gemstone.gemfire.management.internal.cli.functions.UnregisterFunction; import com.gemstone.gemfire.management.internal.cli.functions.UserFunctionExecution; import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings; import com.gemstone.gemfire.management.internal.cli.result.CommandResultException; import com.gemstone.gemfire.management.internal.cli.result.CompositeResultData; import com.gemstone.gemfire.management.internal.cli.result.ErrorResultData; import com.gemstone.gemfire.management.internal.cli.result.ResultBuilder; import com.gemstone.gemfire.management.internal.cli.result.TabularResultData; import com.gemstone.gemfire.management.internal.cli.shell.Gfsh; /** * @author David Hoots * * @since 7.0 */ @SuppressWarnings("unused") public class FunctionCommands implements CommandMarker { private final ListFunctionFunction listFunctionFunction = new ListFunctionFunction(); private Gfsh getGfsh() { return Gfsh.getCurrentInstance(); } @CliCommand(value = CliStrings.EXECUTE_FUNCTION, help = CliStrings.EXECUTE_FUNCTION__HELP) @CliMetaData(relatedTopic = { CliStrings.TOPIC_GEMFIRE_FUNCTION }) public Result executeFunction( //TODO: Add optioncontext for functionID @CliOption(key = 
CliStrings.EXECUTE_FUNCTION__ID, mandatory = true, help = CliStrings.EXECUTE_FUNCTION__ID__HELP) String functionId, @CliOption(key = CliStrings.EXECUTE_FUNCTION__ONGROUPS, unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE, optionContext = ConverterHint.MEMBERGROUP, help = CliStrings.EXECUTE_FUNCTION__ONGROUPS__HELP) String[] onGroups, @CliOption(key = CliStrings.EXECUTE_FUNCTION__ONMEMBER, unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE, optionContext = ConverterHint.MEMBERIDNAME, help = CliStrings.EXECUTE_FUNCTION__ONMEMBER__HELP) String onMember, @CliOption(key = CliStrings.EXECUTE_FUNCTION__ONREGION, unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE, optionContext = ConverterHint.REGIONPATH, help = CliStrings.EXECUTE_FUNCTION__ONREGION__HELP) String onRegion, @CliOption(key = CliStrings.EXECUTE_FUNCTION__ARGUMENTS, unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE, help = CliStrings.EXECUTE_FUNCTION__ARGUMENTS__HELP) String[] arguments, @CliOption(key = CliStrings.EXECUTE_FUNCTION__RESULTCOLLECTOR, unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE, help = CliStrings.EXECUTE_FUNCTION__RESULTCOLLECTOR__HELP) String resultCollector, @CliOption(key = CliStrings.EXECUTE_FUNCTION__FILTER, unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE, help = CliStrings.EXECUTE_FUNCTION__FILTER__HELP) String filterString) { Result result = null; CompositeResultData executeFunctionResultTable = ResultBuilder .createCompositeResultData(); TabularResultData resultTable = executeFunctionResultTable.addSection().addTable( "Table1"); String headerText = "Execution summary"; resultTable.setHeader(headerText); ResultCollector resultCollectorInstance = null; Function function; Set<String> filters = new HashSet<String>(); Execution execution = null; if (functionId != null){ functionId = functionId.trim(); } if (onRegion != null){ onRegion = onRegion.trim(); } if (onMember != null){ onMember = onMember.trim(); } if (filterString != 
null){ filterString = filterString.trim(); } try { // validate otherwise return right away. no need to process anything if (functionId == null || functionId.length() == 0) { ErrorResultData errorResultData = ResultBuilder.createErrorResultData() .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT).addLine( CliStrings.EXECUTE_FUNCTION__MSG__MISSING_FUNCTIONID); result = ResultBuilder.buildResult(errorResultData); return result; } if (onRegion != null && onMember != null && onGroups != null) { ErrorResultData errorResultData = ResultBuilder.createErrorResultData() .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT).addLine( CliStrings.EXECUTE_FUNCTION__MSG__OPTIONS); result = ResultBuilder.buildResult(errorResultData); return result; } else if (onRegion != null && onMember != null) { ErrorResultData errorResultData = ResultBuilder.createErrorResultData() .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT).addLine( CliStrings.EXECUTE_FUNCTION__MSG__OPTIONS); result = ResultBuilder.buildResult(errorResultData); return result; } else if (onMember != null && onGroups != null) { ErrorResultData errorResultData = ResultBuilder.createErrorResultData() .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT).addLine( CliStrings.EXECUTE_FUNCTION__MSG__OPTIONS); result = ResultBuilder.buildResult(errorResultData); return result; } else if (onRegion != null && onGroups != null) { ErrorResultData errorResultData = ResultBuilder.createErrorResultData() .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT).addLine( CliStrings.EXECUTE_FUNCTION__MSG__OPTIONS); result = ResultBuilder.buildResult(errorResultData); return result; } else if (onRegion != null && onMember != null && onGroups != null) { ErrorResultData errorResultData = ResultBuilder.createErrorResultData() .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT).addLine( CliStrings.EXECUTE_FUNCTION__MSG__OPTIONS); result = ResultBuilder.buildResult(errorResultData); return result; }else if ( (onRegion == null || onRegion.length() == 0) && (filterString != null ) 
    ){
      // A filter only makes sense for onRegion execution: reject the
      // member+filter combination up front.
      ErrorResultData errorResultData = ResultBuilder.createErrorResultData()
          .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT).addLine(
              CliStrings.EXECUTE_FUNCTION__MSG__MEMBER_SHOULD_NOT_HAVE_FILTER_FOR_EXECUTION);
      result = ResultBuilder.buildResult(errorResultData);
      return result;
    }

    Cache cache = CacheFactory.getAnyInstance();

    if (resultCollector != null) {
      // User supplied a custom ResultCollector class name; instantiate it
      // reflectively. InstantiationException/IllegalAccessException from this
      // call are handled by the dedicated catch blocks below.
      resultCollectorInstance = (ResultCollector) ClassPathLoader.getLatest()
          .forName(resultCollector).newInstance();
    }

    if (filterString != null && filterString.length() > 0) {
      filters.add(filterString);
    }

    if (onRegion == null && onMember == null && onGroups == null) {
      // No target given: run function on all the members excluding locators
      // (bug #46113). If the user wishes to execute on a locator they can
      // choose the --member or --group option instead.
      Set<DistributedMember> dsMembers = CliUtil.getAllNormalMembers(cache);
      if (dsMembers.size() > 0) {
        function = new UserFunctionExecution();
        LogWrapper.getInstance().info(
            CliStrings.format(
                CliStrings.EXECUTE_FUNCTION__MSG__EXECUTING_0_ON_ENTIRE_DS,
                functionId));
        // One row per member is accumulated into resultTable.
        for (DistributedMember member : dsMembers) {
          executeAndGetResults(functionId, filterString, resultCollector,
              arguments, cache, member, resultTable, onRegion);
        }
        return ResultBuilder.buildResult(resultTable);
      } else {
        return ResultBuilder
            .createUserErrorResult(CliStrings.EXECUTE_FUNCTION__MSG__DS_HAS_NO_MEMBERS);
      }
    } else if (onRegion != null && onRegion.length() > 0) {
      if (cache.getRegion(onRegion) == null) {
        // Region is not hosted locally: find a member where the region is
        // present via the DistributedRegionMXBean, trying the name both
        // without and with the leading Region.SEPARATOR.
        DistributedRegionMXBean bean = ManagementService
            .getManagementService(GemFireCacheImpl.getInstance())
            .getDistributedRegionMXBean(onRegion);
        if (bean == null) {
          bean = ManagementService.getManagementService(
              GemFireCacheImpl.getInstance()).getDistributedRegionMXBean(
              Region.SEPARATOR + onRegion);
          if (bean == null) {
            return ResultBuilder.createGemFireErrorResult(CliStrings.format(
                CliStrings.EXECUTE_FUNCTION__MSG__MXBEAN_0_FOR_NOT_FOUND,
                onRegion));
          }
        }
        DistributedMember member = null;
        String[] membersName = bean.getMembers();
        Set<DistributedMember> dsMembers = CliUtil.getAllMembers(cache);
        Iterator it = dsMembers.iterator();
        boolean matchFound = false;
        if (membersName.length > 0) {
          // Pick the first distributed member whose name/id matches one of
          // the member names reported by the MXBean; the function is then
          // executed on that single member only.
          while (it.hasNext() && matchFound == false) {
            DistributedMember dsmember = (DistributedMember) it.next();
            for (String memberName : membersName) {
              if (MBeanJMXAdapter.getMemberNameOrId(dsmember).equals(
                  memberName)) {
                member = dsmember;
                matchFound = true;
                break;
              }
            }
          }
        }
        if (matchFound == true) {
          executeAndGetResults(functionId, filterString, resultCollector,
              arguments, cache, member, resultTable, onRegion);
          return ResultBuilder.buildResult(resultTable);
        } else {
          return ResultBuilder.createGemFireErrorResult(CliStrings.format(
              CliStrings.EXECUTE_FUNCTION__MSG__NO_ASSOCIATED_MEMBER_REGION,
              " " + onRegion));
        }
      } else {
        // Region exists on this member: use the regular onRegion execution
        // path with optional collector, filter and arguments.
        execution = FunctionService.onRegion(cache.getRegion(onRegion));
        if (execution != null) {
          if (resultCollectorInstance != null) {
            execution = execution.withCollector(resultCollectorInstance);
          }
          if (filters != null && filters.size() > 0) {
            execution = execution.withFilter(filters);
          }
          if (arguments != null && arguments.length > 0) {
            execution = execution.withArgs(arguments);
          }
          try {
            List<Object> results = (List<Object>) execution.execute(
                functionId).getResult();
            if (results.size() > 0) {
              // Flatten all result objects into one display string.
              StringBuilder strResult = new StringBuilder();
              for (Object obj : results) {
                strResult.append(obj);
              }
              toTabularResultData(resultTable, cache.getDistributedSystem()
                  .getDistributedMember().getId(), strResult.toString());
            }
            return ResultBuilder.buildResult(resultTable);
          } catch (FunctionException e) {
            return ResultBuilder
                .createGemFireErrorResult(CliStrings
                    .format(
                        CliStrings.EXECUTE_FUNCTION__MSG__ERROR_IN_EXECUTING_0_ON_REGION_1_DETAILS_2,
                        functionId, onRegion, e.getMessage()));
          }
        } else {
          return ResultBuilder
              .createGemFireErrorResult(CliStrings
                  .format(
                      CliStrings.EXECUTE_FUNCTION__MSG__ERROR_IN_EXECUTING_0_ON_REGION_1_DETAILS_2,
                      functionId, onRegion,
                      CliStrings.EXECUTE_FUNCTION__MSG__ERROR_IN_RETRIEVING_EXECUTOR));
        }
      }
    } else if (onGroups != null) {
      // Execute on group members; a Set deduplicates members belonging to
      // more than one of the requested groups.
      Set<DistributedMember> dsMembers = new HashSet<DistributedMember>();
      for (String grp : onGroups) {
        dsMembers.addAll(cache.getDistributedSystem().getGroupMembers(grp));
      }
      StringBuilder successMessage = new StringBuilder(); // NOTE(review): never used in this branch
      if (dsMembers.size() > 0) {
        for (DistributedMember member : dsMembers) {
          executeAndGetResults(functionId, filterString, resultCollector,
              arguments, cache, member, resultTable, onRegion);
        }
        return ResultBuilder.buildResult(resultTable);
      } else {
        StringBuilder grps = new StringBuilder();
        for (String grp : onGroups) {
          grps.append(grp);
          grps.append(", ");
        }
        // NOTE(review): the separator appended is ", " (two chars) but only
        // one trailing char is stripped, so the message keeps a trailing comma.
        return ResultBuilder.createUserErrorResult(CliStrings.format(
            CliStrings.EXECUTE_FUNCTION__MSG__GROUPS_0_HAS_NO_MEMBERS,
            grps.toString().substring(0, grps.toString().length() - 1)));
      }
    } else if (onMember != null && onMember.length() > 0) {
      DistributedMember member = CliUtil
          .getDistributedMemberByNameOrId(onMember); // fix for bug 45658
      if (member != null) {
        executeAndGetResults(functionId, filterString, resultCollector,
            arguments, cache, member, resultTable, onRegion);
      } else {
        // Unknown member: record the problem as a table row rather than
        // failing the whole command.
        toTabularResultData(resultTable, onMember, CliStrings
            .format(CliStrings.EXECUTE_FUNCTION__MSG__NO_ASSOCIATED_MEMBER
                + " " + onMember));
      }
      return ResultBuilder.buildResult(resultTable);
    }
  } catch (InstantiationException e) {
    // Thrown while reflectively creating the user-supplied ResultCollector.
    ErrorResultData errorResultData = ResultBuilder.createErrorResultData()
        .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT)
        .addLine(e.getMessage());
    result = ResultBuilder.buildResult(errorResultData);
    return result;
  } catch (IllegalAccessException e) {
    ErrorResultData errorResultData = ResultBuilder.createErrorResultData()
        .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT)
        .addLine(e.getMessage());
    result = ResultBuilder.buildResult(errorResultData);
    return result;
  } catch (IllegalArgumentException e) {
    ErrorResultData errorResultData = ResultBuilder.createErrorResultData()
.setErrorCode(ResultBuilder.ERRORCODE_DEFAULT) .addLine(e.getMessage()); result = ResultBuilder.buildResult(errorResultData); return result; } catch (SecurityException e) { ErrorResultData errorResultData = ResultBuilder.createErrorResultData() .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT) .addLine(e.getMessage()); result = ResultBuilder.buildResult(errorResultData); return result; } catch (Exception e) { ErrorResultData errorResultData = ResultBuilder.createErrorResultData() .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT) .addLine(e.getMessage()); result = ResultBuilder.buildResult(errorResultData); return result; } return result; } DistributedMember getMember(Cache cache, String memberNameOrId) { DistributedMember member = null; Set<DistributedMember> dsMembers = CliUtil.getAllMembers(cache) ; Iterator<DistributedMember> it = dsMembers.iterator(); while (it.hasNext()) { DistributedMember tempMember = (DistributedMember) it.next(); if (memberNameOrId.equals(tempMember.getId()) || memberNameOrId.equals(tempMember.getName())) { return tempMember; } } return member; } void executeAndGetResults(String functionId, String filterString, String resultCollector, String[] arguments, Cache cache, DistributedMember member, TabularResultData resultTable, String onRegion) { StringBuilder resultMessege = new StringBuilder(); try { Function function = new UserFunctionExecution(); Object[] args = new Object[5]; args[0] = functionId; if(filterString != null){ args[1] = filterString; } if (resultCollector != null) { args[2] = resultCollector; } if (arguments != null && arguments.length > 0) { args[3] = new String(); for (String str : arguments) { // send via CSV separated value format if(str != null){ args[3] = args[3] + str + ","; } } } args[4] = onRegion; Execution execution = FunctionService.onMember(member).withArgs(args); if (execution != null) { List<Object> results = (List<Object>) execution.execute(function) .getResult(); if (results != null) { for (Object resultObj : 
results) { if (resultObj != null) { if (resultObj instanceof String) { resultMessege.append(((String) resultObj)); } else if (resultObj instanceof Exception) { resultMessege.append(((Exception)resultObj).getMessage()); } else { resultMessege.append(resultObj); } } } } toTabularResultData(resultTable, member.getId(), resultMessege .toString()); }else{ toTabularResultData(resultTable, member.getId(), CliStrings.EXECUTE_FUNCTION__MSG__ERROR_IN_RETRIEVING_EXECUTOR); } } catch (FunctionException e) { resultMessege.append(CliStrings.format( CliStrings.EXECUTE_FUNCTION__MSG__COULD_NOT_EXECUTE_FUNCTION_0_ON_MEMBER_1_ERROR_2, functionId, member.getId(), e.getMessage())); toTabularResultData(resultTable, member.getId(), resultMessege .toString()); } catch (Exception e) { resultMessege.append(CliStrings.format( CliStrings.EXECUTE_FUNCTION__MSG__COULD_NOT_EXECUTE_FUNCTION_0_ON_MEMBER_1_ERROR_2, functionId, member.getId(), e.getMessage())); toTabularResultData(resultTable, member.getId(), resultMessege .toString()); } } protected void toTabularResultData(TabularResultData table, String memberId, String memberResult) { String newLine = System.getProperty("line.separator"); table.accumulate("Member ID/Name", memberId); table.accumulate("Function Execution Result", memberResult); } @CliCommand(value = CliStrings.DESTROY_FUNCTION, help = CliStrings.DESTROY_FUNCTION__HELP) @CliMetaData(relatedTopic = { CliStrings.TOPIC_GEMFIRE_FUNCTION } , interceptor = "com.gemstone.gemfire.management.internal.cli.commands.FunctionCommands$Interceptor") //TODO: Add optioncontext for functionId public Result destroyFunction( @CliOption(key = CliStrings.DESTROY_FUNCTION__ID, mandatory = true, help = CliStrings.DESTROY_FUNCTION__HELP) String functionId, @CliOption(key = CliStrings.DESTROY_FUNCTION__ONGROUPS, unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE, optionContext = ConverterHint.MEMBERGROUP, help = CliStrings.DESTROY_FUNCTION__ONGROUPS__HELP) String[] groups, @CliOption(key = 
        CliStrings.DESTROY_FUNCTION__ONMEMBER, unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE, optionContext = ConverterHint.MEMBERIDNAME, help = CliStrings.DESTROY_FUNCTION__ONMEMBER__HELP) String memberId) {
  Result result = null;
  try {
    Cache cache = CacheFactory.getAnyInstance();
    Set<DistributedMember> dsMembers = new HashSet<DistributedMember>();
    if (groups != null && memberId != null) {
      // --groups and --member are mutually exclusive.
      return ResultBuilder.createUserErrorResult(CliStrings.DESTROY_FUNCTION__MSG__PROVIDE_OPTION);
    } else if (groups != null && groups.length > 0) {
      //execute on group members
      for (String grp : groups) {
        dsMembers.addAll(cache.getDistributedSystem().getGroupMembers(grp));
      }
      @SuppressWarnings("unchecked")
      Result results = executeFunction(cache, dsMembers, functionId);
      return results;
    } else if (memberId != null) {
      //execute on member
      dsMembers.add(getMember(cache, memberId));
      @SuppressWarnings("unchecked")
      Result results = executeFunction(cache, dsMembers, functionId);
      return results;
    } else {
      //no option provided: unregister on every member of the DS.
      @SuppressWarnings("unchecked")
      Result results = executeFunction(cache, cache.getMembers(), functionId);
      return results;
    }
  } catch (Exception e) {
    ErrorResultData errorResultData = ResultBuilder.createErrorResultData()
        .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT).addLine(
            e.getMessage());
    result = ResultBuilder.buildResult(errorResultData);
    return result;
  }
}

/**
 * Interceptor used by gfsh to intercept execution of destroy.
 */
public static class Interceptor extends AbstractCliAroundInterceptor {
  @Override
  public Result preExecution(GfshParseResult parseResult) {
    Map<String, String> paramValueMap = parseResult.getParamValueStrings();
    Set< Entry<String, String> > setEnvMap = paramValueMap.entrySet(); // NOTE(review): unused
    String onGroup = paramValueMap.get(CliStrings.DESTROY_FUNCTION__ONGROUPS);
    String onMember = paramValueMap.get(CliStrings.DESTROY_FUNCTION__ONMEMBER);
    if ((onGroup == null && onMember == null)) {
      // Destroying on the entire DS is destructive enough to ask first.
      Response response = readYesNo("Do you really want to destroy "
          + paramValueMap.get(CliStrings.DESTROY_FUNCTION__ID)
          + " on entire DS?", Response.NO);
      if (response == Response.NO) {
        return ResultBuilder.createShellClientAbortOperationResult(
            "Aborted destroy of "
                + paramValueMap.get(CliStrings.DESTROY_FUNCTION__ID));
      } else {
        return ResultBuilder.createInfoResult("Destroying "
            + paramValueMap.get(CliStrings.DESTROY_FUNCTION__ID));
      }
    } else {
      return ResultBuilder.createInfoResult("Destroying "
          + paramValueMap.get(CliStrings.DESTROY_FUNCTION__ID));
    }
  }

  @Override
  public Result postExecution(GfshParseResult parseResult, Result commandResult) {
    // Pass the command result through unchanged.
    return commandResult;
  }
}

/**
 * Broadcasts UnregisterFunction to DsMembers in order to unregister
 * functionId on each, and summarises success/failure as an info result.
 */
Result executeFunction(Cache cache, Set<DistributedMember> DsMembers, String functionId) {
  //unregister on a set of of members
  Function unregisterFunction = new UnregisterFunction();
  FunctionService.registerFunction(unregisterFunction);
  List resultList = null;
  if (DsMembers.isEmpty()) {
    return ResultBuilder.createInfoResult("No members for execution");
  }
  // The only argument is the id of the function to unregister.
  Object[] obj = new Object[1];
  obj[0] = functionId;
  Execution execution = FunctionService.onMembers(DsMembers).withArgs(obj);
  if (execution == null) {
    cache.getLogger().error("executeUnregister execution is null");
    ErrorResultData errorResultData = ResultBuilder.createErrorResultData()
        .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT).addLine(
            CliStrings.DESTROY_FUNCTION__MSG__CANNOT_EXECUTE);
    return (ResultBuilder.buildResult(errorResultData));
  }
  try {
    resultList = (ArrayList)
        execution.execute(unregisterFunction).getResult();
  } catch (FunctionException ex) {
    ErrorResultData errorResultData = ResultBuilder.createErrorResultData()
        .setErrorCode(ResultBuilder.ERRORCODE_DEFAULT).addLine(
            ex.getMessage());
    return (ResultBuilder.buildResult(errorResultData));
  }
  // The first list entry carries the summary string produced by the
  // unregister function on the members.
  String resultStr = ((String) resultList.get(0));
  if (resultStr.equals("Succeeded in unregistering")) {
    StringBuilder members = new StringBuilder();
    for (DistributedMember member : DsMembers) {
      members.append(member.getId());
      members.append(",");
    }
    // substring drops the trailing comma.
    return ResultBuilder.createInfoResult("Destroyed " + functionId
        + " Successfully on "
        + members.toString().substring(0, members.toString().length() - 1));
  } else {
    return ResultBuilder.createInfoResult("Failed in unregistering");
  }
}

/**
 * gfsh "list function": lists registered functions on the matching members,
 * optionally restricted by a matches pattern and by group/member filters.
 */
@CliCommand(value = CliStrings.LIST_FUNCTION, help = CliStrings.LIST_FUNCTION__HELP)
@CliMetaData(relatedTopic = { CliStrings.TOPIC_GEMFIRE_FUNCTION })
public Result listFunction(
    @CliOption(key = CliStrings.LIST_FUNCTION__MATCHES, help = CliStrings.LIST_FUNCTION__MATCHES__HELP) String matches,
    @CliOption(key = CliStrings.LIST_FUNCTION__GROUP, optionContext = ConverterHint.MEMBERGROUP, help = CliStrings.LIST_FUNCTION__GROUP__HELP) @CliMetaData(valueSeparator = ",") String groups,
    @CliOption(key = CliStrings.LIST_FUNCTION__MEMBER, optionContext = ConverterHint.MEMBERIDNAME, help = CliStrings.LIST_FUNCTION__MEMBER__HELP) @CliMetaData(valueSeparator = ",") String members) {
  TabularResultData tabularData = ResultBuilder.createTabularResultData();
  boolean accumulatedData = false;

  Cache cache = CacheFactory.getAnyInstance();

  Set<DistributedMember> targetMembers;
  try {
    targetMembers = CliUtil.findAllMatchingMembers(groups, members);
  } catch (CommandResultException crex) {
    return crex.getResult();
  }

  try {
    ResultCollector<?, ?> rc = CliUtil.executeFunction(this.listFunctionFunction,
        new Object[] { matches }, targetMembers);
    List<CliFunctionResult> results =
        CliFunctionResult.cleanResults((List<?>) rc.getResult());
    for (CliFunctionResult result : results) {
      if (result.getThrowable() != null) {
        // A member that failed still gets a row; overall status becomes ERROR.
        tabularData.accumulate("Member", result.getMemberIdOrName());
        tabularData.accumulate("Function",
            "<ERROR: " + result.getThrowable().getMessage() + ">");
        accumulatedData = true;
        tabularData.setStatus(Status.ERROR);
      } else if (result.isSuccessful()) {
        String[] strings = (String[]) result.getSerializables();
        Arrays.sort(strings); // sorted per-member output for stable display
        for (int i = 0; i < strings.length; i++) {
          tabularData.accumulate("Member", result.getMemberIdOrName());
          tabularData.accumulate("Function", strings[i]);
          accumulatedData = true;
        }
      }
    }

    if (!accumulatedData) {
      return ResultBuilder.createInfoResult(
          CliStrings.LIST_FUNCTION__NO_FUNCTIONS_FOUND_ERROR_MESSAGE);
    }
    return ResultBuilder.buildResult(tabularData);
  } catch (VirtualMachineError e) {
    // VM errors must initiate SystemFailure handling and then propagate.
    SystemFailure.initiateFailure(e);
    throw e;
  } catch (Throwable th) {
    SystemFailure.checkFailure();
    return ResultBuilder.createGemFireErrorResult(
        "Exception while attempting to list functions: " + th.getMessage());
  }
}

/**
 * Availability indicator for the function commands: always available on a
 * server VM; inside gfsh they additionally require a live connection.
 */
@CliAvailabilityIndicator({CliStrings.EXECUTE_FUNCTION,
    CliStrings.DESTROY_FUNCTION, CliStrings.LIST_FUNCTION})
public boolean functionCommandsAvailable() {
  boolean isAvailable = true; //always available on server
  if (CliUtil.isGfshVM()) { //in gfsh check if connected
    isAvailable = getGfsh() != null && getGfsh().isConnectedAndReady();
  }
  return isAvailable;
}
}
package com.samsung.android.sdk.iap.lib.helper.task;

import android.content.Context;
import android.os.Bundle;
import android.util.Log;

import com.samsung.android.iap.IAPConnector;
import plugin.samsung.iap.R;
import com.samsung.android.sdk.iap.lib.helper.HelperDefine;
import com.samsung.android.sdk.iap.lib.service.ProductsDetails;
import com.samsung.android.sdk.iap.lib.vo.ProductVo;

import java.util.ArrayList;

/**
 * Asynchronized Task to load a list of items.
 * <p>
 * Pages through the IAP service's product catalogue via
 * {@code IAPConnector.getProductsDetails} and accumulates each page's entries
 * into {@link #mProductsDetails}, which is shared with the owning
 * {@link ProductsDetails} service through
 * {@code setProductsDetails}. Error state is recorded in {@code mErrorVo}
 * (inherited from BaseTask — defined outside this file).
 */
public class GetProductsDetailsTask extends BaseTask {
    private static final String TAG = GetProductsDetailsTask.class.getSimpleName();

    // Comma-separated product ids to query; empty string means "all products
    // of this package" (see the priority note inside doInBackground).
    private String mProductIds = "";
    // Accumulator for all pages of product details; also handed to the
    // base service in the constructor so results are visible to the caller.
    ArrayList<ProductVo> mProductsDetails = new ArrayList<ProductVo>();

    /**
     * @param _baseService     owning service; receives the (live) results list
     * @param _iapConnector    bound IAP service connector
     * @param _context         Android context used for error strings
     * @param _productIDs      product ids to query, or empty for all
     * @param _showErrorDialog whether BaseTask should surface errors in a dialog
     * @param _mode            IAP operating mode passed through to the service
     */
    public GetProductsDetailsTask
    (
        ProductsDetails _baseService,
        IAPConnector   _iapConnector,
        Context         _context,
        String          _productIDs,
        boolean         _showErrorDialog,
        int             _mode
    )
    {
        super(_baseService, _iapConnector, _context, _showErrorDialog, _mode);
        mProductIds = _productIDs;
        _baseService.setProductsDetails(mProductsDetails);
    }

    /**
     * Fetches product details page by page until the service stops returning
     * a next paging index.
     *
     * @return false only when an unexpected exception occurred; true otherwise
     *         — including the case where the service reported an IAP error
     *         (the error is recorded in {@code mErrorVo}; callers are expected
     *         to inspect it. NOTE(review): confirm this "return true on error"
     *         is intentional — it mirrors the recorded-error contract, not a
     *         success flag).
     */
    @Override
    protected Boolean doInBackground(String... params) {
        try {
            int pagingIndex = 1;
            do {
                // 1) call getProductsDetails() method of IAPService
                // ---- Order Priority ----
                // 1. if productIds is not empty, the infomations abouts products included in the productIds are returned
                // 2. if productIds is empty, the infomations about all products in this package are returned on a page by page
                // ============================================================
                Bundle bundle = mIapConnector.getProductsDetails(
                        mPackageName,
                        mProductIds,
                        pagingIndex,
                        mMode);
                // ============================================================

                // 2) save status code, error string and extra String.
                // ============================================================
                if (bundle != null) {
                    mErrorVo.setError(bundle.getInt(HelperDefine.KEY_NAME_STATUS_CODE),
                            bundle.getString(HelperDefine.KEY_NAME_ERROR_STRING));
                } else {
                    // Null bundle from the service: report a generic error.
                    mErrorVo.setError(
                            HelperDefine.IAP_ERROR_COMMON,
                            mContext.getString(
                                    R.string.mids_sapps_pop_unknown_error_occurred));
                }
                // ============================================================

                // 3) If item list is loaded successfully,
                //    make item list by Bundle data
                // ============================================================
                if (mErrorVo.getErrorCode() == HelperDefine.IAP_ERROR_NONE) {
                    if (bundle != null) {
                        // A present, non-empty NEXT_PAGING_INDEX means another
                        // page follows; -1 ends the do-while loop below.
                        String nextPagingIndex =
                                bundle.getString(HelperDefine.NEXT_PAGING_INDEX);
                        if (nextPagingIndex != null && nextPagingIndex.length() > 0) {
                            // May throw NumberFormatException on a malformed
                            // index; handled by the outer catch.
                            pagingIndex = Integer.parseInt(nextPagingIndex);
                            Log.i(TAG, "PagingIndex = " + nextPagingIndex);
                        } else {
                            pagingIndex = -1;
                        }

                        ArrayList<String> productStringList =
                                bundle.getStringArrayList(HelperDefine.KEY_NAME_RESULT_LIST);
                        if (productStringList != null) {
                            for (String productString : productStringList) {
                                ProductVo productVo = new ProductVo(productString);
                                mProductsDetails.add(productVo);
                            }
                        } else {
                            Log.i(TAG, "Bundle Value 'RESULT_LIST' is null.");
                        }
                    }
                }
                // ============================================================
                // 4) If failed, print log.
                // ============================================================
                else {
                    Log.e(TAG, mErrorVo.getErrorString());
                    return true;
                }
                // ============================================================
            } while (pagingIndex > 0);
        } catch (Exception e) {
            mErrorVo.setError(
                    HelperDefine.IAP_ERROR_COMMON,
                    mContext.getString(
                            R.string.mids_sapps_pop_unknown_error_occurred));
            e.printStackTrace();
            return false;
        }

        return true;
    }
}
/******************************************************************************* * (c) Copyright 2014 Hewlett-Packard Development Company, L.P. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License v2.0 which accompany this distribution. * * The Apache License is available at * http://www.apache.org/licenses/LICENSE-2.0 * *******************************************************************************/ package io.cloudslang.lang.runtime.navigations; import com.hp.oo.sdk.content.annotations.Param; import io.cloudslang.lang.entities.ScoreLangConstants; import io.cloudslang.lang.runtime.env.RunEnvironment; import io.cloudslang.lang.runtime.events.LanguageEventData; import io.cloudslang.lang.runtime.steps.AbstractExecutionData; import io.cloudslang.score.lang.ExecutionRuntimeServices; import org.apache.commons.lang3.tuple.Pair; import static io.cloudslang.score.api.execution.ExecutionParametersConsts.EXECUTION_RUNTIME_SERVICES; /** * @author stoneo * @version $Id$ * @since 22/10/2014 */ public class Navigations { /** * Returns the next step position to navigate to. * In case an error key was set in the runtime services, throw an error event and return null. 
* * @param runEnv the run environment * @param executionRuntimeServices the runtime services * @return the step id the score engine needs to navigate to the next step */ public Long navigate(@Param(ScoreLangConstants.RUN_ENV) RunEnvironment runEnv, @Param(EXECUTION_RUNTIME_SERVICES) ExecutionRuntimeServices executionRuntimeServices) { // If we have an error key stored, we fire an error event and return null as the next position if (executionRuntimeServices.hasStepErrorKey()) { AbstractExecutionData.fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.SLANG_EXECUTION_EXCEPTION, "Error detected during step", LanguageEventData.StepType.NAVIGATION, null, Pair.of(LanguageEventData.EXCEPTION, executionRuntimeServices.getStepErrorKey())); throw new RuntimeException(executionRuntimeServices.getStepErrorKey()); } // return the next step position from the run env return runEnv.removeNextStepPosition(); } }
package com.katbillings.pomodoro.data; import com.katbillings.pomodoro.models.DailyLog; import org.springframework.data.repository.CrudRepository; import org.springframework.stereotype.Repository; @Repository public interface DailyLogRepository extends CrudRepository<DailyLog, Integer> { }
package com.atguigu.gulimall.product.entity; import com.atguigu.common.valid.AddGroup; import com.atguigu.common.valid.ListValue; import com.atguigu.common.valid.UpdateGroup; import com.atguigu.common.valid.UpdateStatusGroup; import com.baomidou.mybatisplus.annotation.TableId; import com.baomidou.mybatisplus.annotation.TableName; import java.io.Serializable; import java.util.Date; import lombok.Data; import org.hibernate.validator.constraints.URL; import javax.validation.constraints.*; /** * 品牌 * * @author guoxiaolong * @email 18240885452@163.com * @date 2020-05-16 02:48:45 */ @Data @TableName("pms_brand") public class BrandEntity implements Serializable { private static final long serialVersionUID = 1L; /** * 品牌id */ @NotNull(message = "修改必须指定品牌id",groups = {UpdateGroup.class}) @Null(message = "新增不能指定id", groups = {AddGroup.class}) @TableId private Long brandId; /** * 品牌名 */ @NotBlank(message = "品牌名必须提交",groups = {UpdateGroup.class,AddGroup.class}) private String name; /** * 品牌logo地址 */ @NotEmpty(groups = {AddGroup.class}) @URL(message = "logo必须是一个合法的url地址",groups = {AddGroup.class,UpdateGroup.class}) private String logo; /** * 介绍 */ private String descript; /** * 显示状态[0-不显示;1-显示] */ //@Pattern @NotNull(groups = {AddGroup.class, UpdateStatusGroup.class}) @ListValue(vals = {0,1},groups = {AddGroup.class, UpdateStatusGroup.class}) private Integer showStatus; /** * 检索首字母 */ @NotEmpty(groups = {AddGroup.class}) @Pattern(regexp="^[a-zA-Z]$",message = "检索首字母必须是一个字母",groups = {AddGroup.class,UpdateGroup.class}) private String firstLetter; /** * 排序 */ @NotNull(groups = {AddGroup.class}) @Min(value = 0,message = "排序必须大于等于0",groups = {AddGroup.class,UpdateGroup.class}) private Integer sort; }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ambari.server.state.kerberos;

import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

/**
 * KerberosConfigurationDescriptor is an implementation of an AbstractKerberosDescriptor that
 * encapsulates data related to an Ambari configuration block.
 * <p/>
 * A KerberosConfigurationDescriptor has the following properties:
 * <ul>
 * <li>type</li>
 * <li>properties</li>
 * </ul>
 * <p/>
 * The following is an example of a JSON structure that will yield a valid KerberosConfigurationDescriptor
 * <pre>
 * {
 *   "core-site": {
 *     "hadoop.security.authentication": "kerberos",
 *     "hadoop.rpc.protection": "authentication; integrity; privacy",
 *     "hadoop.security.authorization": "true"
 *   }
 * }
 * </pre>
 * <p/>
 * In this implementation,
 * {@link org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptor#name} will hold the
 * KerberosConfigurationDescriptor#type value
 */
public class KerberosConfigurationDescriptor extends AbstractKerberosDescriptor {
  /**
   * A Map of the properties in this KerberosConfigurationDescriptor.
   * Stored as a TreeMap so keys are kept in sorted order; null until the
   * first property is set.
   */
  private Map<String, String> properties = null;

  /**
   * Creates a new KerberosConfigurationDescriptor
   * <p/>
   * See {@link org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor} for an
   * example JSON structure that may be used to generate this map.
   *
   * @param data a Map of values used to populate the data for the new instance
   * @see org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor
   */
  public KerberosConfigurationDescriptor(Map<?, ?> data) {
    if (data != null) {
      Set<?> keySet = data.keySet();

      if (!keySet.isEmpty()) {
        // Only a single entry is expected... the single key becomes the
        // descriptor's type and its Map value supplies the properties.
        Object key = keySet.iterator().next();

        if (key != null) {
          Object object = data.get(key);

          setType(key.toString());

          if (object instanceof Map) {
            // Keys and values are stringified; a null value is preserved
            // as null rather than the string "null".
            for (Map.Entry<?, ?> entry : ((Map<?, ?>) object).entrySet()) {
              Object value = entry.getValue();
              putProperty(entry.getKey().toString(), (value == null) ? null : value.toString());
            }
          }
        }
      }
    }
  }

  /**
   * Returns the type of the configuration data represented by this KerberosConfigurationDescriptor
   *
   * @return a String declaring the configuration type, i.e., core-site
   */
  public String getType() {
    return getName();
  }

  /**
   * Sets the type of the configuration data represented by this KerberosConfigurationDescriptor
   *
   * @param type a String declaring the configuration group type, i.e., core-site
   */
  public void setType(String type) {
    setName(type);
  }

  /**
   * Set the properties of the configuration data represented by this KerberosConfigurationDescriptor
   *
   * @param properties a Map of properties (defensively copied into a sorted map;
   *                   null clears the property set)
   */
  public void setProperties(Map<String, String> properties) {
    if (properties == null) {
      this.properties = null;
    } else {
      this.properties = new TreeMap<>(properties);
    }
  }

  /**
   * Gets the properties of the configuration data represented by this KerberosConfigurationDescriptor
   *
   * @return a Map of properties (the internal map, not a copy; may be null)
   */
  public Map<String, String> getProperties() {
    return properties;
  }

  /**
   * Gets the value of the configuration property with the specified name
   *
   * @param name a String declaring the name of the property to retrieve
   * @return a String or null if the property value is not found
   */
  public String getProperty(String name) {
    return ((name == null) || (properties == null)) ? null : properties.get(name);
  }

  /**
   * Adds or updates the value of the configuration property with the specified name
   * <p/>
   * If the property exists, it will be overwritten; else a new entry will be created.
   *
   * @param name  a String declaring the name of the property to set
   * @param value a String containing the value of the property
   * @throws IllegalArgumentException if name is null
   */
  public void putProperty(String name, String value) {
    if (name == null) {
      throw new IllegalArgumentException("The property name must not be null");
    }

    if (properties == null) {
      // Lazily created so an untouched descriptor keeps properties == null.
      properties = new TreeMap<>();
    }

    properties.put(name, value);
  }

  /**
   * Updates this KerberosConfigurationDescriptor with data from another KerberosConfigurationDescriptor
   * <p/>
   * Properties will be updated if the relevant updated values are not null.
   *
   * @param updates the KerberosConfigurationDescriptor containing the updated values
   */
  public void update(KerberosConfigurationDescriptor updates) {
    if (updates != null) {
      // The type is overwritten; properties are merged entry by entry so
      // existing entries not present in the update are retained.
      setType(updates.getType());

      Map<String, String> updatedProperties = updates.getProperties();
      if (updatedProperties != null) {
        for (Map.Entry<String, String> entry : updatedProperties.entrySet()) {
          putProperty(entry.getKey(), entry.getValue());
        }
      }
    }
  }

  /**
   * Creates a Map of values that can be used to create a copy of this KerberosConfigurationDescriptor
   * or generate the JSON structure described in
   * {@link org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor}
   *
   * @return a Map of values for this KerberosConfigurationDescriptor
   * @see org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor
   */
  @Override
  public Map<String, Object> toMap() {
    // TreeMap keeps the emitted structure deterministically key-ordered.
    Map<String, Object> map = new TreeMap<>();
    map.put(getName(), (properties == null) ? null : new TreeMap<String, Object>(properties));
    return map;
  }

  @Override
  public int hashCode() {
    return super.hashCode() + ((getProperties() == null)
        ? 0
        : getProperties().hashCode());
  }

  @Override
  public boolean equals(Object object) {
    if (object == null) {
      return false;
    } else if (object == this) {
      return true;
    } else if (object.getClass() == KerberosConfigurationDescriptor.class) {
      // Exact-class check: subclasses are never equal to this type.
      KerberosConfigurationDescriptor descriptor = (KerberosConfigurationDescriptor) object;
      return super.equals(object) &&
          (
              (getProperties() == null)
                  ? (descriptor.getProperties() == null)
                  : getProperties().equals(descriptor.getProperties())
          );
    } else {
      return false;
    }
  }
}
package com.waterdrop.mybatisreactive.boot.autoconfigure.sample.domain; import java.time.LocalDateTime; /** * User * */ public class User { private Long id; private String name; private Integer age; private LocalDateTime createdTime; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public Integer getAge() { return age; } public void setAge(Integer age) { this.age = age; } public LocalDateTime getCreatedTime() { return createdTime; } public void setCreatedTime(LocalDateTime createdTime) { this.createdTime = createdTime; } @Override public String toString() { return "User{" + "id=" + id + ", name='" + name + '\'' + ", age=" + age + ", createdTime=" + createdTime + '}'; } }
/* * JBoss, Home of Professional Open Source. * Copyright 2011, Red Hat, Inc., and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.jmx.logging; import static org.jboss.logging.Logger.Level.ERROR; import static org.jboss.logging.Logger.Level.WARN; import java.util.Hashtable; import java.util.List; import java.util.Map; import javax.management.AttributeNotFoundException; import javax.management.InstanceNotFoundException; import javax.management.InvalidAttributeValueException; import javax.management.JMRuntimeException; import javax.management.MBeanException; import javax.management.NotCompliantMBeanException; import javax.management.ObjectName; import javax.management.OperationsException; import javax.management.ReflectionException; import javax.management.RuntimeOperationsException; import javax.management.openmbean.OpenType; import org.jboss.as.controller.PathAddress; import org.jboss.dmr.ModelType; import org.jboss.logging.BasicLogger; import org.jboss.logging.Logger; import org.jboss.logging.annotations.Cause; import org.jboss.logging.annotations.LogMessage; import 
org.jboss.logging.annotations.Message; import org.jboss.logging.annotations.MessageLogger; import org.jboss.msc.service.StartException; /** * @author <a href="mailto:jperkins@redhat.com">James R. Perkins</a> * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a> */ @SuppressWarnings("DefaultAnnotationParam") @MessageLogger(projectCode = "WFLYJMX", length = 4) public interface JmxLogger extends BasicLogger { /** * A logger with the category of the package name. */ JmxLogger ROOT_LOGGER = Logger.getMessageLogger(JmxLogger.class, "org.jboss.as.jmx"); // /** // * Creates an exception indicating the inability to shutdown the RMI registry. // * // * @param cause the cause of the error. // */ // @LogMessage(level = ERROR) // @Message(id = 1, value = "Could not shutdown rmi registry") // void cannotShutdownRmiRegistry(@Cause Throwable cause); // /** // * Creates an exception indicating the JMX connector could not unbind from the registry. // * // * @param cause the cause of the error. // */ // @LogMessage(level = ERROR) // @Message(id = 2, value = "Could not stop connector server") // void cannotStopConnectorServer(@Cause Throwable cause); // /** // * Creates an exception indicating the JMX connector could not unbind from the registry. // * // * @param cause the cause of the error. // */ // @LogMessage(level = ERROR) // @Message(id = 3, value = "Could not unbind jmx connector from registry") // void cannotUnbindConnector(@Cause Throwable cause); /** * Logs a warning message indicating no {@link javax.management.ObjectName} is available to unregister. */ @LogMessage(level = WARN) @Message(id = 4, value = "No ObjectName available to unregister") void cannotUnregisterObject(); /** * Logs an error message indicating a failure to unregister the object name. * * @param cause the cause of the error. * @param name the name of the object name. 
*/ @LogMessage(level = ERROR) @Message(id = 5, value = "Failed to unregister [%s]") void unregistrationFailure(@Cause Throwable cause, ObjectName name); /** * The jmx-connector element is no longer supported. * */ @LogMessage(level = WARN) @Message(id = 6, value = "<jmx-connector/> is no longer supporting. <remoting-connector/> should be used instead to allow remote connections via JBoss Remoting.") void jmxConnectorNotSupported(); @Message(id = Message.NONE, value = "entry") String compositeEntryTypeName(); @Message(id = Message.NONE, value = "An entry") String compositeEntryTypeDescription(); @Message (id = Message.NONE, value = "The key") String compositeEntryKeyDescription(); @Message (id = Message.NONE, value = "The value") String compositeEntryValueDescription(); @Message (id = Message.NONE, value = "A map") String compositeMapName(); @Message (id = Message.NONE, value = "The map is indexed by 'key'") String compositeMapDescription(); @Message(id = Message.NONE, value = "Complex type") String complexCompositeEntryTypeName(); @Message(id = Message.NONE, value = "A complex type") String complexCompositeEntryTypeDescription(); @Message(id = Message.NONE, value="This mbean does not support expressions for attributes or operation parameters, even when supported by the underlying model. Instead the resolved attribute is returned, and the real typed value must be used when writing attributes/invoking operations.") String descriptorMBeanExpressionSupportFalse(); @Message(id = Message.NONE, value="This mbean supports raw expressions for attributes and operation parameters where supported by the underlying model. 
If no expression is used, the string representation is converted into the real attribute value.") String descriptorMBeanExpressionSupportTrue(); @Message(id = Message.NONE, value="To be able to set and read expressions go to %s") String descriptorAlternateMBeanExpressions(ObjectName objectName); @Message(id = Message.NONE, value="To read resolved values and to write typed attributes and use typed operation parameters go to %s") String descriptorAlternateMBeanLegacy(ObjectName objectName); @Message(id = Message.NONE, value="This attribute supports expressions") String descriptorAttributeExpressionsAllowedTrue(); @Message(id = Message.NONE, value="This attribute does not support expressions") String descriptorAttributeExpressionsAllowedFalse(); // @Message(id = Message.NONE, value="This parameter supports expressions") // String descriptorParameterExpressionsAllowedTrue(); // @Message(id = Message.NONE, value="This parameter does not support expressions") // String descriptorParameterExpressionsAllowedFalse(); @Message(id = Message.NONE, value="A composite type representing a property") String propertyCompositeType(); @Message(id = Message.NONE, value="The property name") String propertyName(); @Message(id = Message.NONE, value="The property value") String propertyValue(); /** * Creates an exception indicating no attribute could be found matching the name. * * @param name the attribute name. * * @return an {@link AttributeNotFoundException} for the error. */ @Message(id = 7, value = "Could not find any attribute matching: %s") AttributeNotFoundException attributeNotFound(String name); /** * Creates an exception indicating the attribute is not writable. * * @param attribute the attribute that is not writable. * * @return an {@link AttributeNotFoundException} for the error. 
*/ @Message(id = 8, value = "Attribute %s is not writable") AttributeNotFoundException attributeNotWritable(javax.management.Attribute attribute); /** * Creates an exception indicating the {@link ObjectName} could not be created for the address. * * * @param cause the cause of the error. * @param address the address. * * @param s the string * @return a {@link RuntimeException} for the error. */ @Message(id = 9, value = "Could not create ObjectName for address %s from string %s") RuntimeException cannotCreateObjectName(@Cause Throwable cause, PathAddress address, String s); /** * Creates an exception indicating the attribute could not be set. * * @param cause the cause of the error. * @param name the name of the attribute. * * @return a {@link ReflectionException} for the error. */ @Message(id = 10, value = "Could not set %s") ReflectionException cannotSetAttribute(@Cause Exception cause, String name); // /** // * Creates an exception indicating no description provider found for the address. // * // * @param address the address. // * // * @return an {@link InstanceNotFoundException} for the exception. // */ // @Message(id = 11, value = "No description provider found for %s") // InstanceNotFoundException descriptionProviderNotFound(PathAddress address); /** * Creates an exception indicating the {@code name1} has a different length than {@code name2}. * * @param name1 the first name. * @param name2 the second name. * * @return an {@link IllegalArgumentException} for the exception. */ @Message(id = 12, value = "%s and %s have different lengths") IllegalArgumentException differentLengths(String name1, String name2); /** * Creates an exception indicating the attribute type is invalid. * * @param cause the cause of the error. * @param name the attribute name. * * @return an {@link InvalidAttributeValueException} for the error. 
*/ @Message(id = 13, value = "Bad type for '%s'") InvalidAttributeValueException invalidAttributeType(@Cause Throwable cause, String name); /** * Creates an exception indicating the key is invalid. * * @param keys the list of keys. * @param entry the entry. * * @return an {@link IllegalArgumentException} for the error. */ @Message(id = 14, value = "Invalid key %s for %s") IllegalArgumentException invalidKey(List<?> keys, Map.Entry<?, Object> entry); /** * Creates an exception indicating the {@link ObjectName} is invalid. * * @param name the name of the object. * @param message a message to append. * * @return an {@link Error} for the error. */ @Message(id = 15, value = "Invalid ObjectName: %s; %s") Error invalidObjectName(String name, String message); /** * Creates an exception indicating the {@link ObjectName} is invalid. * * @param domain the object domain. * @param table the table of name, value pairs. * @param message a message to append. * * @return an {@link Error} for the error. */ @Message(id = Message.INHERIT, value = "Invalid ObjectName: %s,%s; %s") Error invalidObjectName(String domain, Hashtable<String, String> table, String message); /** * Creates an exception indicating the {@link ObjectName} is invalid. * * @param domain the object domain. * @param key the object key. * @param value the object value. * @param message a message to append. * * @return an {@link Error} for the error. */ @Message(id = Message.INHERIT, value = "Invalid ObjectName: %s,%s,%s; %s") Error invalidObjectName(String domain, String key, String value, String message); // /** // * Creates an exception indicating a request was received for the server socket, represented by the {@code name} // * parameter, on the {@code port}, but the service socket is configured for the {@code configuredPort}. // * // * @param name the name of the server socket. // * @param port the port. // * @param configuredPort the configured port // * // * @return an {@link IllegalStateException} for the error. 
// */ // @Message(id = 16, value = "Received request for server socket %s on port [%d] but the service socket configured for port [%d]") // IllegalStateException invalidServerSocketPort(String name, int port, int configuredPort); /** * Creates an exception indicating no MBean found with the name. * * @param name the object name. * * @return an {@link InstanceNotFoundException} for the error. */ @Message(id = 17, value = "No MBean found with name %s") InstanceNotFoundException mbeanNotFound(ObjectName name); /** * Creates an exception indicating a failure to register the MBean. * * @param cause the cause of the error. * @param name the name of the MBean. * * @return a {@link StartException} for the error. */ @Message(id = 18, value = "Failed to register mbean [%s]") StartException mbeanRegistrationFailed(@Cause Throwable cause, String name); /** * Creates an exception indicating there is no operation called {@code operation}. * * @param operation the operation. * * @return a {@link InstanceNotFoundException} for the error. */ @Message(id = 19, value = "No operation called '%s'") InstanceNotFoundException noOperationCalled(String operation); /** * Creates an exception indicating there is no operation called {@code operation} at the {@code address}. * * @param cause the cause of the error. * @param operation the operation. * @param address the address. * * @return a {@link MBeanException} for the error. */ @Message(id = 20, value = "No operation called '%s' at %s") MBeanException noOperationCalled(@Cause Exception cause, String operation, PathAddress address); // id = 21; redundant parameter null check message /** * Creates an exception indicating there is was no registration found for the path address. * * @param address the address. * * @return an {@link InstanceNotFoundException} for the error. 
*/ @Message(id = 22, value = "No registration found for path address %s") InstanceNotFoundException registrationNotFound(PathAddress address); // /** // * A message indicating you cannot create mbeans under the reserved domain. // * // * @param name the reserved name. // * // * @return the message. // */ // @Message(id = 23, value = "You can't create mbeans under the reserved domain '%s'") // String reservedMBeanDomain(String name); /** * Creates an exception indicating the type is unknown. * * @param type the unknown type. * * @return a {@link RuntimeException} for the error. */ @Message(id = 24, value = "Unknown type %s") RuntimeException unknownType(ModelType type); /** * Creates an exception indicating the value is unknown. * * @param value the unknown value. * * @return a {@link IllegalArgumentException} for the error. */ @Message(id = 25, value = "Unknown value %s") IllegalArgumentException unknownValue(Object value); /** * Creates an exception indicating the need for a name parameter for wildcard add. * * @return an {@link IllegalStateException} for the error. 
*/ @Message(id = 26, value = "Need the name parameter for wildcard add") IllegalStateException wildcardNameParameterRequired(); // @Message(id = 27, value="An error happened creating a composite type for %s") // IllegalStateException errorCreatingCompositeType(@Cause OpenDataException e, OpenType<?> type); // @Message(id = 28, value="An error happened creating a composite data for %s") // IllegalStateException errorCreatingCompositeData(@Cause OpenDataException e, OpenType<?> type); @Message(id = 29, value="Unknown domain: %s") IllegalArgumentException unknownDomain(String domain); @Message(id = 30, value="Expression can not be converted into target type %s") IllegalArgumentException expressionCannotBeConvertedIntoTargeteType(OpenType<?> type); @Message(id = 31, value = "Unknown child %s") IllegalArgumentException unknownChild(String child); @Message(id = 32, value = "ObjectName cannot be null") IllegalArgumentException objectNameCantBeNull(); // @Message(id = 33, value = "'domain-name' can only be 'jboss.as'") // String domainNameMustBeJBossAs(); // @Message(id = 34, value = "'false' is the only acceptable value for 'proper-property-format'") // String properPropertyFormatMustBeFalse(); // @Message(id = 35, value = "The 'enabled' attribute of audit-log must be false") // String auditLogEnabledMustBeFalse(); @Message(id = 36, value = "There is no handler called '%s'") IllegalStateException noHandlerCalled(String name); @Message(id = 37, value = "Unauthorized access") JMRuntimeException unauthorized(); @Message(id = 38, value = "Not authorized to write attribute: '%s'") JMRuntimeException notAuthorizedToWriteAttribute(String attributeName); @Message(id = 39, value = "Not authorized to read attribute: '%s'") JMRuntimeException notAuthorizedToReadAttribute(String attributeName); @Message(id = 40, value = "Not authorized to invoke operation: '%s'") JMRuntimeException notAuthorizedToExecuteOperation(String operationName); @Message(id = 41, value="You can't create mbeans 
under the reserved domain '%s'") NotCompliantMBeanException cannotCreateMBeansInReservedDomain(String domain); @Message(id = 42, value="Don't know how to deserialize") OperationsException dontKnowHowToDeserialize(); @Message(id = 43, value="%s is not supported") UnsupportedOperationException unsupportedMethod(String name); @Message(id = 44, value="You can't register mbeans under the reserved domain '%s'") String cannotRegisterMBeansUnderReservedDomain(String domain); @Message(id = 45, value="You can't unregister mbeans under the reserved domain '%s'") String cannotUnregisterMBeansUnderReservedDomain(String domain); @Message(id = 46, value="The ObjectName coming from MBeanRegistration.preRegister() '%s' is in a reserved JMX domain") RuntimeOperationsException badDomainInCalclulatedObjectNameException(ObjectName name); @LogMessage(level = ERROR) @Message(id = 47, value="An error happened unregistering the '%s' MBean registered in a reserved JMX domain") void errorUnregisteringMBeanWithBadCalculatedName(@Cause Exception e, ObjectName name); @Message(id = 48, value = "Add notification listener using ObjectName %s is not supported") UnsupportedOperationException addNotificationListerWithObjectNameNotSupported(ObjectName listener); @Message(id = 49, value = "Remove notification listener using ObjectName %s is not supported") UnsupportedOperationException removeNotificationListerWithObjectNameNotSupported(ObjectName listener); @Message(id = 50, value = "Add notification listener using ObjectName %s is not supported") UnsupportedOperationException addNotificationListenerNotAllowed(ObjectName name); @Message(id = 51, value = "Remove notification listener using ObjectName %s is not supported") UnsupportedOperationException removeNotificationListenerNotAllowed(ObjectName name); // @Message(id = 52, value = "Notification emitted when the process state changes") // String processStateChangeNotificationDescription(); // @Message(id = Message.NONE, value = "The attribute '%s' has 
changed from '%s' to '%s'") // String jmxAttributeChange(String name, String oldState, String stateString); }
/*
 * (c) Copyright Christian P. Fries, Germany. All rights reserved. Contact: email@christian-fries.de.
 *
 * Created on 18.11.2012
 */
package net.finmath.montecarlo.process;

import java.util.Vector;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import net.finmath.montecarlo.BrownianMotionInterface;
import net.finmath.optimizer.SolverException;
import net.finmath.stochastic.RandomVariableInterface;

/**
 * This class implements some numerical schemes for multi-dimensional multi-factor Ito process.
 *
 * It features the standard Euler scheme and the standard predictor-corrector Euler scheme
 * for <i>Y</i>, then applies the <i>state space transform</i> \( X = f(Y) \). For the standard Euler scheme
 * the process Y is discretized as
 * \[
 * Y(t_{i+1}) = Y(t_{i}) + \mu(t_{i}) \Delta t_{i} + \sigma(t_{i}) \Delta W(t_{i}) \text{.}
 * \]
 *
 * Hence, using the <i>state space transform</i>, it is possible to create a log-Euler scheme, i.e.,
 * \[
 * X(t_{i+1}) = X(t_{i}) \cdot \exp\left( (\mu(t_{i}) - \frac{1}{2} \sigma(t_{i})^2) \Delta t_{i} + \sigma(t_{i}) \Delta W(t_{i}) \right) \text{.}
 * \]
 *
 * The dimension is called <code>numberOfComponents</code> here. The default for <code>numberOfFactors</code> is 1.
 *
 * @author Christian Fries
 * @see AbstractProcessInterface The interface definition contains more details.
 * @version 1.4
 */
public class ProcessEulerScheme extends AbstractProcess {

	// Available discretization schemes: plain Euler, or Euler with a
	// predictor-corrector drift adjustment.
	public enum Scheme { EULER, PREDICTOR_CORRECTOR };

	// The Brownian driver supplying increments and constant random variables.
	private BrownianMotionInterface brownianMotion;

	// Discretization scheme; defaults to plain Euler.
	private Scheme scheme = Scheme.EULER;

	// Used locally for multi-threaded calculation; created (and shut down)
	// inside doPrecalculateProcess().
	private ExecutorService executor;

	/*
	 * The storage of the simulated stochastic process.
	 * discreteProcess[timeIndex][componentIndex] holds the value X(t_i);
	 * both caches are filled lazily by doPrecalculateProcess().
	 */
	private transient RandomVariableInterface[][] discreteProcess = null;
	private transient RandomVariableInterface[] discreteProcessWeights;

	/**
	 * Create an Euler discretization scheme.
	 *
	 * @param brownianMotion The Brownian driver of the process
	 * @param scheme The scheme to use. See {@link Scheme}.
	 */
	public ProcessEulerScheme(BrownianMotionInterface brownianMotion, Scheme scheme) {
		super(brownianMotion.getTimeDiscretization());
		this.brownianMotion = brownianMotion;
		this.scheme = scheme;
	}

	/**
	 * Create an Euler discretization scheme (using the default scheme, EULER).
	 *
	 * @param brownianMotion The Brownian driver of the process
	 */
	public ProcessEulerScheme(BrownianMotionInterface brownianMotion) {
		super(brownianMotion.getTimeDiscretization());
		this.brownianMotion = brownianMotion;
	}

	/**
	 * This method returns the realization of the process at a certain time index.
	 *
	 * @param timeIndex Time index at which the process should be observed
	 * @param componentIndex Component index of the process
	 * @return A vector of process realizations (on path)
	 */
	@Override
	public RandomVariableInterface getProcessValue(int timeIndex, int componentIndex) {
		// Thread safe lazy initialization: only one thread triggers the
		// (expensive) precalculation of the whole process.
		synchronized(this) {
			if (discreteProcess == null || discreteProcess.length == 0) {
				doPrecalculateProcess();
			}
		}

		// A null entry means the worker for this component/time returned
		// nothing (see doPrecalculateProcess); treat it as a hard failure.
		if(discreteProcess[timeIndex][componentIndex] == null) {
			throw new NullPointerException("Generation of process component " + componentIndex + " at time index " + timeIndex + " failed. Likely due to out of memory");
		}

		// Return value of process
		return discreteProcess[timeIndex][componentIndex];
	}

	/**
	 * This method returns the weights of a weighted Monte Carlo method (the probability density).
	 *
	 * @param timeIndex Time index at which the process should be observed
	 * @return A vector of positive weights
	 */
	@Override
	public RandomVariableInterface getMonteCarloWeights(int timeIndex) {
		// Thread safe lazy initialization (same pattern as getProcessValue).
		synchronized(this) {
			if (discreteProcessWeights == null || discreteProcessWeights.length == 0) {
				doPrecalculateProcess();
			}
		}

		// Return value of process
		return discreteProcessWeights[timeIndex];
	}

	/**
	 * Calculates the whole (discrete) process: fills discreteProcess and
	 * discreteProcessWeights for all time indices. Each component at a given
	 * time step is computed by its own task on a fixed thread pool.
	 */
	private void doPrecalculateProcess() {
		// Already calculated? Then nothing to do (idempotent).
		if (discreteProcess != null && discreteProcess.length != 0) return;

		final int numberOfPaths = this.getNumberOfPaths();
		final int numberOfFactors = this.getNumberOfFactors();
		final int numberOfComponents = this.getNumberOfComponents();

		// Allocate Memory: one slot per (time step, component) and per time step.
		discreteProcess = new RandomVariableInterface[getTimeDiscretization().getNumberOfTimeSteps() + 1][getNumberOfComponents()];
		discreteProcessWeights = new RandomVariableInterface[getTimeDiscretization().getNumberOfTimeSteps() + 1];

		// Set initial Monte-Carlo weights: uniform 1/numberOfPaths.
		discreteProcessWeights[0] = brownianMotion.getRandomVariableForConstant(1.0 / numberOfPaths);

		// Set initial value: apply the state space transform to the initial state Y(0).
		RandomVariableInterface[] initialState = getInitialState();
		final RandomVariableInterface[] currentState = new RandomVariableInterface[numberOfComponents];
		for (int componentIndex = 0; componentIndex < numberOfComponents; componentIndex++) {
			currentState[componentIndex] = initialState[componentIndex];
			discreteProcess[0][componentIndex] = applyStateSpaceTransform(componentIndex, currentState[componentIndex]);
		}

		/*
		 * Evolve the process using an Euler scheme.
		 * The evolution is performed multi-threaded.
		 * Each component of the vector runs in its own task.
		 */

		// Thread pool size: at least 1, up to twice the number of processors,
		// but never more threads than there are components to compute.
		int numberOfThreads = Math.min(Math.max(2 * Runtime.getRuntime().availableProcessors(),1),numberOfComponents);
		executor = Executors.newFixedThreadPool(numberOfThreads);

		// Evolve process, one time step at a time.
		for (int timeIndex2 = 1; timeIndex2 < getTimeDiscretization().getNumberOfTimeSteps()+1; timeIndex2++) {
			final int timeIndex = timeIndex2;
			// Generate process from timeIndex-1 to timeIndex
			final double deltaT = getTime(timeIndex) - getTime(timeIndex - 1);

			// Fetch drift vector, evaluated at the previous realization.
			RandomVariableInterface[] drift = getDrift(timeIndex - 1, discreteProcess[timeIndex - 1], null);

			// Calculate new realization. Entries for components whose drift is
			// null are never submitted and hence stay null in this vector.
			Vector<Future<RandomVariableInterface>> discreteProcessAtCurrentTimeIndex = new Vector<Future<RandomVariableInterface>>(numberOfComponents);
			discreteProcessAtCurrentTimeIndex.setSize(numberOfComponents);
			for (int componentIndex2 = 0; componentIndex2 < numberOfComponents; componentIndex2++) {
				final int componentIndex = componentIndex2;

				final RandomVariableInterface driftOfComponent = drift[componentIndex];

				// Check if the component process has stopped to evolve
				if (driftOfComponent == null) continue;

				// Worker computing Y(t_i) -> X(t_i) for a single component.
				Callable<RandomVariableInterface> worker = new Callable<RandomVariableInterface>() {
					public RandomVariableInterface call() throws SolverException {
						RandomVariableInterface[] factorLoadings = getFactorLoading(timeIndex - 1, componentIndex, discreteProcess[timeIndex - 1]);

						// Check if the component process has stopped to evolve
						if (factorLoadings == null) return null;

						// Temp storage for variance and diffusion
						RandomVariableInterface diffusionOfComponent = brownianMotion.getRandomVariableForConstant(0.0);

						// Generate values for diffusionOfComponent:
						// sum over factors of factorLoading * dW.
						for (int factor = 0; factor < numberOfFactors; factor++) {
							RandomVariableInterface factorLoading = factorLoadings[factor];
							RandomVariableInterface brownianIncrement = brownianMotion.getBrownianIncrement(timeIndex - 1, factor);

							diffusionOfComponent = diffusionOfComponent.addProduct(factorLoading, brownianIncrement);
						}

						// Euler increment: mu * deltaT + sigma * dW.
						RandomVariableInterface increment = diffusionOfComponent;
						if(driftOfComponent != null) increment = increment.addProduct(driftOfComponent, deltaT);

						// Add increment to state Y and cache the result.
						// NOTE(review): this mutates the shared currentState
						// array from worker threads; safe only because each
						// component index is written by exactly one task and
						// results are joined via Future.get() before reuse.
						currentState[componentIndex] = currentState[componentIndex].add(increment).cache();

						// Transform the state space to the value space and return it.
						return applyStateSpaceTransform(componentIndex, currentState[componentIndex]);
					}
				};

				// The following line will add the result of the calculation to the vector discreteProcessAtCurrentTimeIndex
				discreteProcessAtCurrentTimeIndex.set(componentIndex, executor.submit(worker));
			}

			// Fetch results and move to discreteProcess[timeIndex]
			// (this join also establishes the happens-before needed for the
			// currentState writes performed by the workers).
			for (int componentIndex = 0; componentIndex < numberOfComponents; componentIndex++) {
				try {
					Future<RandomVariableInterface> discreteProcessAtCurrentTimeIndexAndComponent = discreteProcessAtCurrentTimeIndex.get(componentIndex);
					if(discreteProcessAtCurrentTimeIndexAndComponent != null) discreteProcess[timeIndex][componentIndex] = discreteProcessAtCurrentTimeIndexAndComponent.get().cache();
					else discreteProcess[timeIndex][componentIndex] = null;
				} catch (InterruptedException e) {
					// NOTE(review): interruption is swallowed and the thread's
					// interrupt flag is not restored; the affected slot stays
					// null and getProcessValue() later throws for it.
					e.printStackTrace();
				} catch (ExecutionException e) {
					// NOTE(review): worker failures are only printed; the
					// affected slot stays null (see getProcessValue()).
					e.printStackTrace();
				}
			}

			if (scheme == Scheme.PREDICTOR_CORRECTOR) {
				// Apply corrector step to realizations at next time step:
				// re-evaluate the drift at the predicted realization and
				// adjust Y by (driftPredicted - drift)/2 * deltaT.
				RandomVariableInterface[] driftWithPredictor = getDrift(timeIndex - 1, discreteProcess[timeIndex], null);

				for (int componentIndex = 0; componentIndex < getNumberOfComponents(); componentIndex++) {
					RandomVariableInterface driftWithPredictorOfComponent = driftWithPredictor[componentIndex];
					RandomVariableInterface driftWithoutPredictorOfComponent = drift[componentIndex];

					if (driftWithPredictorOfComponent == null || driftWithoutPredictorOfComponent == null) continue;

					// Calculated the predictor corrector drift adjustment
					RandomVariableInterface driftAdjustment = driftWithPredictorOfComponent.sub(driftWithoutPredictorOfComponent).div(2.0).mult(deltaT);

					// Add drift adjustment
					currentState[componentIndex] = currentState[componentIndex].add(driftAdjustment);

					// Re-apply state space transform
					discreteProcess[timeIndex][componentIndex] = applyStateSpaceTransform(componentIndex, currentState[componentIndex]);
				} // End for(componentIndex)
			} // End if(scheme == Scheme.PREDICTOR_CORRECTOR)

			// Set Monte-Carlo weights (constant over time for this scheme).
			discreteProcessWeights[timeIndex] = discreteProcessWeights[timeIndex - 1];
		} // End for(timeIndex)

		try {
			// Shut the pool down so its threads do not linger; a security
			// manager may veto this, in which case we proceed anyway.
			executor.shutdown();
		}
		catch(SecurityException e) {
			// @TODO Improve exception handling here (shutdown denied by a
			// security manager is currently ignored on purpose).
		}
	}

	/**
	 * Reset all precalculated values; the next accessor call recomputes them.
	 */
	private synchronized void reset() {
		this.discreteProcess = null;
		this.discreteProcessWeights = null;
	}

	/**
	 * @return Returns the numberOfPaths.
	 */
	@Override
	public int getNumberOfPaths() {
		return this.brownianMotion.getNumberOfPaths();
	}

	/**
	 * @return Returns the numberOfFactors.
	 */
	@Override
	public int getNumberOfFactors() {
		return this.brownianMotion.getNumberOfFactors();
	}

	/**
	 * @param seed The seed to set.
	 * @deprecated The class will soon be changed to be immutable
	 */
	@Deprecated
	public void setSeed(int seed) {
		// Create a new Brownian motion with the given seed (same
		// discretization, factors and paths as the current one).
		this.setBrownianMotion(new net.finmath.montecarlo.BrownianMotion(
				brownianMotion.getTimeDiscretization(), brownianMotion
				.getNumberOfFactors(), brownianMotion
				.getNumberOfPaths(), seed));
		// Force recalculation of the process
		this.reset();
	}

	/**
	 * @return Returns the Brownian motion used in the generation of the process
	 */
	@Override
	public BrownianMotionInterface getBrownianMotion() {
		return brownianMotion;
	}

	/**
	 * @param brownianMotion The brownianMotion to set.
	 * @deprecated Do not use anymore. Processes should be immutable.
	 */
	@Deprecated
	public void setBrownianMotion(
			net.finmath.montecarlo.BrownianMotion brownianMotion) {
		this.brownianMotion = brownianMotion;
		// Force recalculation of the process
		this.reset();
	}

	/**
	 * @return Returns the scheme.
	 */
	public Scheme getScheme() {
		return scheme;
	}

	/**
	 * @param scheme The scheme to set.
	 * @deprecated Do not use anymore. Processes should be immutable.
	 */
	@Deprecated
	public void setScheme(Scheme scheme) {
		this.scheme = scheme;
		// Force recalculation of the process
		this.reset();
	}

	/* (non-Javadoc)
	 * @see net.finmath.montecarlo.process.AbstractProcess#clone()
	 * NOTE(review): the clone uses the default scheme (EULER), not this
	 * instance's scheme — confirm this is intended.
	 */
	@Override
	public ProcessEulerScheme clone() {
		return new ProcessEulerScheme(getBrownianMotion());
	}

	/* (non-Javadoc)
	 * @see net.finmath.montecarlo.process.AbstractProcess#getCloneWithModifiedSeed(int)
	 */
	@Override
	public Object getCloneWithModifiedSeed(int seed) {
		return new ProcessEulerScheme((BrownianMotionInterface)getBrownianMotion().getCloneWithModifiedSeed(seed));
	}
}
package com.lance.pattern.Structural.adapter; /** * Created by lihua on 26/4/2017. * email: lihua@qq.com */ interface Shape { void draw(int x, int y, int z, int j); }
package br.com.zupacademy.isadora.ecommerce.config.validacao; public class ErroDeFormularioDTO { private String campo; private String erro; public ErroDeFormularioDTO(String campo, String erro) { this.campo = campo; this.erro = erro; } public String getCampo() { return campo; } public String getErro() { return erro; } }
// Copyright 2008-2010 Victor Iacoban
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under
// the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
package org.zmlx.hg4idea.provider.update;

import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.ProjectLevelVcsManager;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.update.*;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.zmlx.hg4idea.ui.HgUpdateDialog;

import javax.swing.*;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;

/**
 * {@link UpdateEnvironment} implementation that performs the "Update Project"
 * action for Mercurial (hg4idea) content roots. Each VCS root is updated via
 * {@link HgRegularUpdater}, using the per-project persisted options held in
 * {@link HgUpdateConfigurationSettings}.
 */
public class HgUpdateEnvironment implements UpdateEnvironment {

  private final Project project;
  // Per-project persisted update options; obtained as a project service.
  @NotNull private final HgUpdateConfigurationSettings updateConfiguration;

  public HgUpdateEnvironment(Project project) {
    this.project = project;
    updateConfiguration = ServiceManager.getService(project, HgUpdateConfigurationSettings.class);
  }

  @Override
  public void fillGroups(UpdatedFiles updatedFiles) {
    // No custom file groups are registered for Mercurial updates.
  }

  /**
   * Updates every given content root that maps to a VCS root.
   *
   * <p>Failures are accumulated: a {@link VcsException} from one root is
   * recorded and the remaining roots are still processed. The returned
   * session is flagged "canceled/failed" if any root's update reported
   * failure.
   *
   * @param contentRoots roots selected for update
   * @param updatedFiles receiver for the files changed by the update
   * @param indicator    progress indicator; may be null (checked before use here,
   *                     and passed through to the updater as-is)
   * @param context      sequential-update context (unused by this implementation)
   */
  @Override
  @NotNull
  public UpdateSession updateDirectories(@NotNull FilePath[] contentRoots,
    UpdatedFiles updatedFiles, ProgressIndicator indicator,
    @NotNull Ref<SequentialUpdatesContext> context) {

    List<VcsException> exceptions = new LinkedList<>();

    // Single-element array so the lambda below can fold in each root's
    // success flag; becomes false as soon as any root fails.
    boolean[] result = {true};
    for (FilePath contentRoot : contentRoots) {
      if (indicator != null) {
        indicator.checkCanceled();
      }
      VirtualFile repository =
        ProjectLevelVcsManager.getInstance(project).getVcsRootFor(contentRoot);
      if (repository == null) {
        // Content root is not under a recognized VCS root — skip it.
        continue;
      }
      // The actual hg update must not be interrupted mid-way, hence the
      // non-cancelable section.
      ProgressManager.getInstance().executeNonCancelableSection(()->{
        try {
          HgUpdater updater = new HgRegularUpdater(project, repository, updateConfiguration);
          result[0] &= updater.update(updatedFiles, indicator, exceptions);
        } catch (VcsException e) {
          //TODO include module name where exception occurred
          exceptions.add(e);
        }
      });
    }
    return new UpdateSessionAdapter(exceptions, !result[0]);
  }

  @Override
  public Configurable createConfigurable(Collection<FilePath> contentRoots) {
    // Same options apply to all roots, so contentRoots is not consulted.
    return new UpdateConfigurable(updateConfiguration);
  }

  @Override
  public boolean validateOptions(Collection<FilePath> roots) {
    return true;
  }

  /**
   * Options UI shown in the update dialog; bridges {@link HgUpdateDialog}
   * (the Swing form) and {@link HgUpdateConfigurationSettings} (the
   * persisted state).
   */
  public static class UpdateConfigurable implements Configurable {
    private final HgUpdateConfigurationSettings updateConfiguration;
    protected HgUpdateDialog updateDialog;

    public UpdateConfigurable(@NotNull HgUpdateConfigurationSettings updateConfiguration) {
      this.updateConfiguration = updateConfiguration;
    }

    @Override
    @Nls
    public String getDisplayName() {
      return "Update";
    }

    @Override
    public String getHelpTopic() {
      return "reference.VersionControl.Mercurial.UpdateProject";
    }

    @Override
    public JComponent createComponent() {
      updateDialog = new HgUpdateDialog();
      return updateDialog.getContentPanel();
    }

    @Override
    public boolean isModified() {
      // Always report modified so apply() is invoked unconditionally —
      // NOTE(review): appears intentional (settings are cheap to re-apply);
      // confirm before changing.
      return true;
    }

    @Override
    public void apply() {
      updateDialog.applyTo(updateConfiguration);
    }

    @Override
    public void reset() {
      updateDialog.updateFrom(updateConfiguration);
    }

    @Override
    public void disposeUIResources() {
      updateDialog = null;
    }
  }
}
/* * * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://www.orientechnologies.com * */ package com.orientechnologies.orient.server.plugin; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.common.parser.OSystemVariableResolver; import com.orientechnologies.common.util.OCallable; import com.orientechnologies.common.util.OService; import com.orientechnologies.orient.core.Orient; import com.orientechnologies.orient.core.exception.OConfigurationException; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.server.OServer; import com.orientechnologies.orient.server.config.OServerEntryConfiguration; import com.orientechnologies.orient.server.config.OServerParameterConfiguration; import com.orientechnologies.orient.server.network.OServerNetworkListener; import com.orientechnologies.orient.server.network.protocol.http.ONetworkProtocolHttpAbstract; import com.orientechnologies.orient.server.network.protocol.http.command.get.OServerCommandGetStaticContent; import com.orientechnologies.orient.server.network.protocol.http.command.get.OServerCommandGetStaticContent.OStaticContent; import java.io.BufferedInputStream; import java.io.File; import java.io.InputStream; import java.lang.reflect.Method; import java.net.URL; import java.net.URLClassLoader; import java.util.ArrayList; import 
java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TimerTask; import java.util.concurrent.ConcurrentHashMap; /** * Manages Server Extensions * * @author Luca Garulli (l.garulli--at--orientechnologies.com) * */ public class OServerPluginManager implements OService { private static final int CHECK_DELAY = 5000; private OServer server; private ConcurrentHashMap<String, OServerPluginInfo> activePlugins = new ConcurrentHashMap<String, OServerPluginInfo>(); private ConcurrentHashMap<String, String> loadedPlugins = new ConcurrentHashMap<String, String>(); private volatile TimerTask autoReloadTimerTask; public void config(OServer iServer) { server = iServer; } public void startup() { boolean hotReload = true; boolean dynamic = true; if (server.getConfiguration() != null && server.getConfiguration().properties != null) for (OServerEntryConfiguration p : server.getConfiguration().properties) { if (p.name.equals("plugin.hotReload")) hotReload = Boolean.parseBoolean(p.value); else if (p.name.equals("plugin.dynamic")) dynamic = Boolean.parseBoolean(p.value); } if (!dynamic) return; updatePlugins(); if (hotReload) { // SCHEDULE A TIMER TASK FOR AUTO-RELOAD final TimerTask timerTask = new TimerTask() { @Override public void run() { updatePlugins(); } }; Orient.instance().scheduleTask(timerTask, CHECK_DELAY, CHECK_DELAY); autoReloadTimerTask = timerTask; } } public OServerPluginInfo getPluginByName(final String iName) { if (iName == null) return null; return activePlugins.get(iName); } public String getPluginNameByFile(final String iFileName) { return loadedPlugins.get(iFileName); } public OServerPluginInfo getPluginByFile(final String iFileName) { return getPluginByName(getPluginNameByFile(iFileName)); } public String[] getPluginNames() { return activePlugins.keySet().toArray(new String[activePlugins.size()]); } public void registerPlugin(final OServerPluginInfo iPlugin) { 
final String pluginName = iPlugin.getName(); if (activePlugins.containsKey(pluginName)) throw new IllegalStateException("Plugin '" + pluginName + "' already registered"); activePlugins.putIfAbsent(pluginName, iPlugin); } public Collection<OServerPluginInfo> getPlugins() { return activePlugins.values(); } public void uninstallPluginByFile(final String iFileName) { final String pluginName = loadedPlugins.remove(iFileName); if (pluginName != null) { OLogManager.instance().info(this, "Uninstalling dynamic plugin '%s'...", iFileName); final OServerPluginInfo removedPlugin = activePlugins.remove(pluginName); if (removedPlugin != null) removedPlugin.shutdown(); } } @Override public void shutdown() { OLogManager.instance().info(this, "Shutting down plugins:"); for (Entry<String, OServerPluginInfo> pluginInfoEntry : activePlugins.entrySet()) { OLogManager.instance().info(this, "- %s", pluginInfoEntry.getKey()); final OServerPluginInfo plugin = pluginInfoEntry.getValue(); try { plugin.shutdown(false); } catch (Throwable t) { OLogManager.instance().error(this, "Error during server plugin %s shutdown.", t, plugin); } } if (autoReloadTimerTask != null) autoReloadTimerTask.cancel(); } @Override public String getName() { return "plugin-manager"; } protected String updatePlugin(final File pluginFile) { final String pluginFileName = pluginFile.getName(); if (!pluginFile.isDirectory() && !pluginFileName.endsWith(".jar") && !pluginFileName.endsWith(".zip")) // SKIP IT return null; if( pluginFile.isHidden()) // HIDDEN FILE, SKIP IT return null; OServerPluginInfo currentPluginData = getPluginByFile(pluginFileName); final long fileLastModified = pluginFile.lastModified(); if (currentPluginData != null) { if (fileLastModified <= currentPluginData.getLoadedOn()) // ALREADY LOADED, SKIPT IT return pluginFileName; // SHUTDOWN PREVIOUS INSTANCE try { currentPluginData.shutdown(); activePlugins.remove(loadedPlugins.remove(pluginFileName)); } catch (Exception e) { // IGNORE EXCEPTIONS 
OLogManager.instance().debug(this, "Error on shutdowning plugin '%s'...", e, pluginFileName); } } installDynamicPlugin(pluginFile); return pluginFileName; } protected void registerStaticDirectory(final OServerPluginInfo iPluginData) { Object pluginWWW = iPluginData.getParameter("www"); if (pluginWWW == null) pluginWWW = iPluginData.getName(); final OServerNetworkListener httpListener = server.getListenerByProtocol(ONetworkProtocolHttpAbstract.class); if (httpListener == null) throw new OConfigurationException("HTTP listener not registered while installing Static Content command"); final OServerCommandGetStaticContent command = (OServerCommandGetStaticContent) httpListener .getCommand(OServerCommandGetStaticContent.class); if (command != null) { final URL wwwURL = iPluginData.getClassLoader().findResource("www/"); final OCallable<Object, String> callback; if (wwwURL != null) callback = createStaticLinkCallback(iPluginData, wwwURL); else // LET TO THE COMMAND TO CONTROL IT callback = new OCallable<Object, String>() { @Override public Object call(final String iArgument) { return iPluginData.getInstance().getContent(iArgument); } }; command.registerVirtualFolder(pluginWWW.toString(), callback); } } protected OCallable<Object, String> createStaticLinkCallback(final OServerPluginInfo iPluginData, final URL wwwURL) { return new OCallable<Object, String>() { @Override public Object call(final String iArgument) { String fileName = "www/" + iArgument; final URL url = iPluginData.getClassLoader().findResource(fileName); if (url != null) { final OServerCommandGetStaticContent.OStaticContent content = new OStaticContent(); content.is = new BufferedInputStream(iPluginData.getClassLoader().getResourceAsStream(fileName)); content.contentSize = -1; content.type = OServerCommandGetStaticContent.getContentType(url.getFile()); return content; } return null; } }; } @SuppressWarnings("unchecked") protected OServerPlugin startPluginClass(final URLClassLoader pluginClassLoader, final 
String iClassName, final OServerParameterConfiguration[] params) throws Exception { final Class<? extends OServerPlugin> classToLoad = (Class<? extends OServerPlugin>) Class.forName(iClassName, true, pluginClassLoader); final OServerPlugin instance = classToLoad.newInstance(); // CONFIG() final Method configMethod = classToLoad.getDeclaredMethod("config", OServer.class, OServerParameterConfiguration[].class); configMethod.invoke(instance, server, params); // STARTUP() final Method startupMethod = classToLoad.getDeclaredMethod("startup"); startupMethod.invoke(instance); return instance; } private void updatePlugins() { final File pluginsDirectory = new File(OSystemVariableResolver.resolveSystemVariables("${ORIENTDB_HOME}", ".") + "/plugins/"); if (!pluginsDirectory.exists()) pluginsDirectory.mkdirs(); final File[] plugins = pluginsDirectory.listFiles(); final Set<String> currentDynamicPlugins = new HashSet<String>(); for (Entry<String, String> entry : loadedPlugins.entrySet()) { currentDynamicPlugins.add(entry.getKey()); } if (plugins != null) for (File plugin : plugins) { final String pluginName = updatePlugin(plugin); if (pluginName != null) currentDynamicPlugins.remove(pluginName); } // REMOVE MISSING PLUGIN for (String pluginName : currentDynamicPlugins) uninstallPluginByFile(pluginName); } private void installDynamicPlugin(final File pluginFile) { String pluginName = pluginFile.getName(); final OServerPluginInfo currentPluginData; OLogManager.instance().info(this, "Installing dynamic plugin '%s'...", pluginName); URLClassLoader pluginClassLoader = null; try { final URL url = pluginFile.toURI().toURL(); pluginClassLoader = new URLClassLoader(new URL[] { url }); // LOAD PLUGIN.JSON FILE final URL r = pluginClassLoader.findResource("plugin.json"); if (r == null) { OLogManager.instance().error(this, "Plugin definition file ('plugin.json') is not found for dynamic plugin '%s'", pluginName); throw new IllegalArgumentException(String.format( "Plugin definition file 
('plugin.json') is not found for dynamic plugin '%s'", pluginName)); } final InputStream pluginConfigFile = r.openStream(); try { if (pluginConfigFile == null || pluginConfigFile.available() == 0) { OLogManager.instance().error(this, "Error on loading 'plugin.json' file for dynamic plugin '%s'", pluginName); throw new IllegalArgumentException(String.format("Error on loading 'plugin.json' file for dynamic plugin '%s'", pluginName)); } final ODocument properties = new ODocument().fromJSON(pluginConfigFile); if (properties.containsField("name")) // OVERWRITE PLUGIN NAME pluginName = properties.field("name"); final String pluginClass = properties.field("javaClass"); final OServerPlugin pluginInstance; final Map<String, Object> parameters; if (pluginClass != null) { // CREATE PARAMETERS parameters = properties.field("parameters"); final List<OServerParameterConfiguration> params = new ArrayList<OServerParameterConfiguration>(); for (String paramName : parameters.keySet()) { params.add(new OServerParameterConfiguration(paramName, (String) parameters.get(paramName))); } final OServerParameterConfiguration[] pluginParams = params.toArray(new OServerParameterConfiguration[params.size()]); pluginInstance = startPluginClass(pluginClassLoader, pluginClass, pluginParams); } else { pluginInstance = null; parameters = null; } // REGISTER THE PLUGIN currentPluginData = new OServerPluginInfo(pluginName, (String) properties.field("version"), (String) properties.field("description"), (String) properties.field("web"), pluginInstance, parameters, pluginFile.lastModified(), pluginClassLoader); registerPlugin(currentPluginData); loadedPlugins.put(pluginFile.getName(), pluginName); registerStaticDirectory(currentPluginData); } finally { pluginConfigFile.close(); } } catch (Exception e) { OLogManager.instance().error(this, "Error on installing dynamic plugin '%s'", e, pluginName); } } }
package br.com.bookstore.purchase.purchase; import lombok.Getter; @Getter public enum Sex { MALE, FEMALE, OTHER }
/*
 * This file is part of the DITA Open Toolkit project.
 *
 * Copyright 2011 Jarno Elovirta
 *
 * See the accompanying LICENSE file for applicable license.
 */
package org.dita.dost.util;

import static org.junit.Assert.*;
import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.URLUtils.*;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;

import org.dita.dost.store.StreamStore;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;

import org.dita.dost.TestUtils;

/**
 * Unit tests for {@link Job}: property access, file-info lookup by relative
 * and absolute URI, and input-map/input-dir resolution.
 *
 * <p>NOTE(review): a single {@link Job} instance is shared by all tests and
 * mutated by {@code testSetProperty} and {@code testGetFileInfo}; the tests
 * appear independent of each other's mutations, but confirm before adding
 * tests that read those keys.
 */
public final class JobTest {

    private static final File resourceDir = TestUtils.getResourceDir(JobTest.class);
    private static final File srcDir = new File(resourceDir, "src");
    // Scratch directory the fixtures are copied to; removed in tearDown.
    private static File tempDir;
    // Shared Job under test, loaded once from the copied fixtures.
    private static Job job;

    /** Copies the test fixtures into a temp dir and loads the Job from it. */
    @BeforeClass
    public static void setUp() throws IOException {
        tempDir = TestUtils.createTempDir(JobTest.class);
        TestUtils.copy(srcDir, tempDir);
        job = new Job(tempDir, new StreamStore(tempDir, new XMLUtils()));
    }

    /** Properties persisted in the fixture job file are readable. */
    @Test
    public void testGetProperty() {
        assertEquals("/foo/bar", job.getProperty(INPUT_DIR));
        assertEquals("file:/foo/bar", job.getProperty(INPUT_DIR_URI));
    }

    /** A freshly set property is immediately readable. */
    @Test
    public void testSetProperty() {
        job.setProperty("foo", "bar");
        assertEquals("bar", job.getProperty("foo"));
    }

    /** File info added under a relative URI is found by both relative and absolute URI. */
    @Test
    public void testGetFileInfo() throws URISyntaxException {
        final URI relative = new URI("foo/bar.dita");
        final URI absolute = tempDir.toURI().resolve(relative);
        final Job.FileInfo fi = new Job.FileInfo.Builder().uri(relative).build();
        job.add(fi);
        assertEquals(fi, job.getFileInfo(relative));
        assertEquals(fi, job.getFileInfo(absolute));
        assertNull(job.getFileInfo((URI) null));
    }

    @Test
    public void testGetInputMap() {
        assertEquals(toURI("foo"), job.getInputMap());
    }

    @Test
    public void testGetValue() throws URISyntaxException {
        assertEquals(new URI("file:/foo/bar"), job.getInputDir());
    }

    /**
     * Manual benchmark of Job serialization with 60k file entries; ignored in
     * normal runs (prints elapsed milliseconds instead of asserting).
     */
    @Test
    @Ignore
    public void write_performance_large() throws IOException {
        for (int i = 0; i < 60_000; i++) {
            job.add(Job.FileInfo.builder()
                    .src(new File(tempDir, "topic_" + i + ".dita").toURI())
                    .uri(new File("topic_" + i + ".dita").toURI())
                    .result(new File(tempDir, "topic_" + i + ".html").toURI())
                    .format("dita")
                    .hasKeyref(true)
                    .hasLink(true)
                    .build());
        }
        final long start = System.currentTimeMillis();
        job.write();
        final long end = System.currentTimeMillis();
        System.out.println(((end - start)) + " ms");
    }

    /** Deletes the temp dir created in setUp. */
    @AfterClass
    public static void tearDown() throws IOException {
        TestUtils.forceDelete(tempDir);
    }
}
package com.atguigu.gulimall.product.dao;

import com.atguigu.gulimall.product.entity.CommentReplayEntity;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;

/**
 * MyBatis-Plus mapper for product-review reply relations
 * (original note: 商品评价回复关系). All CRUD operations are inherited
 * from {@link BaseMapper}; no custom queries are defined.
 *
 * @author hhf
 * @email hhf@gmail.com
 * @date 2020-04-01 22:30:38
 */
@Mapper
public interface CommentReplayDao extends BaseMapper<CommentReplayEntity> {

}
// -------------------------------------------------------------------------------- // Copyright 2002-2022 Echo Three, LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // -------------------------------------------------------------------------------- package com.echothree.control.user.forum.common.spec; import com.echothree.control.user.core.common.spec.MimeTypeSpec; public interface ForumMimeTypeSpec extends ForumSpec, MimeTypeSpec { // Nothing additional beyond ForumSpec, MimeTypeSpec }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.security.PrivilegedExceptionAction; import java.util.Collection; import java.util.Date; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import javax.servlet.RequestDispatcher; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.jsp.JspWriter; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.http.HtmlQuoting; import org.apache.hadoop.mapred.JobHistory.JobInfo; import org.apache.hadoop.mapred.JobHistory.Keys; import org.apache.hadoop.mapred.JobTracker.RetireJobInfo; import org.apache.hadoop.mapreduce.JobACL; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.util.ServletUtil; import org.apache.hadoop.util.StringUtils; class JSPUtil { static 
final String PRIVATE_ACTIONS_KEY = "webinterface.private.actions"; //LRU based cache private static final Map<String, JobInfo> jobHistoryCache = new LinkedHashMap<String, JobInfo>(); private static final Log LOG = LogFactory.getLog(JSPUtil.class); /** * Wraps the {@link JobInProgress} object and contains boolean for * 'job view access' allowed or not. * This class is only for usage by JSPs and Servlets. */ static class JobWithViewAccessCheck { private JobInProgress job = null; // true if user is authorized to view this job private boolean isViewAllowed = true; JobWithViewAccessCheck(JobInProgress job) { this.job = job; } JobInProgress getJob() { return job; } boolean isViewJobAllowed() { return isViewAllowed; } void setViewAccess(boolean isViewAllowed) { this.isViewAllowed = isViewAllowed; } } /** * Validates if current user can view the job. * If user is not authorized to view the job, this method will modify the * response and forwards to an error page and returns Job with * viewJobAccess flag set to false. * @return JobWithViewAccessCheck object(contains JobInProgress object and * viewJobAccess flag). Callers of this method will check the flag * and decide if view should be allowed or not. Job will be null if * the job with given jobid doesnot exist at the JobTracker. 
*/ public static JobWithViewAccessCheck checkAccessAndGetJob(final JobTracker jt, JobID jobid, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { final JobInProgress job = jt.getJob(jobid); JobWithViewAccessCheck myJob = new JobWithViewAccessCheck(job); if (!jt.areACLsEnabled() || job == null) { return myJob; } String user = request.getRemoteUser(); if (user == null) { JSPUtil.setErrorAndForward("Null user", request, response); myJob.setViewAccess(false); return myJob; } final UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user); try { ugi.doAs(new PrivilegedExceptionAction<Void>() { public Void run() throws IOException, ServletException { // checks job view permission jt.getACLsManager().checkAccess(job, ugi, Operation.VIEW_JOB_DETAILS); return null; } }); } catch (AccessControlException e) { String errMsg = "User " + ugi.getShortUserName() + " failed to view " + jobid + "!<br><br>" + e.getMessage() + "<hr><a href=\"jobtracker.jsp\">Go back to JobTracker</a><br>"; JSPUtil.setErrorAndForward(errMsg, request, response); myJob.setViewAccess(false); } catch (InterruptedException e) { String errMsg = " Interrupted while trying to access " + jobid + "<hr><a href=\"jobtracker.jsp\">Go back to JobTracker</a><br>"; JSPUtil.setErrorAndForward(errMsg, request, response); myJob.setViewAccess(false); } return myJob; } /** * Sets error code SC_UNAUTHORIZED in response and forwards to * error page which contains error message and a back link. 
*/ public static void setErrorAndForward(String errMsg, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { request.setAttribute("error.msg", errMsg); RequestDispatcher dispatcher = request.getRequestDispatcher( "/job_authorization_error.jsp"); response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); dispatcher.forward(request, response); } /** * Method used to process the request from the job page based on the * request which it has received. For example like changing priority. * * @param request HTTP request Object. * @param response HTTP response object. * @param tracker {@link JobTracker} instance * @throws IOException * @throws InterruptedException * @throws ServletException */ public static void processButtons(HttpServletRequest request, HttpServletResponse response, final JobTracker tracker) throws IOException, InterruptedException, ServletException { String user = request.getRemoteUser(); if (privateActionsAllowed(tracker.conf) && request.getParameter("killJobs") != null) { String[] jobs = request.getParameterValues("jobCheckBox"); if (jobs != null) { boolean notAuthorized = false; String errMsg = "User " + user + " failed to kill the following job(s)!<br><br>"; for (String job : jobs) { final JobID jobId = JobID.forName(job); if (user != null) { UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user); try { ugi.doAs(new PrivilegedExceptionAction<Void>() { public Void run() throws IOException{ tracker.killJob(jobId);// checks job modify permission return null; } }); } catch(AccessControlException e) { errMsg = errMsg.concat("<br>" + e.getMessage()); notAuthorized = true; // We don't return right away so that we can try killing other // jobs that are requested to be killed. 
continue; } } else {// no authorization needed tracker.killJob(jobId); } } if (notAuthorized) {// user is not authorized to kill some/all of jobs errMsg = errMsg.concat( "<br><hr><a href=\"jobtracker.jsp\">Go back to JobTracker</a><br>"); setErrorAndForward(errMsg, request, response); return; } } } if (privateActionsAllowed(tracker.conf) && request.getParameter("changeJobPriority") != null) { String[] jobs = request.getParameterValues("jobCheckBox"); if (jobs != null) { final JobPriority jobPri = JobPriority.valueOf(request .getParameter("setJobPriority")); boolean notAuthorized = false; String errMsg = "User " + user + " failed to set priority for the following job(s)!<br><br>"; for (String job : jobs) { final JobID jobId = JobID.forName(job); if (user != null) { UserGroupInformation ugi = UserGroupInformation. createRemoteUser(user); try { ugi.doAs(new PrivilegedExceptionAction<Void>() { public Void run() throws IOException{ // checks job modify permission tracker.setJobPriority(jobId, jobPri); return null; } }); } catch(AccessControlException e) { errMsg = errMsg.concat("<br>" + e.getMessage()); notAuthorized = true; // We don't return right away so that we can try operating on // other jobs. continue; } } else {// no authorization needed tracker.setJobPriority(jobId, jobPri); } } if (notAuthorized) {// user is not authorized to kill some/all of jobs errMsg = errMsg.concat( "<br><hr><a href=\"jobtracker.jsp\">Go back to JobTracker</a><br>"); setErrorAndForward(errMsg, request, response); return; } } } } /** * Method used to generate the Job table for Job pages. * * @param label display heading to be used in the job table. * @param jobs vector of jobs to be displayed in table. * @param refresh refresh interval to be used in jobdetails page. * @param rowId beginning row id to be used in the table. 
* @return * @throws IOException */ public static String generateJobTable(String label, Collection<JobInProgress> jobs , int refresh, int rowId, JobConf conf) throws IOException { boolean isModifiable = label.equals("Running") && privateActionsAllowed(conf); StringBuffer sb = new StringBuffer(); sb.append("<table border=\"1\" cellpadding=\"5\" cellspacing=\"0\" class=\"sortable\">\n"); if (jobs.size() > 0) { if (isModifiable) { sb.append("<form action=\"/jobtracker.jsp\" onsubmit=\"return confirmAction();\" method=\"POST\">"); sb.append("<tr>"); sb.append("<td><input type=\"Button\" onclick=\"selectAll()\" " + "value=\"Select All\" id=\"checkEm\"></td>"); sb.append("<td>"); sb.append("<input type=\"submit\" name=\"killJobs\" value=\"Kill Selected Jobs\">"); sb.append("</td"); sb.append("<td><nobr>"); sb.append("<select name=\"setJobPriority\">"); for (JobPriority prio : JobPriority.values()) { sb.append("<option" + (JobPriority.NORMAL == prio ? " selected=\"selected\">" : ">") + prio + "</option>"); } sb.append("</select>"); sb.append("<input type=\"submit\" name=\"changeJobPriority\" " + "value=\"Change\">"); sb.append("</nobr></td>"); sb.append("<td colspan=\"10\">&nbsp;</td>"); sb.append("</tr>"); sb.append("<td>&nbsp;</td>"); } else { sb.append("<tr>"); } sb.append("<td><b>Jobid</b></td>"); sb.append("<td><b>Started</b></td>"); sb.append("<td><b>Priority</b></td>"); sb.append("<td><b>User</b></td>"); sb.append("<td><b>Name</b></td>"); sb.append("<td><b>Map % Complete</b></td>"); sb.append("<td><b>Map Total</b></td>"); sb.append("<td><b>Maps Completed</b></td>"); sb.append("<td><b>Reduce % Complete</b></td>"); sb.append("<td><b>Reduce Total</b></td>"); sb.append("<td><b>Reduces Completed</b></td>"); sb.append("<td><b>Job Scheduling Information</b></td>"); sb.append("<td><b>Diagnostic Info </b></td>"); sb.append("</tr>\n"); for (Iterator<JobInProgress> it = jobs.iterator(); it.hasNext(); ++rowId) { JobInProgress job = it.next(); Date time = new 
Date(job.getStartTime()); JobProfile profile = job.getProfile(); JobStatus status = job.getStatus(); JobID jobid = profile.getJobID(); int desiredMaps = job.desiredMaps(); int desiredReduces = job.desiredReduces(); int completedMaps = job.finishedMaps(); int completedReduces = job.finishedReduces(); String name = HtmlQuoting.quoteHtmlChars(profile.getJobName()); String jobpri = job.getPriority().toString(); String schedulingInfo = HtmlQuoting.quoteHtmlChars(job.getStatus().getSchedulingInfo()); String diagnosticInfo = HtmlQuoting.quoteHtmlChars(job.getStatus().getFailureInfo()); if (isModifiable) { sb.append("<tr><td><input TYPE=\"checkbox\" " + "onclick=\"checkButtonVerbage()\" " + "name=\"jobCheckBox\" value=" + jobid + "></td>"); } else { sb.append("<tr>"); } sb.append("<td id=\"job_" + rowId + "\"><a href=\"jobdetails.jsp?jobid=" + jobid + "&refresh=" + refresh + "\">" + jobid + "</a></td>" + "<td id=\"started_" + rowId + "\">" + time + "</td>" + "<td id=\"priority_" + rowId + "\">" + jobpri + "</td>" + "<td id=\"user_" + rowId + "\">" + HtmlQuoting.quoteHtmlChars(profile.getUser()) + "</td>" + "<td id=\"name_" + rowId + "\">" + ("".equals(name) ? 
      "&nbsp;" : name) + "</td>" +
      // Map progress: formatted percentage plus an 80px progress-bar graph.
      "<td>" + StringUtils.formatPercent(status.mapProgress(), 2) +
      ServletUtil.percentageGraph(status.mapProgress() * 100, 80) +
      "</td><td>" + desiredMaps + "</td><td>" + completedMaps +
      // Reduce progress, rendered the same way as map progress.
      "</td><td>" + StringUtils.formatPercent(status.reduceProgress(), 2) +
      ServletUtil.percentageGraph(status.reduceProgress() * 100, 80) +
      "</td><td>" + desiredReduces + "</td><td> " + completedReduces +
      "</td><td>" + schedulingInfo +
      "</td><td>" + diagnosticInfo +
      "</td></tr>\n");
        }
        if (isModifiable) {
          sb.append("</form>\n");
        }
      } else {
        // No jobs to display: single placeholder row spanning the table.
        sb.append("<tr><td align=\"center\" colspan=\"8\"><i>none</i>" +
            "</td></tr>\n");
      }
      sb.append("</table>\n");
      return sb.toString();
  }

  /**
   * Builds an HTML table of up to 100 retired (completed and aged-out) jobs,
   * newest first, for display on the JobTracker web UI.
   *
   * <p>Each row links to the job's history page when a history file exists.
   * Cell ids embed {@code rowId}, which is incremented per row so ids stay
   * unique across multiple tables on one page.
   *
   * @param tracker the JobTracker whose retired-job store is rendered
   * @param rowId starting value for the per-row HTML element ids
   * @return the rendered HTML table as a string
   * @throws IOException declared for callers; rendering itself is in-memory
   */
  @SuppressWarnings("unchecked")
  public static String generateRetiredJobTable(JobTracker tracker, int rowId)
    throws IOException {

    StringBuffer sb = new StringBuffer();
    sb.append("<table border=\"1\" cellpadding=\"5\" cellspacing=\"0\">\n");

    // Descending iterator => most recently retired jobs are listed first.
    Iterator<RetireJobInfo> iterator =
      tracker.retireJobs.getAll().descendingIterator();
    if (!iterator.hasNext()) {
      // NOTE(review): colspan="8" but the populated table below has 11
      // columns — the placeholder row will not span the full width.
      sb.append("<tr><td align=\"center\" colspan=\"8\"><i>none</i>" +
      "</td></tr>\n");
    } else {
      sb.append("<tr>");
      sb.append("<td><b>Jobid</b></td>");
      sb.append("<td><b>Priority</b></td>");
      sb.append("<td><b>User</b></td>");
      sb.append("<td><b>Name</b></td>");
      sb.append("<td><b>State</b></td>");
      sb.append("<td><b>Start Time</b></td>");
      sb.append("<td><b>Finish Time</b></td>");
      sb.append("<td><b>Map % Complete</b></td>");
      sb.append("<td><b>Reduce % Complete</b></td>");
      sb.append("<td><b>Job Scheduling Information</b></td>");
      sb.append("<td><b>Diagnostic Info </b></td>");
      sb.append("</tr>\n");
      // Cap the page at 100 retired jobs.
      for (int i = 0; i < 100 && iterator.hasNext(); i++) {
        RetireJobInfo info = iterator.next();
        String historyFile = info.getHistoryFile();
        String historyFileUrl = null;
        if (historyFile != null && !historyFile.equals("")) {
          try {
            // URL-encode the history file name so it is safe as a query param.
            historyFileUrl = URLEncoder.encode(info.getHistoryFile(), "UTF-8");
          } catch (UnsupportedEncodingException e) {
            // UTF-8 is mandated by the JLS, so this is effectively unreachable;
            // the row simply renders without a history link if it happens.
            LOG.warn("Can't create history url ", e);
          }
        }
        sb.append("<tr>");
        sb.append(
            // Job id, hyperlinked to the job-history page when available.
            "<td id=\"job_" + rowId + "\">" +
            (historyFileUrl == null ? "" :
              "<a href=\"" + JobHistoryServer.getHistoryUrlPrefix(tracker.conf) +
              "/jobdetailshistory.jsp?logFile=" + historyFileUrl + "\">") +
            info.status.getJobId() + "</a></td>" +

            "<td id=\"priority_" + rowId + "\">" +
            info.status.getJobPriority().toString() + "</td>" +
            // User-supplied strings are HTML-escaped to prevent injection.
            "<td id=\"user_" + rowId + "\">" +
            HtmlQuoting.quoteHtmlChars(info.profile.getUser()) + "</td>" +
            "<td id=\"name_" + rowId + "\">" +
            HtmlQuoting.quoteHtmlChars(info.profile.getJobName()) + "</td>" +
            "<td>" + JobStatus.getJobRunState(info.status.getRunState()) + "</td>" +
            "<td>" + new Date(info.status.getStartTime()) + "</td>" +
            "<td>" + new Date(info.finishTime) + "</td>" +
            "<td>" + StringUtils.formatPercent(info.status.mapProgress(), 2) +
            ServletUtil.percentageGraph(info.status.mapProgress() * 100, 80) +
            "</td>" +
            "<td>" + StringUtils.formatPercent(info.status.reduceProgress(), 2) +
            ServletUtil.percentageGraph(
                info.status.reduceProgress() * 100, 80) + "</td>" +
            "<td>" + HtmlQuoting.quoteHtmlChars(info.status.getSchedulingInfo()) +
            "</td>" +
            "<td>" + HtmlQuoting.quoteHtmlChars(info.status.getFailureInfo()) +
            "</td></tr>\n");
        rowId++;
      }
    }
    sb.append("</table>\n");
    return sb.toString();
  }

  /** Maps a job-history log file path to its companion job-conf file path. */
  static Path getJobConfFilePath(Path logFile) {
    return JobHistory.confPathFromLogFilePath(logFile);
  }

  /**
   * Read a job-history log file and construct the corresponding {@link JobInfo}
   * . Also cache the {@link JobInfo} for quick serving further requests.
   *
   * @param logFile
   * @param fs
   * @return JobInfo
   * @throws IOException
   */
  static JobInfo getJobInfo(Path logFile, FileSystem fs, JobConf jobConf,
      ACLsManager acLsManager, String user) throws IOException {
    String jobid = getJobID(logFile.getName());
    JobInfo jobInfo = null;
    // The LRU cache is guarded by synchronizing on the map itself.
    synchronized(jobHistoryCache) {
      // remove+put (below) refreshes the entry's position in the LRU order.
      jobInfo = jobHistoryCache.remove(jobid);
      if (jobInfo == null) {
        // Cache miss: parse the history file from disk (can be expensive).
        jobInfo = new JobHistory.JobInfo(jobid);
        LOG.info("Loading Job History file "+jobid + ". Cache size is " +
            jobHistoryCache.size());
        DefaultJobHistoryParser.parseJobTasks(logFile.toUri().getPath(),
            jobInfo, fs);
      }
      jobHistoryCache.put(jobid, jobInfo);
      int CACHE_SIZE =
          jobConf.getInt("mapred.job.tracker.jobhistory.lru.cache.size", 5);
      if (jobHistoryCache.size() > CACHE_SIZE) {
        // Evict the eldest entry; assumes the map iterates in LRU order
        // (i.e. jobHistoryCache is access/insertion ordered — see its decl).
        Iterator<Map.Entry<String, JobInfo>> it =
            jobHistoryCache.entrySet().iterator();
        String removeJobId = it.next().getKey();
        it.remove();
        LOG.info("Job History file removed form cache "+removeJobId);
      }
    }
    // Resolve the caller's identity; null means "whoever is running this JVM".
    UserGroupInformation currentUser;
    if (user == null) {
      currentUser = UserGroupInformation.getCurrentUser();
    } else {
      currentUser = UserGroupInformation.createRemoteUser(user);
    }

    // Authorize the user for view access of this job
    acLsManager.checkAccess(jobid, currentUser, jobInfo.getJobQueue(),
        Operation.VIEW_JOB_DETAILS, jobInfo.get(Keys.USER),
        jobInfo.getJobACLs().get(JobACL.VIEW_JOB));

    return jobInfo;
  }

  /**
   * Check the access for users to view job-history pages.
   *
   * @param request
   * @param response
   * @param fs
   * @param logFile
   * @return the job if authorization is disabled or if the authorization checks
   * pass. Otherwise return null.
   * @throws IOException
   * @throws InterruptedException
   * @throws ServletException
   */
  static JobInfo checkAccessAndGetJobInfo(HttpServletRequest request,
      HttpServletResponse response, final JobConf jobConf,
      final ACLsManager acLsManager, final FileSystem fs, final Path logFile)
      throws IOException, InterruptedException, ServletException {
    String jobid = getJobID(logFile.getName());
    String user = request.getRemoteUser();
    JobInfo job = null;
    if (user != null) {
      try {
        job = JSPUtil.getJobInfo(logFile, fs, jobConf, acLsManager, user);
      } catch (AccessControlException e) {
        // Access denied: render an error page with navigation links instead
        // of propagating the exception to the servlet container.
        String trackerAddress = jobConf.get("mapred.job.tracker.http.address");
        String errMsg = String.format(
            "User %s failed to view %s!<br><br>%s" +
            "<hr>" +
            "<a href=\"jobhistory.jsp\">Go back to JobHistory</a><br>" +
            "<a href=\"http://" + trackerAddress +
            "/jobtracker.jsp\">Go back to JobTracker</a>",
            user, jobid, e.getMessage());
        JSPUtil.setErrorAndForward(errMsg, request, response);
        return null;
      }
    } else {
      // no authorization needed
      job = JSPUtil.getJobInfo(logFile, fs, jobConf, acLsManager, null);
    }
    return job;
  }

  /** Extracts the job id encoded in a history file name. */
  static String getJobID(String historyFileName) {
    return JobHistory.jobIdNameFromLogFileName(historyFileName);
  }

  /** Extracts the user name encoded in a history file name. */
  static String getUserName(String historyFileName) {
    return JobHistory.userNameFromLogFileName(historyFileName);
  }

  /** Extracts the job name encoded in a history file name. */
  static String getJobName(String historyFileName) {
    return JobHistory.jobNameFromLogFileName(historyFileName);
  }

  /**
   * Nicely print the Job-ACLs
   * @param tracker
   * @param jobAcls
   * @param out
   * @throws IOException
   */
  static void printJobACLs(JobTracker tracker,
      Map<JobACL, AccessControlList> jobAcls, JspWriter out)
      throws IOException {
    if (tracker.areACLsEnabled()) {
      // ACLs are enabled: print each configured ACL for this job.
      printJobACLsInternal(jobAcls, out);
    } else {
      // ACLs disabled: everything is world-accessible, shown as "*".
      out.print("<b>Job-ACLs: " + new AccessControlList("*").toString()
          + "</b><br>");
    }
  }

  /**
   * Overload of {@link #printJobACLs(JobTracker, Map, JspWriter)} that reads
   * the ACL-enabled flag from a {@link JobConf} instead of a live JobTracker
   * (used by the history server, which has no tracker instance).
   */
  static void printJobACLs(JobConf conf,
      Map<JobACL, AccessControlList> jobAcls, JspWriter out)
      throws IOException {
    if (conf.getBoolean(JobConf.MR_ACLS_ENABLED, false)) {
      printJobACLsInternal(jobAcls, out);
    } else {
      out.print("<b>Job-ACLs: " + new AccessControlList("*").toString()
          + "</b><br>");
    }
  }

  // Renders one line per configured ACL (view/modify); unset ACLs are skipped.
  private static void printJobACLsInternal(Map<JobACL, AccessControlList> jobAcls,
      JspWriter out) throws IOException {
    // Display job-view-acls and job-modify-acls configured for this job
    out.print("<b>Job-ACLs:</b><br>");
    for (JobACL aclName : JobACL.values()) {
      String aclConfigName = aclName.getAclName();
      AccessControlList aclConfigured = jobAcls.get(aclName);
      if (aclConfigured != null) {
        String aclStr = aclConfigured.toString();
        out.print("&nbsp;&nbsp;&nbsp;&nbsp;" + aclConfigName + ": "
            + aclStr + "<br>");
      }
    }
  }

  /** Whether privileged web-UI actions (e.g. kill job) are enabled in conf. */
  static boolean privateActionsAllowed(JobConf conf) {
    return conf.getBoolean(PRIVATE_ACTIONS_KEY, false);
  }
}
package com.logginghub.container.loader;

import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import org.apache.commons.io.IOUtils;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.logging.Logger;

/**
 * Pre-processor that resolves {@code importJsonContainerFile} references inside
 * a JSON container definition: each element of the top-level {@code container}
 * array that carries that key is replaced by the (recursively pre-processed)
 * {@code container} array of the referenced classpath resource.
 *
 * @author cspiking
 */
public class JSONImportResolverPreProcessor implements PreProcessor {

    private static final String CONTAINER_OBJECT_ID = "container";
    private static final String IMPORT_OBJECT_ID = "importJsonContainerFile";

    private final Gson gson = new Gson();

    /**
     * Reads a container JSON document from {@code inputStream}, resolves all
     * imports, and returns the flattened document as a new stream.
     *
     * @param inputStream the raw container JSON; not closed by this method
     * @return a stream over {@code {"container": [...]} } with imports inlined
     * @throws RuntimeException wrapping any {@link IOException} from reading
     *         the stream or an imported resource
     */
    @Override
    public InputStream preProcessFromInputStreamToInputStream(InputStream inputStream) {
        try {
            final JsonObject processedObjectToReturn = createNewContainerJsonObject();
            final JsonArray arrayOfProcessedObjects = preProcessFromInputStreamToJsonObject(inputStream);
            processedObjectToReturn.add(CONTAINER_OBJECT_ID, arrayOfProcessedObjects);

            final String containerObjectJsonString = gson.toJson(processedObjectToReturn);
            return new ByteArrayInputStream(containerObjectJsonString.getBytes());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /** Reads the stream fully (default charset, matching the write side) and resolves imports. */
    private JsonArray preProcessFromInputStreamToJsonObject(InputStream inputStream) throws IOException {
        final String inputStreamAsJsonString = IOUtils.toString(inputStream, Charset.defaultCharset());
        return resolveImports(inputStreamAsJsonString);
    }

    /**
     * Parses {@code jsonString} and returns its {@code container} array with
     * every {@code importJsonContainerFile} element expanded in place.
     *
     * @throws IllegalArgumentException if the document has no {@code container} array
     * @throws IOException if an imported resource cannot be read
     */
    private JsonArray resolveImports(String jsonString) throws IOException {
        final JsonObject jsonObject = gson.fromJson(jsonString, JsonObject.class);
        final JsonArray containerObjectArray = jsonObject.getAsJsonArray(CONTAINER_OBJECT_ID);
        final JsonArray destinationObjectArray = new JsonArray();

        if (containerObjectArray == null) {
            throw new IllegalArgumentException("Json input does not contain 'container' object : " + jsonString);
        }

        for (JsonElement sourceJsonElement : containerObjectArray) {
            if (sourceJsonElement.isJsonObject()) {
                JsonElement importReferenceElement = sourceJsonElement.getAsJsonObject().get(IMPORT_OBJECT_ID);
                if (importReferenceElement != null && importReferenceElement.isJsonPrimitive()) {
                    // Import marker found: splice in the referenced file's container array.
                    final String importFileName = importReferenceElement.getAsString();
                    final JsonArray importObjectArray = preProcessFromFileName(importFileName);
                    destinationObjectArray.addAll(importObjectArray);
                } else {
                    destinationObjectArray.add(sourceJsonElement);
                }
            } else {
                // Non-object elements (primitives, arrays) are passed through untouched.
                destinationObjectArray.add(sourceJsonElement);
            }
        }

        return destinationObjectArray;
    }

    /**
     * Loads a classpath resource and recursively resolves its imports.
     *
     * <p>Fixes two defects of the previous version: the resource stream is now
     * closed (it used to leak), and a missing resource produces a descriptive
     * {@link IOException} instead of a {@link NullPointerException} later on.
     */
    private JsonArray preProcessFromFileName(String fileName) throws IOException {
        final InputStream importedFileAsStream = ClassLoader.getSystemResourceAsStream(fileName);
        if (importedFileAsStream == null) {
            throw new IOException("Imported container file not found on classpath : " + fileName);
        }
        try {
            return preProcessFromInputStreamToJsonObject(importedFileAsStream);
        } finally {
            importedFileAsStream.close();
        }
    }

    /** Creates an empty {@code {"container": []}} skeleton. */
    private JsonObject createNewContainerJsonObject() {
        final JsonObject jsonObject = new JsonObject();
        jsonObject.add(CONTAINER_OBJECT_ID, new JsonArray());
        return jsonObject;
    }
}
package com.he.srs.util;

import com.he.srs.bean.vo.BaseMsg;
import com.he.srs.bean.vo.ProtoStuffActionEnum;
import io.protostuff.ByteString;
import io.protostuff.LinkedBuffer;
import io.protostuff.ProtostuffIOUtil;
import io.protostuff.Schema;
import io.protostuff.runtime.RuntimeSchema;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Protostuff-based (de)serialization helper.
 *
 * <p>Messages are framed in an envelope ({@link ProtoStuffMsg}) that carries an
 * action code, a timestamp, and the business payload as nested protostuff bytes.
 *
 * <p>Thread-safety: serialization is {@code synchronized} because all calls
 * share the single static {@link LinkedBuffer}; deserialization is stateless.
 *
 * @author gaoweicong
 * @CreateTime 2021/11/24 14:36
 */
@Slf4j
public final class ProtoStaffUtil {

    private static final int DEFAULT_BUFFER_SIZE = 1024;

    // Shared write buffer; guarded by the synchronized serialize() method.
    private static final LinkedBuffer BUFFER = LinkedBuffer.allocate(DEFAULT_BUFFER_SIZE);

    // Schema cache: RuntimeSchema creation is expensive, schemas are immutable.
    private static final Map<Class<?>, Schema<?>> SCHEMA_CACHE = new ConcurrentHashMap<Class<?>, Schema<?>>();

    /**
     * Serializes a message into the envelope format.
     *
     * @param action message action type; its ordinal is written to the wire
     *               (NOTE: reordering enum constants breaks wire compatibility)
     * @param data   business payload, must not be {@code null}
     * @param <T>    payload type
     * @return serialized envelope bytes
     * @throws NullPointerException if {@code data} is null
     */
    public static <T extends BaseMsg> byte[] serializeMsg(ProtoStuffActionEnum action, T data) {
        if (data == null) {
            throw new NullPointerException("the serialize data is null");
        }
        // Parameterized constructor call (was a raw-type instantiation).
        ProtoStuffMsg<T> protoStuffMsg = new ProtoStuffMsg<T>(action.ordinal(), data);
        return protoStuffMsg.serialize();
    }

    /**
     * Deserializes envelope bytes back into the business payload.
     *
     * @param data  serialized envelope bytes
     * @param clazz payload class
     * @param <T>   payload type
     * @return the payload, or {@code null} if the envelope carried none
     */
    public static <T extends BaseMsg> T deserializeMsg(byte[] data, Class<T> clazz) {
        return ProtoStuffMsg.deserialize(data, clazz);
    }

    /**
     * Low-level protostuff serialization. Synchronized because {@link #BUFFER}
     * is shared; the buffer is always cleared afterwards, even on failure.
     */
    @SuppressWarnings("unchecked")
    private static synchronized <T extends BaseMsg> byte[] serialize(T data) {
        Class<T> clazz = (Class<T>) data.getClass();
        Schema<T> schema = getSchema(clazz);
        byte[] result;
        try {
            result = ProtostuffIOUtil.toByteArray(data, schema, BUFFER);
        } finally {
            BUFFER.clear();
        }
        return result;
    }

    /** Low-level protostuff deserialization into a fresh message instance. */
    private static <T extends BaseMsg> T deserialize(byte[] data, Class<T> clazz) {
        Schema<T> schema = getSchema(clazz);
        T result = schema.newMessage();
        ProtostuffIOUtil.mergeFrom(data, result, schema);
        return result;
    }

    /**
     * Returns the cached schema for {@code clazz}, creating it on first use.
     *
     * <p>Uses {@link Map#computeIfAbsent} instead of the previous
     * check-then-act sequence, so concurrent first-time lookups cannot race.
     */
    @SuppressWarnings("unchecked")
    private static <T extends BaseMsg> Schema<T> getSchema(Class<T> clazz) {
        return (Schema<T>) SCHEMA_CACHE.computeIfAbsent(clazz, RuntimeSchema::getSchema);
    }

    /**
     * Wire envelope: action code + timestamp + nested payload bytes.
     */
    @Slf4j
    @ToString
    private static final class ProtoStuffMsg<T extends BaseMsg> extends BaseMsg {

        /**
         * Action code (enum ordinal).
         *
         * @see ProtoStuffActionEnum#ACTION_MQ
         * @see ProtoStuffActionEnum#ACTION_HTTP
         * @see ProtoStuffActionEnum#ACTION_FILE
         */
        private int action;

        /** Serialized business payload. */
        private ByteString bizData;

        // transient: the live payload object is never written to the wire itself.
        private transient T data;

        private long bizDataLength;

        private long timestamp;

        private ProtoStuffMsg(Integer action, T data) {
            this.action = action;
            this.data = data;
            this.timestamp = System.currentTimeMillis();
        }

        /** Serializes the payload into {@link #bizData}, then the envelope itself. */
        private byte[] serialize() {
            if (data != null) {
                this.bizData = ByteString.copyFrom(ProtoStaffUtil.serialize(data));
                this.bizDataLength = (long) bizData.size();
            }
            log.debug("ProtoStuffMsg serialize payload: {}", this);
            return ProtoStaffUtil.serialize(this);
        }

        /**
         * Deserializes an envelope and extracts the payload as {@code clazz}.
         * If {@code clazz} is the envelope type itself, the envelope is
         * returned directly without unwrapping.
         *
         * @return the payload, or {@code null} if the envelope has no payload
         */
        private static <T extends BaseMsg> T deserialize(byte[] data, Class<T> clazz) {
            if (clazz.equals(ProtoStuffMsg.class)) {
                T result = ProtoStaffUtil.deserialize(data, clazz);
                log.debug("ProtoStuffMsg deserialize result:{}", result);
                return result;
            }
            ProtoStuffMsg protoStuffMsg = ProtoStaffUtil.deserialize(data, ProtoStuffMsg.class);
            log.debug("ProtoStuffMsg deserialize result: {}", protoStuffMsg);
            if (protoStuffMsg.bizData != null) {
                T result = ProtoStaffUtil.deserialize(protoStuffMsg.bizData.toByteArray(), clazz);
                log.debug("BaseMsg deserialize result: {}", result);
                return result;
            }
            log.warn("BaseMsg is null");
            return null;
        }
    }
}
package com.alibaba.smart.framework.engine.test.process;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.HashSet;
import java.util.Set;

import com.alibaba.fastjson.JSON;
import com.alibaba.smart.framework.engine.configuration.VariablePersister;
import com.alibaba.smart.framework.engine.constant.RequestMapSpecialKeyConstant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * {@link VariablePersister} that persists variables as JSON (fastjson) and
 * black-lists every String constant declared in
 * {@link RequestMapSpecialKeyConstant}, plus {@code "text"}.
 *
 * Created by 高海军 帝奇 74394 on 2017 October 07:00.
 */
public class CustomVariablePersister implements VariablePersister {

    private static final Logger LOGGER = LoggerFactory.getLogger(CustomVariablePersister.class);

    // Black-listed variable keys; populated once in the static initializer.
    private static final Set<String> hashSet = new HashSet<String>();

    static {
        try {
            Field[] declaredFields = RequestMapSpecialKeyConstant.class.getDeclaredFields();
            for (Field declaredField : declaredFields) {
                // Only static String fields are key constants; filtering also
                // avoids a ClassCastException on fields of other types.
                if (Modifier.isStatic(declaredField.getModifiers())
                        && declaredField.getType() == String.class) {
                    // BUG FIX: Field.get takes the *receiver object* — for
                    // static fields that is null. The old code passed the field
                    // name String, which only worked because the argument is
                    // ignored for static fields.
                    String key = (String) declaredField.get(null);
                    hashSet.add(key);
                }
            }
        } catch (IllegalAccessException e) {
            LOGGER.error(e.getMessage(), e);
        }

        //do something else.
        hashSet.add("text");
    }

    @Override
    public boolean isPersisteVariableInstanceEnabled() {
        return true;
    }

    @Override
    public Set<String> getBlackList() {
        return hashSet;
    }

    /** Serializes a variable value to its JSON representation. */
    @Override
    public String serialize(Object value) {
        return JSON.toJSONString(value);
    }

    /** Deserializes a JSON string back into an instance of {@code clazz}. */
    @Override
    public <T> T deserialize(String text, Class<T> clazz) {
        return JSON.parseObject(text, clazz);
    }
}
/*
 * This file is part of the Disco Deterministic Network Calculator v2.2.6 "Hydra".
 *
 * Copyright (C) 2014 - 2016 Steffen Bondorf
 *
 * disco | Distributed Computer Systems Lab
 * University of Kaiserslautern, Germany
 *
 * http://disco.cs.uni-kl.de
 *
 *
 * The Disco Deterministic Network Calculator (DiscoDNC) is free software;
 * you can redistribute it and/or modify it under the terms of the 
 * GNU Lesser General Public License as published by the Free Software Foundation; 
 * either version 2.1 of the License, or (at your option) any later version.
 * 
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 *
 */

package unikl.disco.nc;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import unikl.disco.curves.ArrivalCurve;
import unikl.disco.curves.ServiceCurve;
import unikl.disco.minplus.Convolution;
import unikl.disco.minplus.Deconvolution;
import unikl.disco.misc.SetUtils;
import unikl.disco.network.Flow;
import unikl.disco.network.Link;
import unikl.disco.network.Network;
import unikl.disco.network.Server;

/**
 * PMOO arrival bounding specialized to sink trees with token-bucket arrival
 * curves and rate-latency service curves. Several alternative code paths are
 * offered (direct parameter computation, generic (de)convolution, optimized
 * (de)convolution, homogeneous-network shortcut); all cache per-(link, flow)
 * bounds in {@code ab_cache}.
 *
 * @author Steffen Bondorf
 */
public class PmooArrivalBound_SinkTreeTbRl extends ArrivalBound {
	// Per-(link, flow) cache of previously computed arrival bounds.
	private PmooSinkTreeTbRlABCache ab_cache = new PmooSinkTreeTbRlABCache();
	
	// No-arg construction is not meaningful; network and configuration are required.
	@SuppressWarnings("unused")
	private PmooArrivalBound_SinkTreeTbRl() {}
	
	public PmooArrivalBound_SinkTreeTbRl( Network network, AnalysisConfig configuration ) {
		this.network = network;
		this.configuration = configuration;
	}
	
	/** Drops all cached bounds by replacing the cache wholesale. */
	public void clearCache() {
		ab_cache = new PmooSinkTreeTbRlABCache();
	}

	/**
	 * 
	 * This code path computes the parameters relevant to construct the resulting arrival bound directly,
	 * i.e., it does not compute and store the entire curves resulting from intermediate computations
	 * in order to do so.
	 * 
	 * @param link Link flow arrive on.
	 * @param f_xfcaller Flows to bound.
	 * @param flow_of_interest The flow of interest to handle with lowest priority.
	 * @return Arrival bound.
	 * @throws Exception Unable to get the flow's sub path for service curve convolution.
	 */
	public Set<ArrivalCurve> computeArrivalBound( Link link, Set<Flow> f_xfcaller, Flow flow_of_interest ) throws Exception {
		Set<ArrivalCurve> result = new HashSet<ArrivalCurve>();
		
		// Get flows of interest
		Set<Flow> f_xfcaller_server = SetUtils.getIntersection( f_xfcaller, network.getFlows( link ) );
		f_xfcaller_server.remove( flow_of_interest );
		if ( f_xfcaller_server.isEmpty() ) {
			// No cross-flows on this link: the (empty) zero bound applies.
			return result;
		}
		
		// Running totals: token-bucket rate (R) and burst (B) summed over flows.
		double R;
		double B;
		double sum_R = 0.0;
		double sum_B = 0.0;
		ArrivalCurve arrival_bound;
		for ( Flow f : f_xfcaller_server ) {
			R = 0.0;
			B = 0.0;
			arrival_bound = ab_cache.getEntry( link, f );
			if( arrival_bound != null ) {
				// Cache hit: reuse the previously derived token-bucket parameters.
				R = arrival_bound.getSustainedRate().doubleValue();
				B = arrival_bound.getBurst().doubleValue();
			} else {
				// Cache miss: burst grows by rate * total latency along the
				// flow's sub-path up to (excluding) this link's source server.
				R = f.getArrivalCurve().getSustainedRate().doubleValue();
				B = f.getArrivalCurve().getBurst().doubleValue();
				double sum_T = 0.0;
				for ( Server s : f.getSubPath( f.getSource(), link.getSource() ).getServers() ) {
					sum_T = sum_T + s.getServiceCurve().getLatency().doubleValue();
				}
				B += R * sum_T;
				ab_cache.addEntry( link, f, ArrivalCurve.createTokenBucket( R, B ) );
			}
			sum_R += R;
			sum_B += B;
		}
		// Aggregate bound: single token bucket with summed rate and burst.
		result.add( ArrivalCurve.createTokenBucket( sum_R, sum_B ) );
		
		return result;
	}

	/**
	 * 
	 * This code path uses the DiscoDNC's convolution operation.
	 * Thus, it operates on entire curves instead of restricting to the relevant values like
	 * computeArrivalBound does.
	 * 
	 * @param link Link flow arrive on.
	 * @param f_xfcaller Flows to bound.
	 * @param flow_of_interest The flow of interest to handle with lowest priority.
	 * @return Arrival bound.
	 * @throws Exception Unable to get the flow's sub path for service curve convolution.
	 */
	public Set<ArrivalCurve> computeArrivalBoundDeConvolution( Link link, Set<Flow> f_xfcaller, Flow flow_of_interest ) throws Exception {
		Set<ArrivalCurve> result = new HashSet<ArrivalCurve>();
		
		// Get flows of interest
		Set<Flow> f_xfcaller_server = SetUtils.getIntersection( f_xfcaller, network.getFlows( link ) );
		f_xfcaller_server.remove( flow_of_interest );
		if ( f_xfcaller_server.isEmpty() ) {
			return result;
		}
		
		ArrivalCurve arrival_bound = ArrivalCurve.createNullArrival();
		ArrivalCurve arrival_bound_f = ArrivalCurve.createNullArrival();
		ServiceCurve sc_s_subpath = ServiceCurve.createZeroDelayInfiniteBurst();
		for ( Flow f : f_xfcaller_server ) {
			arrival_bound_f = ab_cache.getEntry( link, f );
			if( arrival_bound_f == null ) {
				// Convolve the service curves along the sub-path, then
				// deconvolve the flow's arrival curve against that result.
				sc_s_subpath = ServiceCurve.createZeroDelayInfiniteBurst();
				for ( Server s : f.getSubPath( f.getSource(), link.getSource() ).getServers() ) {
					sc_s_subpath = Convolution.convolve( sc_s_subpath, s.getServiceCurve(), false ); // false -> generic convolution
				}
				arrival_bound_f = Deconvolution.deconvolve( f.getArrivalCurve(), sc_s_subpath, false ); // false -> generic deconvolution
			}
			// Note: re-adds the cached entry on a hit as well (idempotent).
			ab_cache.addEntry( link, f, arrival_bound_f );
			
			arrival_bound = ArrivalCurve.add( arrival_bound, arrival_bound_f );
		}
		result.add( arrival_bound );
		
		return result;
	}

	/**
	 * 
	 * This code path uses the DiscoDNC's convolution operation, like computeArrivalBoundDeConvolutionTBRL does,
	 * yet, it uses the optimized convolution and deconvolution operation, respectively,
	 * to directly compute the relevant parameters defining the resulting curves.
	 * 
	 * @param link Link flow arrive on.
	 * @param f_xfcaller Flows to bound.
	 * @param flow_of_interest The flow of interest to handle with lowest priority.
	 * @return Arrival bound.
	 * @throws Exception Unable to get the flow's sub path for service curve convolution.
	 */
	public Set<ArrivalCurve> computeArrivalBoundDeConvolutionTBRL( Link link, Set<Flow> f_xfcaller, Flow flow_of_interest ) throws Exception {
		Set<ArrivalCurve> result = new HashSet<ArrivalCurve>();
		
		// Get flows of interest
		Set<Flow> f_xfcaller_server = SetUtils.getIntersection( f_xfcaller, network.getFlows( link ) );
		f_xfcaller_server.remove( flow_of_interest );
		if ( f_xfcaller_server.isEmpty() ) {
			return result;
		}
		
		ArrivalCurve arrival_bound = ArrivalCurve.createNullArrival();
		ArrivalCurve arrival_bound_f = ArrivalCurve.createNullArrival();
		ServiceCurve sc_s_subpath = ServiceCurve.createZeroDelayInfiniteBurst();
		for ( Flow f : f_xfcaller_server ) {
			arrival_bound_f = ab_cache.getEntry( link, f );
			if( arrival_bound_f == null ) {
				// Same structure as computeArrivalBoundDeConvolution, but with
				// the token-bucket / rate-latency optimized operations.
				sc_s_subpath = ServiceCurve.createZeroDelayInfiniteBurst();
				for ( Server s : f.getSubPath( f.getSource(), link.getSource() ).getServers() ) {
					sc_s_subpath = Convolution.convolve( sc_s_subpath, s.getServiceCurve(), true ); // true -> tb, rl optimized
				}
				arrival_bound_f = Deconvolution.deconvolve( f.getArrivalCurve(), sc_s_subpath, true ); // true -> tb, rl optimized
			}
			ab_cache.addEntry( link, f, arrival_bound_f );
			
			arrival_bound = ArrivalCurve.add( arrival_bound, arrival_bound_f );
		}
		result.add( arrival_bound );
		
		return result;
	}

	/**
	 * 
	 * In homogeneous networks we can simply multiply the common latency with the length of a flow's path
	 * instead of iterating over its servers and sum up for each one's value individually.
	 * 
	 * This code path works similar to computeArrivalBound in the sense that it does not operate on intermediate curves.
	 * 
	 * @param link Link flow arrive on.
	 * @param f_xfcaller Flows to bound.
	 * @param flow_of_interest The flow of interest to handle with lowest priority.
	 * @return Arrival bounds.
*/ public Set<ArrivalCurve> computeArrivalBoundHomogeneous( Link link, Set<Flow> f_xfcaller, Flow flow_of_interest ) { Set<ArrivalCurve> result = new HashSet<ArrivalCurve>(); // Get flows of interest Set<Flow> f_xfcaller_server = SetUtils.getIntersection( f_xfcaller, network.getFlows( link ) ); f_xfcaller_server.remove( flow_of_interest ); if ( f_xfcaller_server.size() == 0 ) { return result; } double sum_R = 0.0; double sum_B = 0.0; double sum_T = 0.0; // There's no need for a cache in this scenario for ( Flow f : f_xfcaller_server ) { sum_R += f.getArrivalCurve().getSustainedRate().doubleValue(); sum_T = f.getPath().numServers() * f.getSource().getServiceCurve().getLatency().doubleValue(); sum_B += f.getArrivalCurve().getBurst().doubleValue() + f.getArrivalCurve().getSustainedRate().doubleValue() * sum_T; } result.add( ArrivalCurve.createTokenBucket( sum_R, sum_B ) ); return result; } } // We use a specialized cache here that stores arrival bounds for single flows on specific links. // See // "Boosting Sensor Network Calculus by Thoroughly Bounding Cross-Traffic" (Steffen Bondorf and Jens B. Schmitt), // in Proc. 34th IEEE International Conference on Computer Communications (INFOCOM 2015). // for more details. 
class PmooSinkTreeTbRlABCache { Map<Link,Map<Flow,ArrivalCurve>> map__link__entries = new HashMap<Link,Map<Flow,ArrivalCurve>>(); protected ArrivalCurve getEntry( Link link, Flow flow ) { Map<Flow,ArrivalCurve> entries_link = map__link__entries.get( link ); if ( entries_link != null ) { return entries_link.get( flow ); } else { // Anticipated following addEntry entries_link = new HashMap<Flow,ArrivalCurve>(); map__link__entries.put( link, entries_link ); return null; } } protected void addEntry( Link link, Flow flow, ArrivalCurve arrival_bound ) { Map<Flow,ArrivalCurve> entries_link = map__link__entries.get( link ); if ( entries_link == null ) { entries_link = new HashMap<Flow,ArrivalCurve>(); map__link__entries.put( link, entries_link ); entries_link = map__link__entries.get( link ); } entries_link.put( flow, arrival_bound ); } }
/*
 * Copyright 2002-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.core.codec;

import org.reactivestreams.Publisher;
import org.springframework.core.ResolvableType;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferFactory;
import org.springframework.lang.Nullable;
import org.springframework.util.MimeType;
import org.springframework.util.MimeTypeUtils;
import reactor.core.publisher.Flux;

import java.util.Map;

/**
 * An {@link AbstractEncoder} that forwards {@link DataBuffer} instances
 * unchanged: the input publisher is handed back to the caller as a
 * {@link Flux}, with no transformation of the buffers themselves.
 *
 * @author Arjen Poutsma
 * @since 5.0
 */
public class DataBufferEncoder extends AbstractEncoder<DataBuffer> {

	public DataBufferEncoder() {
		super(MimeTypeUtils.ALL);
	}


	@Override
	public boolean canEncode(ResolvableType elementType, @Nullable MimeType mimeType) {
		// Resolve the element class first (falling back to Object), then
		// require both the superclass check and DataBuffer assignability.
		Class<?> resolvedClass = elementType.resolve(Object.class);
		boolean mimeTypeSupported = super.canEncode(elementType, mimeType);
		return mimeTypeSupported && DataBuffer.class.isAssignableFrom(resolvedClass);
	}

	@Override
	public Flux<DataBuffer> encode(Publisher<? extends DataBuffer> inputStream,
			DataBufferFactory bufferFactory, ResolvableType elementType,
			@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {

		// Pass-through: adapt the publisher to Flux without touching buffers.
		Flux<DataBuffer> passThrough = Flux.from(inputStream);
		return passThrough;
	}

}
package com.example.noriter;

import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseExpandableListAdapter;
import android.widget.TextView;

import java.util.ArrayList;

/**
 * Expandable-list adapter for review items: each group row shows a review's
 * name/date/title, and its single child row shows the review's contents.
 *
 * Created by 전효승 on 2018-03-16.
 */
public class review_expandablelistview_adapter extends BaseExpandableListAdapter {
    // Backing data: one item per group; the same item backs the group's child row.
    private ArrayList<review_listview_item> listViewItemList = new ArrayList<review_listview_item>() ;

    public review_expandablelistview_adapter(){}

    @Override
    public int getGroupCount() {
        return listViewItemList.size();
    }

    @Override
    public int getChildrenCount(int groupPosition) {
        // Every group has exactly one child: the review body.
        return 1;
    }

    @Override
    public Object getGroup(int groupPosition) {
        return listViewItemList.get(groupPosition);
    }

    @Override
    public Object getChild(int groupPosition, int childPosition) {
        // childPosition is ignored: the child shows the same item as its group.
        return listViewItemList.get(groupPosition);
    }

    @Override
    public long getGroupId(int groupPosition) {
        return groupPosition;
    }

    @Override
    public long getChildId(int groupPosition, int childPosition) {
        return groupPosition;
    }

    @Override
    public boolean hasStableIds() {
        // Ids are positional, so they are not stable across data changes.
        return false;
    }

    /** Renders the collapsed group row (reviewer name, date, title). */
    @Override
    public View getGroupView(int groupPosition, boolean isExpanded, View convertView, ViewGroup parent) {
        final int pos = groupPosition;  // NOTE(review): unused local
        final Context context = parent.getContext();

        // Inflate the "listview_item" layout to obtain a convertView
        // reference, reusing the recycled view when one is provided.
        if (convertView == null) {
            LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            convertView = inflater.inflate(R.layout.review_listview, parent, false);
        }

        // Look up the widgets inside the inflated row view.
        TextView nameTextView = (TextView) convertView.findViewById(R.id.review_listview_name) ;
        TextView dateTextView = (TextView) convertView.findViewById(R.id.review_listview_date) ;
        TextView titleTextView = (TextView) convertView.findViewById(R.id.review_listview_title) ;

        // Fetch the data item at this group position.
        review_listview_item listViewItem = listViewItemList.get(groupPosition);

        // Bind the item's fields to the row widgets.
        nameTextView.setText(listViewItem.getname());
        dateTextView.setText(listViewItem.getdate());
        titleTextView.setText(listViewItem.gettitle());

        return convertView;
    }

    /** Renders the expanded child row (review contents). */
    @Override
    public View getChildView(int groupPosition, int childPosition, boolean isLastChild, View convertView, ViewGroup parent) {
        final int pos = groupPosition;  // NOTE(review): unused local
        final Context context = parent.getContext();

        // Inflate the contents layout, reusing the recycled view when provided.
        if (convertView == null) {
            LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            convertView = inflater.inflate(R.layout.review_listview_contents, parent, false);
        }

        // Look up the contents widget inside the inflated row view.
        TextView contentsTextView = (TextView) convertView.findViewById(R.id.review_listview_contents_contents) ;

        // Fetch the data item at this group position (shared with the group row).
        review_listview_item listViewItem = listViewItemList.get(groupPosition);

        // Bind the review body text.
        contentsTextView.setText(listViewItem.getContents());

        return convertView;
    }

    @Override
    public boolean isChildSelectable(int groupPosition, int childPosition) {
        return false;
    }

    /**
     * Appends a new review item built from the given fields.
     * Note: callers must invoke notifyDataSetChanged() themselves.
     */
    public void addItem(int rindex, int pindex, String name, String date, String title, String Contents) {
        review_listview_item item = new review_listview_item();

        item.setrindex(rindex);
        item.setpindex(pindex);
        item.setname(name);
        item.setdate(date);
        item.settitle(title);
        item.setContents(Contents);

        listViewItemList.add(item);
    }
}
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * 
*/
package org.unitime.timetable.util.duration;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.unitime.timetable.model.DatePattern;

/**
 * Duration model where the requirement is a total number of minutes per
 * semester, accepted within a configurable [lower, upper] tolerance band.
 * The optional parameter string {@code "<lower>,<upper>"} overrides the
 * default band of [0.95, 1.10].
 */
public class MeetingMinutes extends MeetingCountingDuration {
	// Tolerance band applied in check(): lower * required <= actual <= upper * required.
	protected double iLowerBound = 0.95;
	protected double iUpperBound = 1.10;
	
	/**
	 * @param parameter optional "lower,upper" bounds (decimal numbers); when
	 *        absent or malformed the defaults 0.95 / 1.10 are kept
	 */
	public MeetingMinutes(String parameter) {
		if (parameter != null) {
			Matcher matcher = Pattern.compile(getParamterFormat()).matcher(parameter);
			if (matcher.find()) {
				iLowerBound = Double.parseDouble(matcher.group(1));
				iUpperBound = Double.parseDouble(matcher.group(2));
			}
		}
	}

	/** True iff the scheduled semester minutes fall within the tolerance band. */
	@Override
	public boolean check(int minutes, int semesterMinutes) {
		return iLowerBound * minutes <= semesterMinutes && semesterMinutes <= iUpperBound * minutes;
	}
	
	/**
	 * Smallest number of meetings needed to cover {@code minutes} at
	 * {@code minutesPerMeeting} each (ceiling division).
	 *
	 * <p>BUG FIX: the previous {@code Math.ceil(minutes / minutesPerMeeting)}
	 * applied ceil to an already-truncated integer division, so it always
	 * rounded down. Integer ceiling division is used instead; division by
	 * zero still throws {@link ArithmeticException}, as before.
	 * (Assumes non-negative inputs — TODO confirm with callers.)
	 */
	@Override
	public Integer getMaxMeetings(int minutes, int minutesPerMeeting) {
		return (minutes + minutesPerMeeting - 1) / minutesPerMeeting;
	}
	
	/** Regex for the constructor parameter: two comma-separated decimals. */
	@Override
	public String getParamterFormat() {
		return "([0-9]*\\.?[0-9]+),([0-9]*\\.?[0-9]+)";
	}
	
	/** Minutes per meeting when total minutes are spread over the date pattern's meetings. */
	@Override
	public int getExactTimeMinutesPerMeeting(int minutes, DatePattern datePattern, int dayCode) {
		int meetings = nbrMeetings(datePattern, dayCode);
		// Guard against zero meetings to avoid division by zero.
		return (meetings <= 0 ? 0 : minutes / meetings);
	}
	
	/**
	 * Arranged hours estimate: total minutes divided by 50-minute hours per
	 * effective week. For a pattern set, the first child pattern is used.
	 *
	 * @return null when minutes are non-positive or no date pattern is given
	 */
	@Override
	public Integer getArrangedHours(int minutes, DatePattern datePattern) {
		if (minutes <= 0 || datePattern == null) return null;
		if (datePattern.getType() != null && datePattern.getType() == DatePattern.sTypePatternSet) {
			// Use the first child of the alternative-pattern set.
			for (DatePattern child: datePattern.findChildren())
				return Integer.valueOf(Math.round(minutes / (50f * child.getEffectiveNumberOfWeeks())));
		}
		return Integer.valueOf(Math.round(minutes / (50f * datePattern.getEffectiveNumberOfWeeks())));
	}
}
/**
 * Copyright (C) 2012 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onebusaway.gtfs_merge.strategies;

import java.util.Collection;
import java.util.Set;
import java.util.TimeZone;

import org.onebusaway.gtfs.impl.calendar.CalendarServiceDataFactoryImpl;
import org.onebusaway.gtfs.model.AgencyAndId;
import org.onebusaway.gtfs.model.ServiceCalendar;
import org.onebusaway.gtfs.model.ServiceCalendarDate;
import org.onebusaway.gtfs.model.Trip;
import org.onebusaway.gtfs.model.calendar.ServiceDate;
import org.onebusaway.gtfs.services.GtfsMutableRelationalDao;
import org.onebusaway.gtfs.services.GtfsRelationalDao;
import org.onebusaway.gtfs_merge.GtfsMergeContext;
import org.onebusaway.gtfs_merge.strategies.scoring.DuplicateScoringSupport;

/**
 * Merge strategy covering {@link ServiceCalendar} and
 * {@link ServiceCalendarDate} entities together, since both belong to the
 * larger collection of entities keyed by a single {@code service_id}.
 *
 * @author bdferris
 */
public class ServiceCalendarMergeStrategy extends
    AbstractCollectionEntityMergeStrategy<AgencyAndId> {

  public ServiceCalendarMergeStrategy() {
    super("calendar.txt/calendar_dates.txt service_id");
  }

  /** Registers the two entity classes handled by this strategy. */
  @Override
  public void getEntityTypes(Collection<Class<?>> entityTypes) {
    entityTypes.add(ServiceCalendar.class);
    entityTypes.add(ServiceCalendarDate.class);
  }

  /** The merge keys are all service ids present in the feed. */
  @Override
  protected Collection<AgencyAndId> getKeys(GtfsRelationalDao dao) {
    return dao.getAllServiceIds();
  }

  /**
   * Scores how likely two service ids refer to the same calendar: the overlap
   * of their active service dates. Two calendars sharing most dates are
   * treated as duplicates.
   *
   * This doesn't actually do so well when merging two feeds with different
   * service calendars (eg. before and after a schedule shakeup), which is a
   * trickier problem to solve.
   */
  @Override
  protected double scoreDuplicateKey(GtfsMergeContext context, AgencyAndId key) {
    Set<ServiceDate> datesInSource = getServiceDatesForServiceId(
        context.getSource(), key);
    Set<ServiceDate> datesInTarget = getServiceDatesForServiceId(
        context.getTarget(), key);
    return DuplicateScoringSupport.scoreElementOverlap(datesInSource,
        datesInTarget);
  }

  /**
   * Computes the set of active service dates for {@code serviceId} in the
   * given feed, using the default time zone.
   */
  private Set<ServiceDate> getServiceDatesForServiceId(GtfsRelationalDao dao,
      AgencyAndId serviceId) {
    CalendarServiceDataFactoryImpl calendarFactory = new CalendarServiceDataFactoryImpl();
    calendarFactory.setGtfsDao(dao);
    return calendarFactory.getServiceDatesForServiceId(serviceId,
        TimeZone.getDefault());
  }

  /**
   * Rewrites every reference to {@code oldId} as {@code newId} across the
   * source feed's {@link ServiceCalendar}, {@link ServiceCalendarDate}, and
   * {@link Trip} entities.
   */
  @Override
  protected void renameKey(GtfsMergeContext context, AgencyAndId oldId,
      AgencyAndId newId) {
    GtfsRelationalDao sourceDao = context.getSource();
    ServiceCalendar serviceCalendar = sourceDao.getCalendarForServiceId(oldId);
    if (serviceCalendar != null) {
      serviceCalendar.setServiceId(newId);
    }
    for (ServiceCalendarDate exceptionDate : sourceDao.getCalendarDatesForServiceId(oldId)) {
      exceptionDate.setServiceId(newId);
    }
    for (Trip affectedTrip : sourceDao.getTripsForServiceId(oldId)) {
      affectedTrip.setServiceId(newId);
    }
  }

  /**
   * Copies the {@link ServiceCalendar} and all {@link ServiceCalendarDate}
   * entities for {@code serviceId} from the source feed into the merged
   * output feed.
   */
  @Override
  protected void saveElementsForKey(GtfsMergeContext context,
      AgencyAndId serviceId) {
    GtfsRelationalDao sourceDao = context.getSource();
    GtfsMutableRelationalDao mergedDao = context.getTarget();
    ServiceCalendar serviceCalendar = sourceDao.getCalendarForServiceId(serviceId);
    if (serviceCalendar != null) {
      // Reset the synthetic primary key so the target DAO assigns a new one.
      serviceCalendar.setId(0);
      mergedDao.saveEntity(serviceCalendar);
    }
    for (ServiceCalendarDate exceptionDate : sourceDao.getCalendarDatesForServiceId(serviceId)) {
      exceptionDate.setId(0);
      mergedDao.saveEntity(exceptionDate);
    }
  }
}
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE190_Integer_Overflow__long_max_postinc_61a.java
Label Definition File: CWE190_Integer_Overflow.label.xml
Template File: sources-sinks-61a.tmpl.java
*/
/*
 * @description
 * CWE: 190 Integer Overflow
 * BadSource: max Set data to the max value for long
 * GoodSource: A hardcoded non-zero, non-min, non-max, even number
 * Sinks: increment
 *    GoodSink: Ensure there will not be an overflow before incrementing data
 *    BadSink : Increment data, which can cause an overflow
 * Flow Variant: 61 Data flow: data returned from one method to another in different classes in the same package
 *
 * NOTE: This is a generated security-analysis testcase. The flawed code paths
 * below are intentional and must be preserved verbatim for SAST tool testing.
 * */

package testcases.CWE190_Integer_Overflow.s06;

import testcasesupport.*;

import javax.servlet.http.*;

public class CWE190_Integer_Overflow__long_max_postinc_61a extends AbstractTestCase
{
    public void bad() throws Throwable
    {
        long data = (new CWE190_Integer_Overflow__long_max_postinc_61b()).badSource();

        /* POTENTIAL FLAW: if data == Long.MAX_VALUE, this will overflow */
        /* (the post-increment wraps silently to Long.MIN_VALUE in two's complement) */
        data++;
        long result = (long)(data);
        IO.writeLine("result: " + result);

    }

    public void good() throws Throwable
    {
        goodG2B();
        goodB2G();
    }

    /* goodG2B() - use goodsource and badsink: the source value is safe, so the
     * unchecked increment in the sink cannot overflow */
    private void goodG2B() throws Throwable
    {
        long data = (new CWE190_Integer_Overflow__long_max_postinc_61b()).goodG2BSource();

        /* POTENTIAL FLAW: if data == Long.MAX_VALUE, this will overflow */
        data++;
        long result = (long)(data);
        IO.writeLine("result: " + result);

    }

    /* goodB2G() - use badsource and goodsink: the source may be Long.MAX_VALUE,
     * but the sink guards the increment with a range check */
    private void goodB2G() throws Throwable
    {
        long data = (new CWE190_Integer_Overflow__long_max_postinc_61b()).goodB2GSource();

        /* FIX: Add a check to prevent an overflow from occurring */
        if (data < Long.MAX_VALUE)
        {
            data++;
            long result = (long)(data);
            IO.writeLine("result: " + result);
        }
        else
        {
            IO.writeLine("data value is too large to increment.");
        }

    }

    /* Below is the main(). It is only used when building this testcase on
     * its own for testing or for building a binary to use in testing binary
     * analysis tools. It is not used when compiling all the testcases as one
     * application, which is how source code analysis tools are tested.
     */
    public static void main(String[] args) throws ClassNotFoundException,
           InstantiationException, IllegalAccessException
    {
        mainFromParent(args);
    }
}
package com.example; import com.alibaba.csp.sentinel.Entry; import com.alibaba.csp.sentinel.SphU; import com.alibaba.csp.sentinel.slots.block.BlockException; import com.alibaba.csp.sentinel.slots.block.RuleConstant; import com.alibaba.csp.sentinel.slots.block.flow.FlowRule; import com.alibaba.csp.sentinel.slots.block.flow.FlowRuleManager; import java.util.ArrayList; import java.util.List; public class Application { public static void main(String[] args) { new Application().start(); } private void start() { List<FlowRule> rules = new ArrayList<>(); FlowRule rule = new FlowRule(); rule.setResource("HelloWorld"); // set limit qps to 20 rule.setCount(20); rule.setGrade(RuleConstant.FLOW_GRADE_QPS); rules.add(rule); FlowRuleManager.loadRules(rules); for (int i = 0; i < 25; i++) { try (Entry entry = SphU.entry("HelloWorld")) { // Your business logic here. System.out.println("hello world " + (i + 1)); } catch (BlockException e) { // Handle rejected request. e.printStackTrace(); } } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership.  The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License.  You may obtain a copy of
 * the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied.  See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.implementation.graphmatching;

/**
 * Thrown to indicate that two graphs were determined not to be isomorphic.
 *
 * @author reto
 */
class GraphNotIsomorphicException extends Exception {

    // Exception implements Serializable; declare an explicit version id so the
    // serialized form stays stable across recompiles.
    private static final long serialVersionUID = 1L;

    /** Creates the exception with no detail message (original behavior). */
    GraphNotIsomorphicException() {
        super();
    }

    /**
     * Creates the exception with a detail message describing why the graphs
     * are not isomorphic.
     *
     * @param message the detail message
     */
    GraphNotIsomorphicException(String message) {
        super(message);
    }
}
/* * Copyright 2010 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.guvnor.tools.utils.webdav; import org.eclipse.webdav.IResponse; /** * An Exception specifically for WebDav errors. Stores the response code and status message. */ public class WebDavException extends Exception { private static final long serialVersionUID = 510l; private int errCode; public WebDavException(IResponse response) { super("WebDav error: " + response.getStatusMessage() + //$NON-NLS-1$ " (" + response.getStatusCode() + ")"); //$NON-NLS-1$ //$NON-NLS-2$ this.errCode = response.getStatusCode(); } public int getErrorCode() { return errCode; } }
/*
 * DwpPropertyAttribute.java
 */

// COPYRIGHT_BEGIN
//
// The MIT License (MIT)
//
// Copyright (c) 2000-2020 Wizzer Works
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
// COPYRIGHT_END

// Declare package.
package com.wizzer.mle.studio.dwp.attribute;

/**
 * This class is an abstract class, partially implementing an <code>Attribute</code>
 * for a Magic Lantern Digital Workprint Property item.
 * <p>
 * All Digital Workprint Property items represented as domain table Attributes are
 * subclassed from <code>DwpPropertyAttribute</code>.
 * </p>
 *
 * @author Mark S. Millard
 */
public abstract class DwpPropertyAttribute extends DwpNameTypeValueAttribute
{
	/**
	 * A constructor that initializes the value of the DWP Property Attribute.
	 * <p>
	 * The name/type/value triple is packed into a single
	 * <code>DwpNameTypeValueAttributeValue</code> helper that becomes this
	 * attribute's value.
	 * </p>
	 *
	 * @param name The name of the DWP Property as a <code>String</code>.
	 * @param type The type for the DWP Property as a <code>String</code>.
	 * @param value The value for the DWP Property as an <code>Object</code>.
	 * @param isReadOnly A flag indicating whether the attribute should be read-only or not.
	 */
	public DwpPropertyAttribute(String name, String type, Object value, boolean isReadOnly)
	{
		// The attribute itself is always named "Property"; it has no type of its own.
		super("Property", null, isReadOnly);

		// Create a helper object to hold the Property value.
		Object attrValue = new DwpNameTypeValueAttributeValue(name,type,value);
		setValue(attrValue);

		// Set the number of significant bits: 16 bits per character of the
		// attribute's value. NOTE(review): assumes getValue() returns the
		// attribute's string form (UTF-16 sizing) -- confirm against
		// DwpNameTypeValueAttribute.getValue().
		this.setBits(getValue().length() * 16);
	}

	/**
	 * Get the <code>Attribute</code> type.
	 *
	 * @return <b>TYPE_DWP_PROPERTY</b> is always returned.
	 */
	public String getType()
	{
		return DwpItemAttribute.TYPE_DWP_PROPERTY;
	}

}
package com.vega.springit.service;

import org.springframework.stereotype.Service;

import com.vega.springit.domain.Vote;
import com.vega.springit.repository.VoteRepository;

/**
 * Application service that persists {@link Vote} entities through the
 * underlying {@link VoteRepository}.
 */
@Service
public class VoteService {

    private final VoteRepository voteRepository;

    /**
     * @param voteRepository repository used for vote persistence
     *                       (constructor-injected by Spring)
     */
    public VoteService(VoteRepository voteRepository) {
        this.voteRepository = voteRepository;
    }

    /**
     * Persists the given vote.
     *
     * @param vote the vote to store
     * @return the entity as returned by the repository's save operation
     */
    public Vote save(Vote vote) {
        return voteRepository.save(vote);
    }
}
package com.tzx.client.changeskin.inflater;

import android.content.Context;
import android.util.ArrayMap;
import android.util.AttributeSet;
import android.view.InflateException;
import android.view.View;

import java.lang.reflect.Constructor;
import java.util.Map;

/**
 * Created by Tanzhenxing
 * Date: 2020-02-21 17:01
 * Description: Base class for custom View inflation (mirrors the reflective
 * view-construction logic of the framework LayoutInflater).
 */
public abstract class AbsLayoutInflater implements InflaterInterface {

    // Package prefixes tried, in order, for unqualified view names like "TextView".
    protected static final String[] sClassPrefixList = {
            "android.widget.",
            "android.view.",
            "android.webkit."
    };

    // Every inflatable View must expose a (Context, AttributeSet) constructor.
    protected static final Class<?>[] sConstructorSignature = new Class[]{Context.class, AttributeSet.class};

    // Process-wide constructor cache shared by all inflater instances.
    // NOTE(review): the cache is keyed by the bare name only, not prefix+name --
    // presumably safe because a name resolves to one prefix, but confirm.
    protected static final Map<String, Constructor<? extends View>> sConstructorMap = new ArrayMap<>();

    // Scratch argument array reused across createView calls (instance-confined).
    protected final Object[] mConstructorArgs = new Object[2];

    /**
     * Resolves a layout tag to a View instance.
     * <p>
     * Unqualified names (no '.') are tried against each entry of
     * {@link #sClassPrefixList}; fully qualified names are loaded directly.
     *
     * @param context context used for class loading and construction
     * @param name    tag name from the layout ("view" delegates to its "class" attribute)
     * @param attrs   the tag's attribute set, forwarded to the View constructor
     * @return the constructed View, or null so the framework LayoutInflater can
     *         fall back to its own resolution
     */
    protected View createViewFromTag(Context context, String name, AttributeSet attrs) {
        if ("view".equals(name)) {
            name = attrs.getAttributeValue(null, "class");
        }

        try {
            mConstructorArgs[0] = context;
            mConstructorArgs[1] = attrs;

            if (-1 == name.indexOf('.')) {
                for (int i = 0; i < sClassPrefixList.length; i++) {
                    final View view = createView(context, name, sClassPrefixList[i]);

                    if (view != null) {
                        return view;
                    }
                }
                return null;
            } else {
                return createView(context, name, null);
            }
        } catch (Exception e) {
            // Deliberately swallow: returning null lets the real LayoutInflater
            // attempt to inflate this tag itself.
            return null;
        } finally {
            // Don't retain references on context.
            mConstructorArgs[0] = null;
            mConstructorArgs[1] = null;
        }
    }

    /**
     * Reflectively constructs a View, caching its constructor for reuse.
     *
     * @param context class-loading context
     * @param name    View class name (simple or fully qualified)
     * @param prefix  package prefix to prepend, or null if {@code name} is already qualified
     * @return the new View, or null when construction fails for any reason
     * @throws ClassNotFoundException declared for API symmetry; in practice the
     *         broad catch below converts failures to a null return
     * @throws InflateException declared for API symmetry with the framework inflater
     */
    private View createView(Context context, String name, String prefix)
            throws ClassNotFoundException, InflateException {
        Constructor<? extends View> constructor = sConstructorMap.get(name);

        try {
            if (constructor == null) {
                // Class not found in the cache, see if it's real, and try to add it
                Class<? extends View> clazz = context.getClassLoader().loadClass(
                        prefix != null ? (prefix + name) : name).asSubclass(View.class);

                constructor = clazz.getConstructor(sConstructorSignature);
                sConstructorMap.put(name, constructor);
            }
            constructor.setAccessible(true);
            return constructor.newInstance(mConstructorArgs);
        } catch (Exception e) {
            e.printStackTrace();
            // Best-effort: fall back to null so the framework inflater can try.
            return null;
        }
    }
}
package com.nativegobarber; import android.app.Application; import android.content.Context; import com.facebook.react.PackageList; import com.facebook.react.ReactApplication; import com.BV.LinearGradient.LinearGradientPackage; import com.facebook.react.ReactNativeHost; import com.facebook.react.ReactPackage; import com.facebook.soloader.SoLoader; import java.lang.reflect.InvocationTargetException; import java.util.List; public class MainApplication extends Application implements ReactApplication { private final ReactNativeHost mReactNativeHost = new ReactNativeHost(this) { @Override public boolean getUseDeveloperSupport() { return BuildConfig.DEBUG; } @Override protected List<ReactPackage> getPackages() { @SuppressWarnings("UnnecessaryLocalVariable") List<ReactPackage> packages = new PackageList(this).getPackages(); // Packages that cannot be autolinked yet can be added manually here, for example: // packages.add(new MyReactNativePackage()); return packages; } @Override protected String getJSMainModuleName() { return "index"; } }; @Override public ReactNativeHost getReactNativeHost() { return mReactNativeHost; } @Override public void onCreate() { super.onCreate(); SoLoader.init(this, /* native exopackage */ false); initializeFlipper(this); // Remove this line if you don't want Flipper enabled } /** * Loads Flipper in React Native templates. * * @param context */ private static void initializeFlipper(Context context) { if (BuildConfig.DEBUG) { try { /* We use reflection here to pick up the class that initializes Flipper, since Flipper library is not available in release mode */ Class<?> aClass = Class.forName("com.facebook.flipper.ReactNativeFlipper"); aClass.getMethod("initializeFlipper", Context.class).invoke(null, context); } catch (ClassNotFoundException e) { e.printStackTrace(); } catch (NoSuchMethodException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } catch (InvocationTargetException e) { e.printStackTrace(); } } } }
package ru.yandex.qatools.allure.jenkins.config; import org.apache.commons.lang.StringUtils; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.DataBoundSetter; import javax.annotation.Nonnull; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; /** * eroshenkoam. * 30/07/14 */ public class AllureReportConfig implements Serializable { private String jdk; private String commandline; /** * @deprecated Please, someone write why is this deprecated? */ @Deprecated private String resultsPattern; private List<PropertyConfig> properties = new ArrayList<>(); private List<ResultsConfig> results; private ReportBuildPolicy reportBuildPolicy = ReportBuildPolicy.ALWAYS; private Boolean includeProperties = Boolean.TRUE; private String configPath = ""; @DataBoundConstructor public AllureReportConfig(List<ResultsConfig> results) { this.results = results == null ? Collections.<ResultsConfig>emptyList() : results; } @DataBoundSetter public void setJdk(final String jdk) { this.jdk = jdk; } public String getJdk() { return jdk; } @DataBoundSetter public void setCommandline(String commandline) { this.commandline = commandline; } public String getCommandline() { return commandline; } @Nonnull @SuppressWarnings("deprecation") public List<ResultsConfig> getResults() { if (StringUtils.isNotBlank(this.resultsPattern)) { this.results = convertPaths(this.resultsPattern); this.resultsPattern = null; } return results; } public List<PropertyConfig> getProperties() { return this.properties; } @DataBoundSetter public void setProperties(List<PropertyConfig> properties) { this.properties = properties; } public ReportBuildPolicy getReportBuildPolicy() { return this.reportBuildPolicy; } @DataBoundSetter public void setReportBuildPolicy(ReportBuildPolicy reportBuildPolicy) { this.reportBuildPolicy = reportBuildPolicy; } @DataBoundSetter public void setIncludeProperties(Boolean includeProperties) { 
this.includeProperties = includeProperties; } @DataBoundSetter public void setConfigPath(String configPath) { this.configPath = configPath; } public boolean getIncludeProperties() { return includeProperties; } public static AllureReportConfig newInstance(List<String> results) { return newInstance(null, null, null, results.toArray(new String[]{})); } public static AllureReportConfig newInstance(String jdk, String commandline, String configPath, String... paths) { return newInstance(jdk, commandline, configPath, Arrays.asList(paths)); } private static AllureReportConfig newInstance(String jdk, String commandline, String configPath, List<String> paths) { final List<ResultsConfig> results = convertPaths(paths); final AllureReportConfig config = new AllureReportConfig(results); config.setJdk(jdk); config.setCommandline(commandline); config.setIncludeProperties(true); config.setConfigPath(configPath); return config; } private static List<ResultsConfig> convertPaths(String paths) { return convertPaths(Arrays.asList(paths.split("\\n"))); } private static List<ResultsConfig> convertPaths(List<String> paths) { final List<ResultsConfig> results = new ArrayList<>(); for (String path : paths) { results.add(new ResultsConfig(path)); } return results; } }
package edu.tum.ase.compiler.e2e; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.ResultActions; import org.springframework.test.web.servlet.result.MockMvcResultMatchers; import edu.tum.ase.compiler.model.SourceCode; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @SpringBootTest @AutoConfigureMockMvc public class CompilerE2ERestTests { private final String URL = "/compile"; @Autowired private MockMvc systemUnderTest; @Autowired private ObjectMapper objectMapper; @Test public void should_ReturnCompilationResult_When_GivenCode() throws Exception { // given SourceCode sourceCode = new SourceCode("int main(){}", "test.c"); // when ResultActions result = systemUnderTest.perform(post(URL) .content(objectMapper.writeValueAsString(sourceCode)) .contentType(MediaType.APPLICATION_JSON)); // then result .andExpect(status().isOk()) .andExpect(MockMvcResultMatchers.jsonPath("$.compilable").value("true")) .andExpect(MockMvcResultMatchers.jsonPath("$.stderr").value("")); } @Test public void should_ReturnCompilationResult_When_GivenBadCode() throws Exception { // given SourceCode sourceCode = new SourceCode("x", "test.java"); // when ResultActions result = systemUnderTest.perform(post(URL) .content(objectMapper.writeValueAsString(sourceCode)) .contentType(MediaType.APPLICATION_JSON)); // then result .andExpect(status().isOk()) .andExpect(MockMvcResultMatchers.jsonPath("$.compilable").value("false")); } }
/* * MIT License * * Copyright (c) 2019 1619kHz * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package org.aquiver.mvc.router; import org.aquiver.RequestContext; import org.aquiver.RequestHandler; import org.aquiver.RouteRepeatException; import org.aquiver.mvc.http.HttpMethod; import org.aquiver.mvc.annotation.Path; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.annotation.Annotation; import java.lang.invoke.MethodHandles; import java.lang.reflect.Method; import java.util.Map; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; /** * @author WangYi * @since 2020/5/23 */ public class RestfulRouter implements Router { private static final Logger log = LoggerFactory.getLogger(RestfulRouter.class); private final Map<String, RouteInfo> routes = new ConcurrentHashMap<>(64); private final MethodHandles.Lookup lookup = MethodHandles.lookup(); @Override public void registerRoute(String path, Object object) { try { Class<?> cls = object.getClass(); Method[] methods = cls.getDeclaredMethods(); for (Method method : methods) { Path methodPath = method.getAnnotation(Path.class); if (Objects.nonNull(methodPath)) { String completeUrl = this.getMethodUrl(path, methodPath.value()); this.registerRoute(cls, object, completeUrl, method, methodPath.method()); } registerRoute(cls, object, path, method); } } catch (Throwable throwable) { log.error("Register route exception", throwable); } } @Override public RouteInfo lookup(String url) { String lookupPath = url.endsWith("/") ? 
url.substring(0, url.length() - 1) : url; int paramStartIndex = lookupPath.indexOf("?"); if (paramStartIndex > 0) { lookupPath = lookupPath.substring(0, paramStartIndex); } if (lookupPath.equals("")) { lookupPath = "/"; } RouteInfo routeInfo = routes.get(url); if (Objects.nonNull(routeInfo)) { return routeInfo; } for (Map.Entry<String, RouteInfo> entry : routes.entrySet()) { String[] lookupPathSplit = lookupPath.split("/"); String[] mappingUrlSplit = entry.getKey().split("/"); String matcher = PathVarMatcher.getMatch(entry.getKey()); if (!lookupPath.startsWith(matcher) || lookupPathSplit.length != mappingUrlSplit.length) { continue; } if (PathVarMatcher.checkMatch(lookupPathSplit, mappingUrlSplit)) { return entry.getValue(); } } return routeInfo; } @Override public void registerRoute(String path, RequestHandler handler, HttpMethod httpMethod) { try { Class<? extends RequestHandler> ref = handler.getClass(); Method handle = ref.getMethod("handle", RequestContext.class); this.registerRoute(RequestHandler.class, handler, path, handle, httpMethod); } catch (NoSuchMethodException e) { log.error("There is no such method {}", "handle", e); } } /** * add route * * @param cls route class * @param url @GET/@POST.. value * @param method Mapping annotation annotation method * @throws Throwable reflection exception */ private void registerRoute(Class<?> cls, Object bean, String url, Method method) throws Throwable { Annotation[] annotations = method.getAnnotations(); if (annotations.length != 0) { for (Annotation annotation : annotations) { String routeUrl = "/"; Class<? 
extends Annotation> annotationType = annotation.annotationType(); Path path = annotationType.getAnnotation(Path.class); if (Objects.isNull(path)) { continue; } HttpMethod httpMethod = path.method(); Method valueMethod = annotationType.getMethod("value"); Object valueInvokeResult = lookup.unreflect(valueMethod).bindTo(annotation).invoke(); if (!Objects.isNull(valueInvokeResult) && !valueInvokeResult.equals(routeUrl)) { if (valueInvokeResult.equals("")) { valueInvokeResult = method.getName(); } routeUrl = String.join(routeUrl, String.valueOf(valueInvokeResult)); } String completeUrl = this.getMethodUrl(url, routeUrl); this.registerRoute(cls, bean, completeUrl, method, httpMethod); } } } /** * add route * * @param clazz route class * @param method Mapping annotation annotation method * @param httpMethod http method */ private void registerRoute(Class<?> clazz, Object bean, String completeUrl, Method method, HttpMethod httpMethod) { if (completeUrl.trim().isEmpty()) { return; } RouteInfo routeInfo = createRoute(clazz, bean, method, httpMethod, completeUrl); if (this.routes.containsKey(completeUrl)) { if (log.isDebugEnabled()) { log.debug("Registered request route URL is duplicated :{}", completeUrl); } throw new RouteRepeatException("Registered request route URL is duplicated : " + completeUrl); } else { this.registerRoute(completeUrl, routeInfo); } } /** * Create route * * @param clazz Route class * @param method Route method * @param httpMethod Route root path * @param completeUrl Complete route path * @return Route */ private RouteInfo createRoute(Class<?> clazz, Object bean, Method method, HttpMethod httpMethod, String completeUrl) { return RouteInfo.create(completeUrl, clazz, bean, method, httpMethod); } /** * add route * * @param url url * @param routeInfo Route info */ private void registerRoute(String url, RouteInfo routeInfo) { this.routes.put(url, routeInfo); } /** * Get the complete mapped address * * @param baseUrl The address of @Path or @RestPath on the 
class * @param methodMappingUrl Annotated address on method * @return complete mapped address */ protected String getMethodUrl(String baseUrl, String methodMappingUrl) { StringBuilder url = new StringBuilder(256); url.append((baseUrl == null || baseUrl.trim().isEmpty()) ? "" : baseUrl.trim()); if (Objects.nonNull(methodMappingUrl) && !methodMappingUrl.trim().isEmpty()) { String methodMappingUrlTrim = methodMappingUrl.trim(); if (!methodMappingUrlTrim.startsWith("/")) { methodMappingUrlTrim = "/" + methodMappingUrlTrim; } if (url.toString().endsWith("/")) { url.setLength(url.length() - 1); } url.append(methodMappingUrlTrim); } return url.toString(); } }
package org.vertexium.elasticsearch7.lucene; public class VertexiumSimpleNode extends SimpleNode implements QueryStringNode { public VertexiumSimpleNode(int i) { super(i); } public VertexiumSimpleNode(QueryParser p, int i) { super(p, i); } @Override public String toString() { StringBuilder ret = new StringBuilder(); ret.append("VertexiumSimpleNode{"); for (Token t = jjtGetFirstToken(); ; t = t.next) { if (t == null) { break; } ret.append(t.image); if (t == jjtGetLastToken()) { break; } } ret.append("}"); return ret.toString(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.wan.misc;

import static org.apache.geode.distributed.ConfigurationProperties.DISTRIBUTED_SYSTEM_ID;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.distributed.ConfigurationProperties.START_LOCATOR;
import static org.apache.geode.test.dunit.Assert.fail;

import java.io.IOException;
import java.util.Properties;

import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.cache.client.PoolManager;
import org.apache.geode.cache.client.internal.Connection;
import org.apache.geode.cache.client.internal.PoolImpl;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.cache.wan.GatewayReceiver;
import org.apache.geode.cache.wan.GatewayReceiverFactory;
import org.apache.geode.cache.wan.GatewaySender;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.internal.AvailablePortHelper;
import org.apache.geode.internal.cache.PoolFactoryImpl;
import org.apache.geode.internal.cache.wan.WANTestBase;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.junit.categories.WanTest;

/**
 * Distributed (DUnit) test for WAN locator/server interaction: it starts three
 * co-located locators, a gateway receiver, and a client pool, then restarts the
 * locators and verifies the client can still acquire a connection.
 */
@Category({WanTest.class})
public class WANLocatorServerDUnitTest extends WANTestBase {

  // Client pool created in createClient(); reused by tryNewConnection() in the same VM.
  static PoolImpl proxy;

  @Override
  protected final void postSetUpWANTestBase() throws Exception {
    // NOTE(review): this local is never used; the call may exist only to force
    // host initialization — confirm before removing.
    final Host host = Host.getHost(0);
  }

  /**
   * Starts 3 locators (vm0-vm2), a receiver (vm3) and a client (vm5), bounces
   * all three locators, and then asserts the client can open a new connection.
   */
  @Test
  public void test_3Locators_2Servers() {
    int port1 = AvailablePortHelper.getRandomAvailableTCPPort();
    int port2 = AvailablePortHelper.getRandomAvailableTCPPort();
    int port3 = AvailablePortHelper.getRandomAvailableTCPPort();

    vm0.invoke(() -> WANLocatorServerDUnitTest.createLocator(port1, port2, port3, port1));

    vm1.invoke(() -> WANLocatorServerDUnitTest.createLocator(port1, port2, port3, port2));

    vm2.invoke(() -> WANLocatorServerDUnitTest.createLocator(port1, port2, port3, port3));

    vm3.invoke(() -> WANLocatorServerDUnitTest.createReceiver(port1, port2, port3));

    vm5.invoke(() -> WANLocatorServerDUnitTest.createClient(port1, port2, port3));

    // Bounce every locator, then restart each on its original port.
    vm0.invoke(() -> WANLocatorServerDUnitTest.disconnect());
    vm1.invoke(() -> WANLocatorServerDUnitTest.disconnect());
    vm2.invoke(() -> WANLocatorServerDUnitTest.disconnect());

    vm0.invoke(() -> WANLocatorServerDUnitTest.createLocator(port1, port2, port3, port1));

    vm1.invoke(() -> WANLocatorServerDUnitTest.createLocator(port1, port2, port3, port2));

    vm2.invoke(() -> WANLocatorServerDUnitTest.createLocator(port1, port2, port3, port3));

    vm5.invoke(() -> WANLocatorServerDUnitTest.tryNewConnection());
  }

  /**
   * Starts an embedded locator (server+peer) on {@code startingPort}, with all
   * three locator ports listed in the LOCATORS property.
   */
  public static void createLocator(Integer port1, Integer port2, Integer port3,
      Integer startingPort) {
    WANTestBase test = new WANTestBase();
    Properties props = test.getDistributedSystemProperties();
    props.setProperty(MCAST_PORT, "0");
    props.setProperty(DISTRIBUTED_SYSTEM_ID, "" + 1);
    props.setProperty(LOCATORS, "localhost[" + port1 + "],localhost[" + port2 + "],localhost["
        + port3 + "]");
    props.setProperty(START_LOCATOR,
        "localhost[" + startingPort + "],server=true,peer=true,hostname-for-clients=localhost");
    test.getSystem(props);
  }

  /**
   * Creates a cache connected to the three locators and a manual-start
   * GatewayReceiver on a random port, then starts it.
   */
  public static void createReceiver(Integer port1, Integer port2, Integer port3) {
    WANTestBase test = new WANTestBase();
    Properties props = test.getDistributedSystemProperties();
    props.setProperty(MCAST_PORT, "0");
    props.setProperty(LOCATORS, "localhost[" + port1 + "],localhost[" + port2 + "],localhost["
        + port3 + "]");

    InternalDistributedSystem ds = test.getSystem(props);
    cache = CacheFactory.create(ds);
    GatewayReceiverFactory fact = cache.createGatewayReceiverFactory();
    int port = AvailablePortHelper.getRandomAvailableTCPPort();
    // Pin start==end so the receiver binds exactly this port.
    fact.setStartPort(port);
    fact.setEndPort(port);
    fact.setManualStart(true);
    GatewayReceiver receiver = fact.create();
    try {
      receiver.start();
    } catch (IOException e) {
      fail("Test " + test.getName() + " failed to start GatewayReceiver on port " + port, e);
    }
  }

  /**
   * Creates a cache connected to the three locators and starts a CacheServer on
   * a random port.
   *
   * NOTE(review): not invoked by test_3Locators_2Servers in this file — confirm
   * it is still used before removing.
   */
  public static void createServer(Integer port1, Integer port2, Integer port3) {
    WANTestBase test = new WANTestBase();
    Properties props = test.getDistributedSystemProperties();
    props.setProperty(MCAST_PORT, "0");
    props.setProperty(LOCATORS, "localhost[" + port1 + "],localhost[" + port2 + "],localhost["
        + port3 + "]");

    InternalDistributedSystem ds = test.getSystem(props);
    cache = CacheFactory.create(ds);
    CacheServer server = cache.addCacheServer();
    int port = AvailablePortHelper.getRandomAvailableTCPPort();
    server.setPort(port);
    try {
      server.start();
    } catch (IOException e) {
      fail("Test " + test.getName() + " failed to start CacheServer on port " + port, e);
    }
    LogWriterUtils.getLogWriter()
        .info("Server Started on port : " + port + " : server : " + server);
  }

  /** Disconnects the distributed system owned by the invoking VM. */
  public static void disconnect() {
    WANTestBase test = new WANTestBase();
    test.getSystem().disconnect();
  }

  /**
   * Creates a client cache and a pool targeting the receiver group via the three
   * locators, stores the pool in {@link #proxy}, and exercises one connection.
   */
  public static void createClient(Integer port1, Integer port2, Integer port3) {
    ClientCacheFactory cf = new ClientCacheFactory();
    cache = (Cache) cf.create();
    PoolFactoryImpl pf = (PoolFactoryImpl) PoolManager.createFactory();
    pf.setReadTimeout(0);
    pf.setIdleTimeout(-1);
    pf.setMinConnections(4);
    pf.setServerGroup(GatewayReceiver.RECEIVER_GROUP);
    pf.addLocator("localhost", port1);
    pf.addLocator("localhost", port2);
    pf.addLocator("localhost", port3);
    pf.init((GatewaySender) null);
    proxy = ((PoolImpl) pf.create("KISHOR_POOL"));
    Connection con1 = proxy.acquireConnection();
    try {
      con1.close(true);
    } catch (Exception e) {
      fail("createClient failed", e);
    }
  }

  /** Asserts the previously created pool can still hand out a connection. */
  public static void tryNewConnection() {
    Connection con1 = null;
    try {
      con1 = proxy.acquireConnection();
    } catch (Exception e) {
      Assert.fail("No Exception expected", e);
    }
  }
}
/*========================================================================= * * Copyright Insight Software Consortium * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0.txt * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *=========================================================================*/ /** * Example on the use of the CannyEdgeDetectionImageFilter * */ import InsightToolkit.*; public class CannyEdgeDetectionImageFilter { public static void main( String argv[] ) { itkImageFileReaderF2_Pointer reader = itkImageFileReaderF2.itkImageFileReaderF2_New(); itkImageFileWriterUC2_Pointer writer = itkImageFileWriterUC2.itkImageFileWriterUC2_New(); itkCannyEdgeDetectionImageFilterF2F2_Pointer filter = itkCannyEdgeDetectionImageFilterF2F2.itkCannyEdgeDetectionImageFilterF2F2_New(); itkRescaleIntensityImageFilterF2UC2_Pointer outputCast = itkRescaleIntensityImageFilterF2UC2.itkRescaleIntensityImageFilterF2UC2_New(); filter.SetInput( reader.GetOutput() ); outputCast.SetInput( filter.GetOutput() ); writer.SetInput( outputCast.GetOutput() ); reader.SetFileName( argv[0] ); writer.SetFileName( argv[1] ); short outputMinimum = 0; short outputMaximum = 0; outputCast.SetOutputMinimum( outputMinimum ); outputCast.SetOutputMaximum( outputMaximum ); filter.SetVariance( Float.parseFloat( argv[2] ) ); filter.SetLowerThreshold( Float.parseFloat( argv[3] ) ); filter.SetUpperThreshold( Float.parseFloat( argv[4] ) ); writer.Update(); } }
package hw.hw6; public interface WeatherDisplay { public void update(WeatherData wd); public void setOutput(WeatherOutput wo); }
package goveed20.LiteraryAssociationApplication; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.scheduling.annotation.EnableAsync; import org.springframework.scheduling.annotation.EnableScheduling; @SpringBootApplication @EnableAsync @EnableScheduling public class LiteraryAssociationApplication { public static void main(String[] args) { SpringApplication.run(LiteraryAssociationApplication.class, args); } }
/* * Tencent is pleased to support the open source community by making * MMKV available. * * Copyright (C) 2018 THL A29 Limited, a Tencent company. * All rights reserved. * * Licensed under the BSD 3-Clause License (the "License"); you may not use * this file except in compliance with the License. You may obtain a copy of * the License at * * https://opensource.org/licenses/BSD-3-Clause * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tencent.mmkv; import android.app.ActivityManager; import android.content.ComponentName; import android.content.ContentProvider; import android.content.ContentResolver; import android.content.ContentValues; import android.content.Context; import android.content.pm.PackageManager; import android.content.pm.ProviderInfo; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.util.Log; public class MMKVContentProvider extends ContentProvider { static protected final String KEY = "KEY"; static protected final String KEY_SIZE = "KEY_SIZE"; static protected final String KEY_MODE = "KEY_MODE"; static protected final String KEY_CRYPT = "KEY_CRYPT"; static protected final String FUNCTION_NAME = "mmkvFromAshmemID"; static private Uri gUri; @Nullable static protected Uri contentUri(Context context) { if (MMKVContentProvider.gUri != null) { return MMKVContentProvider.gUri; } if (context == null) { return null; } String authority = queryAuthority(context); if (authority == null) { return null; } MMKVContentProvider.gUri = Uri.parse(ContentResolver.SCHEME_CONTENT + "://" + authority); return MMKVContentProvider.gUri; } private Bundle 
mmkvFromAshmemID(String ashmemID, int size, int mode, String cryptKey) { MMKV mmkv = MMKV.mmkvWithAshmemID(getContext(), ashmemID, size, mode, cryptKey); if (mmkv != null) { ParcelableMMKV parcelableMMKV = new ParcelableMMKV(mmkv); Log.i("MMKV", ashmemID + " fd = " + mmkv.ashmemFD() + ", meta fd = " + mmkv.ashmemMetaFD()); Bundle result = new Bundle(); result.putParcelable(MMKVContentProvider.KEY, parcelableMMKV); return result; } return null; } private static String queryAuthority(Context context) { try { ComponentName componentName = new ComponentName(context, MMKVContentProvider.class.getName()); PackageManager mgr = context.getPackageManager(); if (mgr != null) { ProviderInfo providerInfo = mgr.getProviderInfo(componentName, 0); return providerInfo.authority; } } catch (Exception e) { e.printStackTrace(); } return null; } @Override public boolean onCreate() { Context context = getContext(); if (context == null) { return false; } String authority = queryAuthority(context); if (authority == null) { return false; } if (MMKVContentProvider.gUri == null) { MMKVContentProvider.gUri = Uri.parse(ContentResolver.SCHEME_CONTENT + "://" + authority); } return true; } protected static String getProcessNameByPID(Context context, int pid) { ActivityManager manager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE); if (manager != null) { for (ActivityManager.RunningAppProcessInfo processInfo : manager.getRunningAppProcesses()) { if (processInfo.pid == pid) { return processInfo.processName; } } } return ""; } @Nullable @Override public Bundle call(@NonNull String method, @Nullable String mmapID, @Nullable Bundle extras) { if (method.equals(MMKVContentProvider.FUNCTION_NAME)) { if (extras != null) { int size = extras.getInt(MMKVContentProvider.KEY_SIZE); int mode = extras.getInt(MMKVContentProvider.KEY_MODE); String cryptKey = extras.getString(MMKVContentProvider.KEY_CRYPT); return mmkvFromAshmemID(mmapID, size, mode, cryptKey); } } return null; } @Nullable 
@Override public String getType(@NonNull Uri uri) { return null; } @Nullable @Override public Cursor query(@NonNull Uri uri, @Nullable String[] projection, @Nullable String selection, @Nullable String[] selectionArgs, @Nullable String sortOrder) { throw new java.lang.UnsupportedOperationException("Not implement in MMKV"); } @Override public int update(@NonNull Uri uri, @Nullable ContentValues values, @Nullable String selection, @Nullable String[] selectionArgs) { throw new java.lang.UnsupportedOperationException("Not implement in MMKV"); } @Override public int delete(@NonNull Uri uri, @Nullable String selection, @Nullable String[] selectionArgs) { throw new java.lang.UnsupportedOperationException("Not implement in MMKV"); } @Nullable @Override public Uri insert(@NonNull Uri uri, @Nullable ContentValues values) { throw new java.lang.UnsupportedOperationException("Not implement in MMKV"); } }
public class Solution { // 树形 dp + 后序遍历,在遍历的过程中找到题目要求的最值 private int res = Integer.MIN_VALUE; public int maxPathSum(TreeNode root) { dfs(root); return res; } /** * @param node 返回的答案里 node 必须被选取,且路径只有一边,这是 dp 的无后效性应用 * @return */ private int dfs(TreeNode node) { if (node == null) { return 0; } // 注意 1:后序遍历 + 剪枝,把从下到上路径和为 0 的枝叶剪去 int leftPathSum = Math.max(0, dfs(node.left)); int rightPathSum = Math.max(0, dfs(node.right)); // 在遍历的过程中找到题目要求的最大路径和 res = Math.max(res, node.val + leftPathSum + rightPathSum); // 注意 2:这里是 dp 的思想(无后效性),node.val 必须被选取,且左右路径只选一条 return node.val + Math.max(leftPathSum, rightPathSum); } }
package com.ensoftcorp.open.cg.analysis;

import java.util.LinkedList;

import com.ensoftcorp.atlas.core.db.graph.Edge;
import com.ensoftcorp.atlas.core.db.graph.Node;
import com.ensoftcorp.atlas.core.db.set.AtlasHashSet;
import com.ensoftcorp.atlas.core.db.set.AtlasSet;
import com.ensoftcorp.atlas.core.indexing.IndexingUtil;
import com.ensoftcorp.atlas.core.query.Q;
import com.ensoftcorp.atlas.core.query.Query;
import com.ensoftcorp.atlas.core.script.Common;
import com.ensoftcorp.atlas.core.xcsg.XCSG;
import com.ensoftcorp.open.cg.log.Log;
import com.ensoftcorp.open.cg.preferences.CallGraphPreferences;
import com.ensoftcorp.open.commons.analysis.SetDefinitions;
import com.ensoftcorp.open.commons.utilities.CodeMapChangeListener;
import com.ensoftcorp.open.java.commons.analysis.ThrowableAnalysis;
import com.ensoftcorp.open.java.commons.analyzers.JavaProgramEntryPoints;

/**
 * Performs an Exception Type Analysis (ETA), which is a modification
 * to RTA that considers inter-procedural exceptional flows.
 *
 * In terms of call graph construction precision this algorithm
 * ranks better than RTA but worse than a 0-CFA.
 *
 * Reference: http://web.cs.ucla.edu/~palsberg/paper/oopsla00.pdf
 *
 * @author Ben Holland
 */
public class ExceptionTypeAnalysis extends CGAnalysis {

    // Tags applied to the edges produced by this analysis.
    public static final String CALL = "ETA-CALL";
    public static final String PER_CONTROL_FLOW = "ETA-PER-CONTROL-FLOW";

    // Node attribute key under which the per-method allocation-type set is cached.
    private static final String TYPES_SET = "ETA-TYPES";

    private static ExceptionTypeAnalysis instance = null;

    protected ExceptionTypeAnalysis() {
        // exists only to defeat instantiation
    }

    // Listener used to invalidate the cached instance when the code map changes.
    private static CodeMapChangeListener codeMapChangeListener = null;

    /**
     * Returns the singleton analysis instance, rebuilding it if the Atlas
     * index has changed since it was created.
     */
    public static ExceptionTypeAnalysis getInstance() {
        if (instance == null || (codeMapChangeListener != null && codeMapChangeListener.hasIndexChanged())) {
            instance = new ExceptionTypeAnalysis();
            if(codeMapChangeListener == null){
                codeMapChangeListener = new CodeMapChangeListener();
                IndexingUtil.addListener(codeMapChangeListener);
            } else {
                codeMapChangeListener.reset();
            }
        }
        return instance;
    }

    /**
     * Worklist fixpoint: starting from the program entry points (or all public
     * app methods under library assumptions), propagates allocation types
     * along call edges AND along throw/catch relationships, retaining only the
     * CHA call edges whose target is compatible with the propagated types.
     */
    @Override
    protected void runAnalysis() {
        // first get the conservative call graph from CHA
        // for library calls, RTA uses CHA library call edges because assuming every that every type could be allocated
        // outside of the method and passed into the library is just an expensive way to end back up at CHA
        ClassHierarchyAnalysis cha = ClassHierarchyAnalysis.getInstance();
        Q cgCHA = cha.getCallGraph();

        // next create some subgraphs to work with
        Q typeHierarchy = Query.universe().edges(XCSG.Supertype);
        Q typeOfEdges = Query.universe().edges(XCSG.TypeOf);
        Q declarations = Query.universe().edges(XCSG.Contains);

        // create a worklist and add the root method set
        LinkedList<Node> worklist = new LinkedList<Node>();

        AtlasSet<Node> mainMethods = JavaProgramEntryPoints.findMainMethods().eval().nodes();
        if(CallGraphPreferences.isLibraryCallGraphConstructionEnabled() || mainMethods.isEmpty()){
            if(!CallGraphPreferences.isLibraryCallGraphConstructionEnabled() && mainMethods.isEmpty()){
                Log.warning("Application does not contain a main method, building a call graph using library assumptions.");
            }
            // if we are building a call graph for a library there is no main method...
            // a nice balance is to start with all public methods in the library
            AtlasSet<Node> rootMethods = SetDefinitions.app().nodesTaggedWithAll(XCSG.publicVisibility, XCSG.Method).eval().nodes();
            for(Node method : rootMethods){
                worklist.add(method);
            }
        } else {
            // under normal circumstances this algorithm would be given a single main method
            // but end users don't tend to think about this so consider any valid main method
            // as a program entry point
            if(mainMethods.size() > 1){
                Log.warning("Application contains multiple main methods. The call graph may contain unexpected conservative edges as a result.");
            }
            for(Node mainMethod : mainMethods){
                worklist.add(mainMethod);
            }
        }

        // initially the ETA based call graph is empty
        AtlasSet<Edge> cgETA = new AtlasHashSet<Edge>();

        // iterate until the worklist is empty (in ETA the worklist only contains methods)
        while(!worklist.isEmpty()){
            Node method = worklist.removeFirst();

            // we should consider the allocation types instantiated directly in the method
            AtlasSet<Node> allocationTypes = getAllocationTypesSet(method);
            if(allocationTypes.isEmpty()){
                // allocations are contained (declared) within the methods in the method reverse call graph
                Q methodDeclarations = declarations.forward(Common.toQ(method));
                Q allocations = methodDeclarations.nodes(XCSG.Instantiation);
                // collect the types of each allocation
                allocationTypes.addAll(typeOfEdges.successors(allocations).eval().nodes());

                // we should also include the allocation types of each parent method (in the current ETA call graph)
                // get compatible parent allocation types
                AtlasSet<Node> parentMethods = Common.toQ(cgETA).reverse(Common.toQ(method)).difference(Common.toQ(method)).eval().nodes();
                for(Node parentMethod : parentMethods){
                    Q parentAllocationTypes = Common.toQ(getAllocationTypesSet(parentMethod));
                    // add the parameter type compatible allocation types
                    allocationTypes.addAll(parentAllocationTypes.eval().nodes());
                }
            }

            // for ETA we should inherit all allocation types from methods and their parents that
            // throw an exception that could be caught by this method
            Q potentialCatchBlocks = declarations.forward(Common.toQ(method)).nodes(XCSG.ControlFlow_Node);
            Q throwingMethods = declarations.reverse(ThrowableAnalysis.findThrowForCatch(potentialCatchBlocks)).nodes(XCSG.Method);
            throwingMethods = throwingMethods.difference(Common.toQ(method)); // only worried about exceptions that propagate back up the stack
            for(Node throwingMethod : throwingMethods.eval().nodes()){
                Q throwerAllocationTypes = Common.toQ(getAllocationTypesSet(throwingMethod));
                // add the parameter type compatible allocation types
                allocationTypes.addAll(throwerAllocationTypes.eval().nodes());
            }

            // finally if this method throws an exception we should propagate those types to all
            // methods that could potentially catch it
            Q potentialThrowBlocks = declarations.forward(Common.toQ(method)).nodes(XCSG.ControlFlow_Node);
            Q catchingMethods = declarations.reverse(ThrowableAnalysis.findCatchForThrows(potentialThrowBlocks)).nodes(XCSG.Method);
            catchingMethods = catchingMethods.difference(Common.toQ(method)); // only worried about exceptions that propagate back up the stack
            for(Node catchingMethod : catchingMethods.eval().nodes()){
                // addAll returns true iff the catcher's set grew, i.e. new facts to process
                if(getAllocationTypesSet(catchingMethod).addAll(allocationTypes)){
                    if(!worklist.contains(catchingMethod)){
                        worklist.add(catchingMethod);
                    }
                }
            }

            // next get a set of all the CHA call edges from the method and create an ETA edge
            // from the method to the target method in the CHA call graph if the target methods
            // type is compatible with the feasibly allocated types that would reach this method
            AtlasSet<Edge> callEdges = cgCHA.forwardStep(Common.toQ(method)).eval().edges();
            for(Edge callEdge : callEdges){
                // add static dispatches to the eta call graph
                // includes called methods marked static and constructors
                Node calledMethod = callEdge.to();
                Node callingMethod = callEdge.from();
                Q callingStaticDispatches = Common.toQ(callingMethod).contained().nodes(XCSG.StaticDispatchCallSite);
                boolean isStaticDispatch = !cha.getPerControlFlowGraph().predecessors(Common.toQ(calledMethod)).intersection(callingStaticDispatches).eval().nodes().isEmpty();
                if(isStaticDispatch || calledMethod.taggedWith(XCSG.Constructor) || calledMethod.getAttr(XCSG.name).equals("<init>")){
                    updateCallGraph(worklist, cgETA, method, allocationTypes, callEdge, calledMethod);
                } else {
                    // the call edge is a dynamic dispatch, need to resolve possible dispatches
                    // a dispatch is possible if the type declaring the method is one of the
                    // allocated types (or the parent of an allocated type)
                    // note: we should consider the supertype hierarchy of the allocation types
                    // because methods can be inherited from parent types
                    Q typeDeclaringCalledMethod = declarations.predecessors(Common.toQ(calledMethod));
                    if(!typeHierarchy.forward(Common.toQ(allocationTypes)).intersection(typeDeclaringCalledMethod).eval().nodes().isEmpty()){
                        updateCallGraph(worklist, cgETA, method, allocationTypes, callEdge, calledMethod);
                    }
                }
            }
        }

        // just tag each edge in the ETA call graph with "ETA" to distinguish it
        // from the CHA call graph
        Q pcfCHA = cha.getPerControlFlowGraph();
        for(Edge xtaEdge : cgETA){
            xtaEdge.tag(CALL);
            Node callingMethod = xtaEdge.from();
            Node calledMethod = xtaEdge.to();
            Q callsites = declarations.forward(Common.toQ(callingMethod)).nodes(XCSG.CallSite);
            for(Edge perControlFlowEdge : pcfCHA.betweenStep(callsites, Common.toQ(calledMethod)).eval().edges()){
                perControlFlowEdge.tag(PER_CONTROL_FLOW);
            }
        }
    }

    /**
     * Updates the call graph and worklist for methods
     * @param worklist
     * @param cgRTA
     * @param method
     * @param allocationTypes
     * @param callEdge
     * @param calledMethod
     */
    private static void updateCallGraph(LinkedList<Node> worklist, AtlasSet<Edge> cgRTA, Node method, AtlasSet<Node> allocationTypes, Edge callEdge, Node calledMethod) {
        if(Common.toQ(cgRTA).betweenStep(Common.toQ(method), Common.toQ(calledMethod)).eval().edges().isEmpty()){
            // first time this edge is discovered: record it and queue the callee
            cgRTA.add(callEdge);
            if(!worklist.contains(calledMethod)){
                worklist.add(calledMethod);
            }
        } else {
            // edge already known: re-queue the callee only if its type set grew
            AtlasSet<Node> toAllocationTypes = getAllocationTypesSet(calledMethod);
            if(toAllocationTypes.addAll(allocationTypes)){
                if(!worklist.contains(calledMethod)){
                    worklist.add(calledMethod);
                }
            }
        }
    }

    /**
     * Gets or creates the types set for a graph element
     * Returns a reference to the types set so that updates to the
     * set will also update the set on the graph element.
     * @param ge
     * @return
     */
    @SuppressWarnings("unchecked")
    private static AtlasSet<Node> getAllocationTypesSet(Node ge){
        if(ge.hasAttr(TYPES_SET)){
            return (AtlasSet<Node>) ge.getAttr(TYPES_SET);
        } else {
            AtlasSet<Node> types = new AtlasHashSet<Node>();
            ge.putAttr(TYPES_SET, types);
            return types;
        }
    }

    /** Tags identifying ETA call edges (plus inherited CHA library call edges). */
    @Override
    public String[] getCallEdgeTags() {
        return new String[]{CALL, ClassHierarchyAnalysis.LIBRARY_CALL};
    }

    /** Tags identifying ETA per-control-flow edges (plus CHA library edges). */
    @Override
    public String[] getPerControlFlowEdgeTags() {
        return new String[]{PER_CONTROL_FLOW, ClassHierarchyAnalysis.LIBRARY_PER_CONTROL_FLOW};
    }

    /** Human-readable analysis name. */
    @Override
    public String getName() {
        return "Exception Type Analysis";
    }
}
package com.greenbeat_33177;

import android.app.Application;
import android.content.Context;
import com.facebook.react.PackageList;
import com.facebook.react.ReactApplication;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactPackage;
import com.facebook.soloader.SoLoader;
import java.lang.reflect.InvocationTargetException;
import java.util.List;

/**
 * React Native application entry point: configures the ReactNativeHost,
 * initializes SoLoader, and (in debug builds) loads Flipper via reflection.
 */
public class MainApplication extends Application implements ReactApplication {

  private final ReactNativeHost mReactNativeHost =
      new ReactNativeHost(this) {
        @Override
        public boolean getUseDeveloperSupport() {
          return BuildConfig.DEBUG;
        }

        @Override
        protected List<ReactPackage> getPackages() {
          @SuppressWarnings("UnnecessaryLocalVariable")
          List<ReactPackage> packages = new PackageList(this).getPackages();
          // Packages that cannot be autolinked yet can be added manually here, for example:
          // packages.add(new MyReactNativePackage());
          return packages;
        }

        @Override
        protected String getJSMainModuleName() {
          return "index";
        }
      };

  @Override
  public ReactNativeHost getReactNativeHost() {
    return mReactNativeHost;
  }

  @Override
  public void onCreate() {
    super.onCreate();
    SoLoader.init(this, /* native exopackage */ false);
    initializeFlipper(this, getReactNativeHost().getReactInstanceManager());
  }

  /**
   * Loads Flipper in React Native templates. Call this in the onCreate method with something like
   * initializeFlipper(this, getReactNativeHost().getReactInstanceManager());
   *
   * @param context the application context
   * @param reactInstanceManager the React instance manager to attach Flipper to
   */
  private static void initializeFlipper(
      Context context, ReactInstanceManager reactInstanceManager) {
    if (BuildConfig.DEBUG) {
      try {
        /*
         We use reflection here to pick up the class that initializes Flipper,
        since Flipper library is not available in release mode
        */
        Class<?> aClass = Class.forName("com.greenbeat_33177.ReactNativeFlipper");
        aClass
            .getMethod("initializeFlipper", Context.class, ReactInstanceManager.class)
            .invoke(null, context, reactInstanceManager);
      } catch (ClassNotFoundException
          | NoSuchMethodException
          | IllegalAccessException
          | InvocationTargetException e) {
        // Collapsed from four identical catch blocks into one multi-catch;
        // Flipper is best-effort tooling, so failures are only logged.
        e.printStackTrace();
      }
    }
  }
}
/* * SPDX-License-Identifier: Apache-2.0 * * The OpenSearch Contributors require contributions made to * this file be licensed under the Apache-2.0 license or a * compatible open source license. */ /* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch B.V. licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /* * Modifications Copyright OpenSearch Contributors. See * GitHub history for details. */ //---------------------------------------------------- // THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST. 
//----------------------------------------------------

package org.opensearch.client.opensearch._global.termvectors;

import jakarta.json.stream.JsonGenerator;
import org.opensearch.client.json.DelegatingDeserializer;
import org.opensearch.client.json.JsonpDeserializer;
import org.opensearch.client.json.JsonpMapper;
import org.opensearch.client.json.ObjectBuilderDeserializer;
import org.opensearch.client.json.ToJsonp;
import org.opensearch.client.util.ObjectBuilder;

import javax.annotation.Nullable;

// typedef: _global.termvectors.Filter

/**
 * Term-vector filtering options for the termvectors API: bounds on document
 * frequency, term frequency and word length used to filter returned terms.
 *
 * <p>All fields are optional ({@code @Nullable}) and are only serialized when
 * set. Instances are immutable; use {@link Builder} to construct one.
 *
 * <p>NOTE(review): this file is generated (see banner above) — change the
 * generator, not this file; manual edits will be lost.
 */
public final class Filter implements ToJsonp {
	@Nullable
	private final Number maxDocFreq;

	@Nullable
	private final Number maxNumTerms;

	@Nullable
	private final Number maxTermFreq;

	@Nullable
	private final Number maxWordLength;

	@Nullable
	private final Number minDocFreq;

	@Nullable
	private final Number minTermFreq;

	@Nullable
	private final Number minWordLength;

	// ---------------------------------------------------------------------------------------------

	// Copies every (possibly null) field from the builder; no validation is
	// performed because all fields are optional.
	protected Filter(Builder builder) {

		this.maxDocFreq = builder.maxDocFreq;
		this.maxNumTerms = builder.maxNumTerms;
		this.maxTermFreq = builder.maxTermFreq;
		this.maxWordLength = builder.maxWordLength;
		this.minDocFreq = builder.minDocFreq;
		this.minTermFreq = builder.minTermFreq;
		this.minWordLength = builder.minWordLength;

	}

	/**
	 * API name: {@code max_doc_freq}
	 */
	@Nullable
	public Number maxDocFreq() {
		return this.maxDocFreq;
	}

	/**
	 * API name: {@code max_num_terms}
	 */
	@Nullable
	public Number maxNumTerms() {
		return this.maxNumTerms;
	}

	/**
	 * API name: {@code max_term_freq}
	 */
	@Nullable
	public Number maxTermFreq() {
		return this.maxTermFreq;
	}

	/**
	 * API name: {@code max_word_length}
	 */
	@Nullable
	public Number maxWordLength() {
		return this.maxWordLength;
	}

	/**
	 * API name: {@code min_doc_freq}
	 */
	@Nullable
	public Number minDocFreq() {
		return this.minDocFreq;
	}

	/**
	 * API name: {@code min_term_freq}
	 */
	@Nullable
	public Number minTermFreq() {
		return this.minTermFreq;
	}

	/**
	 * API name: {@code min_word_length}
	 */
	@Nullable
	public Number minWordLength() {
		return this.minWordLength;
	}

	/**
	 * Serialize this object to JSON.
	 */
	public void toJsonp(JsonGenerator generator, JsonpMapper mapper) {
		generator.writeStartObject();
		toJsonpInternal(generator, mapper);
		generator.writeEnd();
	}

	// Writes only the fields that were set; numbers are emitted as doubles.
	protected void toJsonpInternal(JsonGenerator generator, JsonpMapper mapper) {

		if (this.maxDocFreq != null) {

			generator.writeKey("max_doc_freq");
			generator.write(this.maxDocFreq.doubleValue());

		}
		if (this.maxNumTerms != null) {

			generator.writeKey("max_num_terms");
			generator.write(this.maxNumTerms.doubleValue());

		}
		if (this.maxTermFreq != null) {

			generator.writeKey("max_term_freq");
			generator.write(this.maxTermFreq.doubleValue());

		}
		if (this.maxWordLength != null) {

			generator.writeKey("max_word_length");
			generator.write(this.maxWordLength.doubleValue());

		}
		if (this.minDocFreq != null) {

			generator.writeKey("min_doc_freq");
			generator.write(this.minDocFreq.doubleValue());

		}
		if (this.minTermFreq != null) {

			generator.writeKey("min_term_freq");
			generator.write(this.minTermFreq.doubleValue());

		}
		if (this.minWordLength != null) {

			generator.writeKey("min_word_length");
			generator.write(this.minWordLength.doubleValue());

		}

	}

	// ---------------------------------------------------------------------------------------------

	/**
	 * Builder for {@link Filter}. All fields are optional; unset fields stay
	 * {@code null} and are omitted from serialization.
	 */
	public static class Builder implements ObjectBuilder<Filter> {
		@Nullable
		private Number maxDocFreq;

		@Nullable
		private Number maxNumTerms;

		@Nullable
		private Number maxTermFreq;

		@Nullable
		private Number maxWordLength;

		@Nullable
		private Number minDocFreq;

		@Nullable
		private Number minTermFreq;

		@Nullable
		private Number minWordLength;

		/**
		 * API name: {@code max_doc_freq}
		 */
		public Builder maxDocFreq(@Nullable Number value) {
			this.maxDocFreq = value;
			return this;
		}

		/**
		 * API name: {@code max_num_terms}
		 */
		public Builder maxNumTerms(@Nullable Number value) {
			this.maxNumTerms = value;
			return this;
		}

		/**
		 * API name: {@code max_term_freq}
		 */
		public Builder maxTermFreq(@Nullable Number value) {
			this.maxTermFreq = value;
			return this;
		}

		/**
		 * API name: {@code max_word_length}
		 */
		public Builder maxWordLength(@Nullable Number value) {
			this.maxWordLength = value;
			return this;
		}

		/**
		 * API name: {@code min_doc_freq}
		 */
		public Builder minDocFreq(@Nullable Number value) {
			this.minDocFreq = value;
			return this;
		}

		/**
		 * API name: {@code min_term_freq}
		 */
		public Builder minTermFreq(@Nullable Number value) {
			this.minTermFreq = value;
			return this;
		}

		/**
		 * API name: {@code min_word_length}
		 */
		public Builder minWordLength(@Nullable Number value) {
			this.minWordLength = value;
			return this;
		}

		/**
		 * Builds a {@link Filter}.
		 *
		 * @throws NullPointerException
		 *             if some of the required fields are null.
		 */
		public Filter build() {
			return new Filter(this);
		}
	}

	// ---------------------------------------------------------------------------------------------

	/**
	 * Json deserializer for Filter
	 */
	public static final JsonpDeserializer<Filter> DESERIALIZER = ObjectBuilderDeserializer
			.createForObject(Builder::new, Filter::setupFilterDeserializer);

	// Maps each JSON field name onto the corresponding builder setter.
	protected static void setupFilterDeserializer(DelegatingDeserializer<Filter.Builder> op) {

		op.add(Builder::maxDocFreq, JsonpDeserializer.numberDeserializer(), "max_doc_freq");
		op.add(Builder::maxNumTerms, JsonpDeserializer.numberDeserializer(), "max_num_terms");
		op.add(Builder::maxTermFreq, JsonpDeserializer.numberDeserializer(), "max_term_freq");
		op.add(Builder::maxWordLength, JsonpDeserializer.numberDeserializer(), "max_word_length");
		op.add(Builder::minDocFreq, JsonpDeserializer.numberDeserializer(), "min_doc_freq");
		op.add(Builder::minTermFreq, JsonpDeserializer.numberDeserializer(), "min_term_freq");
		op.add(Builder::minWordLength, JsonpDeserializer.numberDeserializer(), "min_word_length");

	}

}
/*
 *
 *  * Copyright 2020 New Relic Corporation. All rights reserved.
 *  * SPDX-License-Identifier: Apache-2.0
 *
 */

package com.agent.instrumentation.solr;

import com.codahale.metrics.Metric;
import com.newrelic.api.agent.NewRelic;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.MetricsMap;

import java.util.Map;

/**
 * Reports the numeric entries of a Solr cache {@link MetricsMap} to New Relic.
 *
 * <p>The metric name is {@code <prefix><registry>/<metricType>/<beanName>/<key>}.
 */
public class CacheMetric extends NRMetric {

    // Stays null when the Metric handed to the constructor is not a MetricsMap.
    MetricsMap metric = null;
    String metricType = null;

    public CacheMetric(String mt, String r, Metric m, SolrInfoBean b) {
        super(r, b);
        metricType = mt;
        if (m instanceof MetricsMap) {
            metric = (MetricsMap) m;
        }
    }

    @Override
    public String getMetricName(String name) {
        return getMetricBase() + "/" + name;
    }

    /**
     * Records one New Relic metric per numeric entry in the map.
     *
     * @return the number of metrics recorded (0 when no MetricsMap is available)
     */
    @Override
    public int reportMetrics() {
        // Bug fix: the constructor leaves `metric` null when the supplied Metric is not
        // a MetricsMap, and the original code then threw a NullPointerException here.
        if (metric == null) {
            return 0;
        }
        int numMetrics = 0;
        Map<String, Object> map = metric.getValue();
        // entrySet avoids a second lookup per key (was keySet + get).
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            Object obj = entry.getValue();
            if (obj instanceof Number) {
                Number num = (Number) obj;
                NewRelic.recordMetric(getMetricName(entry.getKey()), num.floatValue());
                numMetrics++;
            }
        }
        return numMetrics;
    }

    @Override
    public String getMetricBase() {
        return prefix + registry + "/" + metricType + "/" + info.getName();
    }
}
/*
 * (C) Copyright 2015 Kai Burjack
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
 * documentation files (the "Software"), to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
 * to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
 * TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
 * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
package org.joml.camera;

import org.joml.Matrix4f;

/**
 * A simple arcball camera, which provides smooth acceleration/velocity/elapsed-time based movement/zoom and rotation.
 * <p>
 * It makes use of the {@link Vector3Mover} to follow the {@link #center(float, float, float) center} position and
 * uses the {@link ArcRotor} for the rotation angles.
 *
 * @author Kai Burjack
 */
public class ArcBallCamera {

    public Vector3Mover centerMover = new Vector3Mover();
    {
        centerMover.maxDirectAcceleration = 5.0f;
        centerMover.maxDirectDeceleration = 5.0f;
    }

    public final ArcRotor alphaMover = new ArcRotor();
    public final ArcRotor betaMover = new ArcRotor();

    public final ScalarMover zoomMover = new ScalarMover();
    {
        zoomMover.current = 10.0f;
        zoomMover.target = 10.0f;
        zoomMover.maxAcceleration = 10.0f;
        zoomMover.maxDeceleration = 15.0f;
    }

    /**
     * Apply the camera's view transformation to the given matrix by post-multiplying it.
     *
     * @param mat
     *            the matrix which gets post-multiplied by the camera's view transformation matrix
     * @return the supplied matrix
     */
    public Matrix4f viewMatrix(Matrix4f mat) {
        // Back the camera away from the center by the arcball radius, pitch about X,
        // yaw about Y (keeping "right" parallel to the world XZ-plane), and finally
        // move the rotation pivot to the center position.
        float distance = (float) zoomMover.current;
        float pitch = (float) betaMover.current;
        float yaw = (float) alphaMover.current;
        return mat.translate(0, 0, -distance)
                  .rotateX(pitch)
                  .rotateY(yaw)
                  .translate(-centerMover.current.x, -centerMover.current.y, -centerMover.current.z);
    }

    /** Set the target yaw angle (radians); wrapped into (-2π, 2π). */
    public void setAlpha(double alpha) {
        alphaMover.target = alpha % (2.0 * Math.PI);
    }

    /** Set the target pitch angle (radians); clamped to [-π/2, +π/2]. */
    public void setBeta(double beta) {
        betaMover.target = Math.max(-Math.PI / 2.0, Math.min(Math.PI / 2.0, beta));
    }

    public double getAlpha() {
        return alphaMover.target;
    }

    public double getBeta() {
        return betaMover.target;
    }

    /** Set the target arcball radius (distance from the center). */
    public void zoom(double zoom) {
        zoomMover.target = zoom;
    }

    /** Set the target center position the camera orbits around. */
    public void center(float x, float y, float z) {
        centerMover.target.set(x, y, z);
    }

    /** Advance all movers by the given elapsed time in seconds. */
    public void update(float elapsedTimeInSeconds) {
        alphaMover.update(elapsedTimeInSeconds);
        betaMover.update(elapsedTimeInSeconds);
        zoomMover.update(elapsedTimeInSeconds);
        centerMover.update(elapsedTimeInSeconds);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.netbeans.modules.project.libraries;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import org.netbeans.api.project.ProjectManager;
import org.netbeans.api.project.libraries.Library;
import org.netbeans.api.project.libraries.LibraryManager;
import org.netbeans.junit.NbTestCase;
import org.netbeans.junit.RandomlyFails;
import org.netbeans.modules.project.libraries.TestEntityCatalog;
import org.netbeans.modules.project.libraries.LibrariesTestUtil.TestLibraryTypeProvider;
import org.netbeans.spi.project.libraries.LibraryImplementation;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.util.Exceptions;
import org.openide.util.Mutex.Action;
import org.openide.util.test.MockLookup;

/**
 * Regression test for issue 166109: a deadlock between the libraries storage
 * and the ProjectManager mutex. Left code-identical on purpose — the scenario
 * depends on exact lock acquisition order and timing.
 *
 * @author Jaroslav Tulach &lt;jtulach@netbeans.org&gt;
 */
public class LibrariesStorageDeadlock166109Test extends NbTestCase {
    // Milliseconds to wait for the library-change event; overridable via
    // the "LibrariesTest.timeout" system property.
    private static final int TIMEOUT = Integer.getInteger("LibrariesTest.timeout", 5000);    //NOI18N
    static final Logger LOG =
        Logger.getLogger(LibrariesStorageDeadlock166109Test.class.getName());

    // Config folder where library definitions are written during the test.
    private FileObject storageFolder;

    public LibrariesStorageDeadlock166109Test(String name) {
        super(name);
    }

    @Override
    protected void setUp() throws Exception {
        // Install mock services so LibraryManager resolves against test fixtures.
        MockLookup.setInstances(
                new TestEntityCatalog(),
                new LibrariesTestUtil.MockProjectManager(),
                new LibraryTypeRegistryImpl());
        storageFolder = FileUtil.getConfigFile("org-netbeans-api-project-libraries/Libraries");
        assertNotNull("storageFolder found", storageFolder);
    }

    // Marked @RandomlyFails: the deadlock reproduction is timing dependent.
    @RandomlyFails
    public void testDeadlock() throws Exception {
        Library[] arr = LibraryManager.getDefault().getLibraries();
        assertEquals("Empty", 0, arr.length);
        LibrariesTestUtil.createLibraryDefinition(storageFolder,"Library1", null);
        // No provider is registered yet, so the new definition is not visible.
        Library[] arr0 = LibraryManager.getDefault().getLibraries();
        assertEquals("Still Empty", 0, arr0.length);
        final CountDownLatch event = new CountDownLatch(1);
        PropertyChangeListener l = new PropertyChangeListener() {
            @Override
            public void propertyChange(PropertyChangeEvent evt) {
                event.countDown();
            }
        };
        LibraryManager.getDefault().addPropertyChangeListener(l);
        try {
            // Registering the provider triggers the code path that used to deadlock;
            // waiting on the latch (with timeout) is the actual regression check.
            LibrariesTestUtil.registerLibraryTypeProvider(TestMutexLibraryTypeProvider.class);
            assertTrue(event.await(TIMEOUT, TimeUnit.MILLISECONDS));
        } finally {
            LibraryManager.getDefault().removePropertyChangeListener(l);
        }
        // TBD: There is another problem in the code. When a provider is added,
        // but it is not yet processed, the getLibraries() method uses cache and
        // thus can yield wrong results. To workaround that (and simulate the
        // deadlock) here is direct call to reset the cache.
        // Ideally it shall not be necessary for arr1 to have length 1
        Library[] arr1 = LibraryManager.getDefault().getLibraries();
        assertEquals("One", 1, arr1.length);
    }

    /**
     * Provider whose {@code createLibrary} asserts the locking invariants and
     * then re-enters via the ProjectManager mutex — the deadlock scenario.
     */
    public static final class TestMutexLibraryTypeProvider extends TestLibraryTypeProvider {

        public TestMutexLibraryTypeProvider() {
            LOG.info("TestMutexLibraryTypeProvider created");
        }

        @Override
        public LibraryImplementation createLibrary() {
            // Invariants: no storage lock and no ProjectManager mutex may be
            // held when a provider is asked to create a library.
            assertFalse("No Hold lock", Thread.holdsLock(LibraryManager.getDefault()));
            assertFalse("No mutex", ProjectManager.mutex().isReadAccess());
            assertFalse("No mutex write", ProjectManager.mutex().isWriteAccess());
            try {
                LibrariesTestUtil.registerLibraryTypeProvider(TestLibraryTypeProvider.class);
                // Widen the race window so the competing thread can run.
                Thread.sleep(500);
            } catch (Exception ex) {
                Exceptions.printStackTrace(ex);
            }
            // Re-enter under write access — this is the lock-order the bug was about.
            return ProjectManager.mutex().writeAccess(new Action<LibraryImplementation>() {
                public LibraryImplementation run() {
                    return TestMutexLibraryTypeProvider.super.createLibrary();
                }
            });
        }
    }
}
package me.yamakaja.irc.client.network.packet.client.command.topic;

import me.yamakaja.irc.client.network.packet.client.ClientboundPacketType;
import me.yamakaja.irc.client.network.packet.client.command.PacketCommandResponse;

import java.util.Date;

/**
 * RPL_TOPICWHOTIME response: carries the channel, who set the topic, and when.
 *
 * Created by Yamakaja on 04.02.17.
 */
public class PacketClientTopicSetInformation extends PacketCommandResponse {

    private String channel;
    private String setter;
    private Date time;

    @Override
    public void read(String data) {
        String[] parts = data.split(" ");
        channel = parts[3];
        // The setter may arrive as a full hostmask (nick!user@host); keep only the nick.
        String rawSetter = parts[4];
        int bangIndex = rawSetter.indexOf('!');
        setter = bangIndex >= 0 ? rawSetter.substring(0, bangIndex) : rawSetter;
        // Wire format is a unix timestamp in seconds; Date wants milliseconds.
        time = new Date(Long.parseLong(parts[5]) * 1000);
    }

    @Override
    public ClientboundPacketType getPacketType() {
        return ClientboundPacketType.RPL_TOPICINFO;
    }

    public String getChannel() {
        return channel;
    }

    public String getSetter() {
        return setter;
    }

    public Date getTime() {
        return time;
    }

}
/* * The MIT License (MIT) * * Copyright (c) 2016 Jun Gong * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.dtflys.forest.http; /** * @author gongjun[jun.gong@thebeastshop.com] * @since 2017-05-12 17:35 */ public class HttpStatus { public static final int CONTINUE = 100; public static final int SWITCHING_PROTOCOLS = 101; public static final int PROCESSING = 102; // --- 2xx Success --- public static final int OK = 200; public static final int CREATED = 201; public static final int ACCEPTED = 202; public static final int NON_AUTHORITATIVE_INFORMATION = 203; public static final int NO_CONTENT = 204; public static final int RESET_CONTENT = 205; public static final int PARTIAL_CONTENT = 206; public static final int MULTI_STATUS = 207; // --- 3xx Redirection --- public static final int MULTIPLE_CHOICES = 300; public static final int MOVED_PERMANENTLY = 301; public static final int MOVED_TEMPORARILY = 302; public static final int SEE_OTHER = 303; public static final int NOT_MODIFIED = 304; public static final int USE_PROXY = 305; public static final int TEMPORARY_REDIRECT = 307; // --- 4xx Client Error --- public static final int BAD_REQUEST = 400; public static final int UNAUTHORIZED = 401; public static final int PAYMENT_REQUIRED = 402; public static final int FORBIDDEN = 403; public static final int NOT_FOUND = 404; public static final int METHOD_NOT_ALLOWED = 405; public static final int NOT_ACCEPTABLE = 406; public static final int PROXY_AUTHENTICATION_REQUIRED = 407; public static final int REQUEST_TIMEOUT = 408; public static final int CONFLICT = 409; public static final int GONE = 410; public static final int LENGTH_REQUIRED = 411; public static final int PRECONDITION_FAILED = 412; public static final int REQUEST_TOO_LONG = 413; public static final int REQUEST_URI_TOO_LONG = 414; public static final int UNSUPPORTED_MEDIA_TYPE = 415; public static final int REQUESTED_RANGE_NOT_SATISFIABLE = 416; public static final int EXPECTATION_FAILED = 417; public static final int INSUFFICIENT_SPACE_ON_RESOURCE = 419; public static final int METHOD_FAILURE = 420; 
public static final int UNPROCESSABLE_ENTITY = 422; public static final int LOCKED = 423; public static final int FAILED_DEPENDENCY = 424; // --- 5xx Server Error --- public static final int INTERNAL_SERVER_ERROR = 500; public static final int NOT_IMPLEMENTED = 501; public static final int BAD_GATEWAY = 502; public static final int SERVICE_UNAVAILABLE = 503; public static final int GATEWAY_TIMEOUT = 504; public static final int HTTP_VERSION_NOT_SUPPORTED = 505; public static final int INSUFFICIENT_STORAGE = 507; }
package sample; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.vertx.core.*; import io.vertx.core.AbstractVerticle; import io.vertx.core.json.JsonObject; import io.vertx.ext.web.Router; public class App extends AbstractVerticle { private static final Logger logger = LoggerFactory.getLogger(App.class); @Override public void start() { Router router = Router.router(vertx); router.get("/").handler(ctx -> { ctx.response().putHeader("Content-Type", "text/plain").end("This is the root resource"); }); router.get("/plop").handler(ctx -> { ctx.response() .putHeader("Content-Type", "application/json") .end(new JsonObject().put("what", "Plop").encodePrettily()); }); vertx.createHttpServer() .requestHandler(router::accept) .listen(8080); } public static void main(String[] args) { logger.info("Starting..."); Vertx vertx = Vertx.vertx(); vertx.deployVerticle(new App(), id -> { if (id.succeeded()) { logger.info("App verticle successfully deployed"); } else { logger.error("Deployment of App verticle failed", id.cause()); } }); } }
/**
 * Copyright (c) 2011-2020, hubin (jobob@qq.com).
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package com.baomidou.mybatisplus.generator.config;

import java.util.List;

import com.baomidou.mybatisplus.generator.config.po.TableFill;
import com.baomidou.mybatisplus.generator.config.rules.NamingStrategy;
import com.baomidou.mybatisplus.toolkit.StringUtils;

/**
 * <p>
 * Strategy configuration for the code generator: naming rules, table
 * inclusion/exclusion, superclass overrides and entity-generation switches.
 * All setters are fluent and return {@code this}.
 * </p>
 *
 * @author YangHu, tangguo, hubin
 * @since 2016/8/30
 */
public class StrategyConfig {

    /**
     * Whether table and column names use underscore naming (default false).
     * NOTE(review): this is a mutable public static flag shared across all
     * instances; kept for backward compatibility.
     */
    public static boolean DB_COLUMN_UNDERLINE = false;

    /**
     * Whether upper-case naming is used.
     */
    private boolean isCapitalMode = false;

    /**
     * Naming strategy used when mapping database tables to entities.
     */
    private NamingStrategy naming = NamingStrategy.nochange;

    /**
     * Table name prefixes to recognize (and typically strip).
     */
    private String[] tablePrefix;

    /**
     * Fully qualified name of a custom parent Entity class.
     */
    private String superEntityClass;

    /**
     * Common columns already declared on the custom base Entity.
     */
    private String[] superEntityColumns;

    /**
     * Fully qualified name of a custom parent Mapper class.
     */
    private String superMapperClass = ConstVal.SUPERD_MAPPER_CLASS;

    /**
     * Fully qualified name of a custom parent Service class.
     */
    private String superServiceClass = ConstVal.SUPERD_SERVICE_CLASS;

    /**
     * Fully qualified name of a custom parent ServiceImpl class.
     */
    private String superServiceImplClass = ConstVal.SUPERD_SERVICEIMPL_CLASS;

    /**
     * Fully qualified name of a custom parent Controller class.
     */
    private String superControllerClass;

    /*
     * Tables to include (mutually exclusive with {@link #exclude}).
     */
    private String[] include = null;

    /**
     * Tables to exclude.
     */
    private String[] exclude = null;

    /**
     * [Entity] whether to generate field-name constants (default false)<br>
     * -----------------------------------<br>
     * public static final String ID = "test_id";
     */
    private boolean entityColumnConstant = false;

    /**
     * [Entity] whether to generate builder-style setters (default false)<br>
     * -----------------------------------<br>
     * public User setName(String name) { this.name = name; return this; }
     */
    private boolean entityBuilderModel = false;

    /**
     * [Entity] whether to generate a Lombok-style entity (default false)<br>
     * <a href="https://projectlombok.org/">document</a>
     */
    private boolean entityLombokModel = false;

    /**
     * Whether to strip the "is" prefix from Boolean columns (default false)<br>
     * e.g. a tinyint column named 'is_xxx' maps to entity property 'xxx'.
     */
    private boolean entityBooleanColumnRemoveIsPrefix = false;

    /**
     * Generate <code>@RestController</code> controllers.
     *
     * <pre>
     * <code>@Controller</code> -> <code>@RestController</code>
     * </pre>
     */
    private boolean restControllerStyle = false;

    /**
     * Convert camelCase request mappings to hyphenated style.
     *
     * <pre>
     * <code>@RequestMapping("/managerUserActionHistory")</code> -> <code>@RequestMapping("/manager-user-action-history")</code>
     * </pre>
     */
    private boolean controllerMappingHyphenStyle = false;

    /**
     * Name of the logic-delete property.
     */
    private String logicDeleteFieldName;

    /**
     * Table fill fields.
     */
    private List<TableFill> tableFillList = null;

    // NOTE(review): writes the shared static flag from an instance method;
    // preserved for backward compatibility.
    public StrategyConfig setDbColumnUnderline(boolean dbColumnUnderline) {
        DB_COLUMN_UNDERLINE = dbColumnUnderline;
        return this;
    }

    /**
     * <p>
     * Whether the given word matches the capital (UPPER_SNAKE) naming mode.
     * </p>
     *
     * @param word the string to test
     * @return true only if capital mode is enabled AND the word is capital-styled
     */
    public boolean isCapitalModeNaming(String word) {
        return isCapitalMode && StringUtils.isCapitalMode(word);
    }

    /**
     * <p>
     * Whether the table name contains one of the configured prefixes.
     * </p>
     *
     * @param tableName the table name to test
     * @return true when any configured prefix occurs in the name
     */
    public boolean containsTablePrefix(String tableName) {
        if (null != tableName) {
            String[] tps = getTablePrefix();
            if (null != tps) {
                for (String tp : tps) {
                    // NOTE(review): contains() matches the prefix anywhere in the
                    // name, not only at the start — kept as-is to preserve behavior.
                    if (tableName.contains(tp)) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    public boolean isCapitalMode() {
        return isCapitalMode;
    }

    public StrategyConfig setCapitalMode(boolean isCapitalMode) {
        this.isCapitalMode = isCapitalMode;
        return this;
    }

    public NamingStrategy getNaming() {
        return naming;
    }

    public StrategyConfig setNaming(NamingStrategy naming) {
        this.naming = naming;
        return this;
    }

    public String[] getTablePrefix() {
        return tablePrefix;
    }

    public StrategyConfig setTablePrefix(String[] tablePrefix) {
        this.tablePrefix = tablePrefix;
        return this;
    }

    public String getSuperEntityClass() {
        return superEntityClass;
    }

    public StrategyConfig setSuperEntityClass(String superEntityClass) {
        this.superEntityClass = superEntityClass;
        return this;
    }

    /**
     * Whether the field is one of the base-entity columns.
     *
     * @param fieldName the field name to test
     */
    public boolean includeSuperEntityColumns(String fieldName) {
        if (null != superEntityColumns) {
            for (String column : superEntityColumns) {
                // NOTE(review): contains() rather than equals() — a column named
                // "created_time" also matches fieldName "time". Preserved as-is.
                if (column.contains(fieldName)) {
                    return true;
                }
            }
        }
        return false;
    }

    public String[] getSuperEntityColumns() {
        return superEntityColumns;
    }

    public StrategyConfig setSuperEntityColumns(String[] superEntityColumns) {
        this.superEntityColumns = superEntityColumns;
        return this;
    }

    public String getSuperMapperClass() {
        return superMapperClass;
    }

    public StrategyConfig setSuperMapperClass(String superMapperClass) {
        this.superMapperClass = superMapperClass;
        return this;
    }

    public String getSuperServiceClass() {
        return superServiceClass;
    }

    public StrategyConfig setSuperServiceClass(String superServiceClass) {
        this.superServiceClass = superServiceClass;
        return this;
    }

    public String getSuperServiceImplClass() {
        return superServiceImplClass;
    }

    public StrategyConfig setSuperServiceImplClass(String superServiceImplClass) {
        this.superServiceImplClass = superServiceImplClass;
        return this;
    }

    public String getSuperControllerClass() {
        return superControllerClass;
    }

    public StrategyConfig setSuperControllerClass(String superControllerClass) {
        this.superControllerClass = superControllerClass;
        return this;
    }

    public String[] getInclude() {
        return include;
    }

    public StrategyConfig setInclude(String[] include) {
        this.include = include;
        return this;
    }

    public String[] getExclude() {
        return exclude;
    }

    public StrategyConfig setExclude(String[] exclude) {
        this.exclude = exclude;
        return this;
    }

    public boolean isEntityColumnConstant() {
        return entityColumnConstant;
    }

    public StrategyConfig setEntityColumnConstant(boolean entityColumnConstant) {
        this.entityColumnConstant = entityColumnConstant;
        return this;
    }

    public boolean isEntityBuilderModel() {
        return entityBuilderModel;
    }

    public StrategyConfig setEntityBuilderModel(boolean entityBuilderModel) {
        this.entityBuilderModel = entityBuilderModel;
        return this;
    }

    public boolean isEntityLombokModel() {
        return entityLombokModel;
    }

    public StrategyConfig setEntityLombokModel(boolean entityLombokModel) {
        this.entityLombokModel = entityLombokModel;
        return this;
    }

    public boolean isEntityBooleanColumnRemoveIsPrefix() {
        return entityBooleanColumnRemoveIsPrefix;
    }

    public StrategyConfig setEntityBooleanColumnRemoveIsPrefix(
            boolean entityBooleanColumnRemoveIsPrefix) {
        this.entityBooleanColumnRemoveIsPrefix = entityBooleanColumnRemoveIsPrefix;
        return this;
    }

    public boolean isRestControllerStyle() {
        return restControllerStyle;
    }

    public StrategyConfig setRestControllerStyle(boolean restControllerStyle) {
        this.restControllerStyle = restControllerStyle;
        return this;
    }

    public boolean isControllerMappingHyphenStyle() {
        return controllerMappingHyphenStyle;
    }

    public StrategyConfig setControllerMappingHyphenStyle(boolean controllerMappingHyphenStyle) {
        this.controllerMappingHyphenStyle = controllerMappingHyphenStyle;
        return this;
    }

    public String getLogicDeleteFieldName() {
        return logicDeleteFieldName;
    }

    public StrategyConfig setLogicDeleteFieldName(String logicDeletePropertyName) {
        // Bug fix: previously this was `this.logicDeleteFieldName = logicDeleteFieldName;`
        // — a self-assignment that silently discarded the supplied value.
        this.logicDeleteFieldName = logicDeletePropertyName;
        return this;
    }

    public List<TableFill> getTableFillList() {
        return tableFillList;
    }

    public StrategyConfig setTableFillList(List<TableFill> tableFillList) {
        this.tableFillList = tableFillList;
        return this;
    }
}
/* This file has been generated by Stubmaker (de.uka.ilkd.stubmaker)
 * Date: Wed Nov 26 11:26:00 CET 2014
 */
package java.nio.channels;

/**
 * Stub of {@code java.nio.channels.ByteChannel}: a channel that can both read
 * and write bytes, obtained by combining ReadableByteChannel and
 * WritableByteChannel. Generated stub — declares no members of its own.
 */
public interface ByteChannel extends java.nio.channels.ReadableByteChannel, java.nio.channels.WritableByteChannel
{
}
package nl.openweb.hippo.umd.ui; import org.apache.wicket.markup.head.CssHeaderItem; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.model.IModel; import org.apache.wicket.model.StringResourceModel; import org.apache.wicket.request.resource.CssResourceReference; import org.apache.wicket.request.resource.PackageResourceReference; import org.apache.wicket.request.resource.ResourceReference; import org.hippoecm.frontend.plugin.IPluginContext; import org.hippoecm.frontend.plugin.config.IPluginConfig; import org.hippoecm.frontend.plugins.standards.perspective.Perspective; import org.hippoecm.frontend.service.IconSize; public class Umd extends Perspective { private static final long serialVersionUID = 1L; private static final CssResourceReference PERSPECTIVE_SKIN = new CssResourceReference(Umd.class, "user-management-dashboard.css"); public Umd(final IPluginContext context, final IPluginConfig config) { super(context, config); } @Override public IModel<String> getTitle() { return new StringResourceModel("label.title", this, null, "default value", "arg1"); } @Override public ResourceReference getIcon(IconSize type) { return new PackageResourceReference(Umd.class, "user-management-dashboard-" + type.getSize() + ".png"); } @Override public void renderHead(final IHeaderResponse response) { super.renderHead(response); response.render(CssHeaderItem.forReference(PERSPECTIVE_SKIN)); } }
package br.com.infinitytechnology.filmex.fragments;

import android.support.v4.app.Fragment;
// NOTE(review): Pair appears unused in this file — verify against other branches before removing.
import android.util.Pair;

/**
 * Base class for fragments that support page-by-page navigation; subclasses
 * implement moving to the previous and next page of results.
 */
public abstract class FragmentPagination extends Fragment {

    // Navigate to the previous page of results.
    public abstract void paginationPrevious();

    // Navigate to the next page of results.
    public abstract void paginationNext();
}
package com.theah64.webengine.utils;

import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Utility for generating random alphanumeric strings, file names, and numeric
 * strings of arbitrary length.
 *
 * Created by theapache64 on 9/4/16.
 */
public class RandomString {

    // Alphabet of digits plus upper/lower case letter pairs.
    // Bug fix: the original read "...KkLkMm..." — lowercase 'l' was missing and
    // 'k' appeared twice, so generated strings could never contain 'l'.
    private static final String randomEngine = "0123456789AaBbCcDdEeFfGgHhIiJjKkLlMmNnOoPpQqRrSsTtUuVvWwXxYyZz";

    private static final Random random = new Random();

    // Utility class — not meant to be instantiated.
    private RandomString() {
    }

    /**
     * Returns a random alphanumeric string of the given length.
     */
    public static String get(final int length) {
        return getRandomString(length);
    }

    private static String getRandomString(final int length) {
        final StringBuilder apiKeyBuilder = new StringBuilder(Math.max(length, 0));
        for (int i = 0; i < length; i++) {
            apiKeyBuilder.append(randomEngine.charAt(random.nextInt(randomEngine.length())));
        }
        return apiKeyBuilder.toString();
    }

    /**
     * Returns a random file name: random base of the given length plus the extension.
     */
    public static String getRandomFilename(final int fileNameLength, final String fileExtension) {
        return getRandomString(fileNameLength) + fileExtension;
    }

    /**
     * Returns a random numeric string of exactly {@code length} digits, built
     * from chunks of at most 18 digits (the largest width a long can cover).
     * No chunk starts with '0', so the result never has a leading zero.
     *
     * @throws IllegalArgumentException if {@code length < 1}
     */
    public static String getRandomNumber(long length) {
        if (length < 1) {
            throw new IllegalArgumentException("Length must be > 0");
        }
        final StringBuilder numBuilder = new StringBuilder();
        long loopCount = length / 18;
        for (int i = 0; i < loopCount; i++) {
            numBuilder.append(getRandomNumberLimited(18));
        }
        // Bug fix: when length was an exact multiple of 18 the original code
        // unconditionally called getRandomNumberLimited(0), which throws.
        final int remainder = (int) (length % 18);
        if (remainder > 0) {
            numBuilder.append(getRandomNumberLimited(remainder));
        }
        return numBuilder.toString();
    }

    // Random numeric string of 1..18 digits with no leading zero.
    private static String getRandomNumberLimited(int length) {
        if (length < 1) {
            throw new IllegalArgumentException("Length must be > 0");
        }
        if (length > 18) {
            throw new IllegalArgumentException("Length must be < 19");
        }
        // e.g. length=3 -> lower "100", upper "999".
        String sLowerLimit = length == 1 ? "1" : (1 + String.format("%0" + (length - 1) + "d", 0));
        final long lowerLimit = Long.parseLong(sLowerLimit);
        final long upperLimit = Long.parseLong(sLowerLimit.replaceAll("\\d", "9"));
        return String.valueOf(ThreadLocalRandom.current().nextLong(lowerLimit, upperLimit + 1));
    }
}
/*
 * Copyright (C) 2020 Intel Corporation. All rights reserved. SPDX-License-Identifier: Apache-2.0
 */
package com.openiot.cloud.projectcenter.repository.document;

import com.openiot.cloud.base.help.ConstDef;
import com.openiot.cloud.base.mongo.model.help.UserRole;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

/**
 * MongoDB document for a user account, stored in the {@code ConstDef.C_USER}
 * collection. Getters/setters/equals/hashCode/toString are generated by
 * Lombok's {@code @Data}.
 */
@Document(ConstDef.C_USER)
@Data
public class User {
  // id is same with name
  @Id private String id;
  private String name;
  private String nickname;
  private String tel;
  private String email;
  // NOTE(review): storage format (plain vs. hashed) is not visible here — verify
  // how callers populate this before logging or exposing it.
  private String password;
  private String location;
  // should be lastPasswordModificationTime
  private long time_reg;
  // Name of the project the user most recently worked in.
  private String recentProject;
  private UserRole role;
}
// =================================================================================================== // _ __ _ _ // | |/ /__ _| | |_ _ _ _ _ __ _ // | ' </ _` | | _| || | '_/ _` | // |_|\_\__,_|_|\__|\_,_|_| \__,_| // // This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // // Copyright (C) 2006-2018 Kaltura Inc. // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU Affero General Public License as // published by the Free Software Foundation, either version 3 of the // License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. // // @ignore // =================================================================================================== package com.kaltura.client.enums; /** * This class was generated using exec.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. 
*/ public enum BatchJobErrorTypes implements EnumAsInt { APP(0), RUNTIME(1), HTTP(2), CURL(3), KALTURA_API(4), KALTURA_CLIENT(5); private int value; BatchJobErrorTypes(int value) { this.value = value; } @Override public int getValue() { return this.value; } public void setValue(int value) { this.value = value; } public static BatchJobErrorTypes get(Integer value) { if(value == null) { return null; } // goes over BatchJobErrorTypes defined values and compare the inner value with the given one: for(BatchJobErrorTypes item: values()) { if(item.getValue() == value) { return item; } } // in case the requested value was not found in the enum values, we return the first item as default. return BatchJobErrorTypes.values().length > 0 ? BatchJobErrorTypes.values()[0]: null; } }
package com.hehanpeng.framework.cloudhhp.module.core.event;

/**
 * Event type for the core event module. Currently carries no state —
 * presumably a base/marker class for richer event subclasses; TODO confirm
 * intended usage against the rest of the module.
 */
public class Event {

    /** No-arg constructor; the event holds no data. */
    public Event() {
    }
}
licensed apache software foundation asf contributor license agreements notice file distributed work additional copyright ownership asf licenses file apache license version license file compliance license copy license http apache org licenses license required applicable law agreed writing software distributed license distributed basis warranties conditions kind express implied license specific language governing permissions limitations license org apache catalina startup java file java util array list arraylist java util list org junit fail org junit org junit org junit rule org junit rules test name testname org apache juli logging log org apache juli logging log factory logfactory base logging support test cases resp ects respects standard conf logging properties configuration file support cleaning temporary files shutdown link add delete on tear down adddeleteonteardown file note logging configuration code catalina base code care property tests tomcat logging base test loggingbasetest log log file temp dir tempdir list file delete on tear down deleteonteardown array list arraylist file executing test method rule test name testname test name testname test name testname helper method returns directory tomcat build res ides resides access resources tomcat deployment examples webapp file get build directory getbuilddirectory file system get property getproperty tomcat test tomcat build tomcatbuild output build helper method returns path temporary directory test runs directory configured link set up setup code catalina base code instance tomcat started store temporary files code work code code web apps webapps code sub directories subdirectories deleted link tear down teardown files directories deleted cleanup register link add delete on tear down adddeleteonteardown file file get temporary directory gettemporarydirectory temp dir tempdir schedule file directory deleted test cleanup param file file directory add delete on tear down adddeleteonteardown file file delete 
on tear down deleteonteardown add file set up setup exception create catalina base directory temp dir tempdir file system get property getproperty tomcat test temp output tmp temp dir tempdir mkdirs temp dir tempdir is directory isdirectory fail unable create temporary directory test system set property setproperty catalina base temp dir tempdir get absolute path getabsolutepath configure logging system set property setproperty java util logging manager org apache juli class loader log manager classloaderlogmanager system set property setproperty java util logging config file file get build directory getbuilddirectory conf logging properties to string tostring log instance logging configured log log factory logfactory get log getlog get class getclass log info starting test test name testname get method name getmethodname tear down teardown exception file file delete on tear down deleteonteardown expand war expandwar delete file delete on tear down deleteonteardown clear
//,temp,CuratorService.java,651,663,temp,CuratorService.java,502,518 //,3 public class xxx { public void zkUpdate(String path, byte[] data) throws IOException { Preconditions.checkArgument(data != null, "null data"); checkServiceLive(); path = createFullPath(path); try { if (LOG.isDebugEnabled()) { LOG.debug("Updating {} with {} bytes", path, data.length); } curator.setData().forPath(path, data); } catch (Exception e) { throw operationFailure(path, "update()", e); } } };
package com.rockwell.crudui.form.impl.form.factory;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Stream;

import com.rockwell.crudui.crud.CrudOperation;
import com.rockwell.crudui.form.AbstractAutoGeneratedCrudFormFactory;
import com.rockwell.crudui.form.AfterAutoCreateFormEvent;
import com.vaadin.flow.component.ClickEvent;
import com.vaadin.flow.component.Component;
import com.vaadin.flow.component.ComponentEventListener;
import com.vaadin.flow.component.HasValueAndElement;
import com.vaadin.flow.component.button.Button;
import com.vaadin.flow.component.formlayout.FormLayout;
import com.vaadin.flow.component.orderedlayout.FlexComponent.Alignment;
import com.vaadin.flow.component.orderedlayout.VerticalLayout;

/**
 * Default auto-generated CRUD form factory. Lays the generated fields out in a
 * responsive {@link FormLayout} and lets callers register a per-operation hook
 * that runs right after each form is auto-created.
 *
 * @param <T> the domain type edited by the form
 */
public class DefaultCrudFormFactory<T> extends AbstractAutoGeneratedCrudFormFactory<T> {

    /**
     * Comment for <code>serialVersionUID</code>
     */
    private static final long serialVersionUID = -383399061079066282L;

    /** Responsive column breakpoints applied to every generated form. */
    private FormLayout.ResponsiveStep[] responsiveSteps;

    /** The most recently built form layout; rebuilt on every buildNewForm call. */
    protected FormLayout formLayout;

    /** Per-operation hooks invoked after a form has been auto-created. */
    protected Map<CrudOperation, AfterAutoCreateFormEvent<T>> afterAutoCreateFormEvents = new HashMap<>();

    public DefaultCrudFormFactory(Class<T> domainType) {
        this(domainType, (FormLayout.ResponsiveStep[]) null);
    }

    /**
     * @param domainType      the domain type edited by the form
     * @param responsiveSteps breakpoints for the form; when null, defaults to one
     *                        column below 25em and two columns above
     */
    public DefaultCrudFormFactory(Class<T> domainType, FormLayout.ResponsiveStep... responsiveSteps) {
        super(domainType);
        if (responsiveSteps != null) {
            this.responsiveSteps = responsiveSteps;
        } else {
            this.responsiveSteps = new FormLayout.ResponsiveStep[]{
                    new FormLayout.ResponsiveStep("0em", 1),
                    new FormLayout.ResponsiveStep("25em", 2)
            };
        }
        // Pre-register a no-op hook for EVERY operation (the original listed four
        // constants by hand, which would NPE buildNewForm for any constant added
        // to CrudOperation later; this also matches setAfterAutoCreateFormEvents).
        for (CrudOperation operation : CrudOperation.values()) {
            afterAutoCreateFormEvents.put(operation, (f, o) -> {
            });
        }
    }

    /** Registers a hook to run after a form for {@code operation} is auto-created. */
    public void setAfterAutoCreateFormEvents(CrudOperation operation, AfterAutoCreateFormEvent<T> event) {
        this.afterAutoCreateFormEvents.put(operation, event);
    }

    /** Registers the same hook for every CRUD operation. */
    public void setAfterAutoCreateFormEvents(AfterAutoCreateFormEvent<T> event) {
        Stream.of(CrudOperation.values()).forEach(item -> setAfterAutoCreateFormEvents(item, event));
    }

    public FormLayout getFormLayout() {
        return formLayout;
    }

    /**
     * Finds the form child whose {@code "fieldname"} attribute equals
     * {@code property}, or {@link Optional#empty()} when no form has been built
     * yet or no child matches.
     */
    public Optional<Component> getFormItem(String property) {
        if (formLayout == null) {
            // No form has been built yet; the original would have thrown an NPE.
            return Optional.empty();
        }
        // Compare with property on the left: getAttribute(..) may return null for
        // children without the attribute, which NPE'd the original ordering
        // (attribute.equals(property)).
        return formLayout.getChildren()
                .filter(item -> property.equals(item.getElement().getAttribute("fieldname")))
                .findFirst();
    }

    @Override
    public Component buildNewForm(CrudOperation operation, T domainObject, boolean readOnly,
            ComponentEventListener<ClickEvent<Button>> cancelButtonClickListener,
            ComponentEventListener<ClickEvent<Button>> operationButtonClickListener) {
        formLayout = new FormLayout();
        formLayout.setSizeFull();
        formLayout.setResponsiveSteps(responsiveSteps);

        @SuppressWarnings("rawtypes")
        List<HasValueAndElement> fields = buildFields(operation, domainObject, readOnly);
        fields.forEach(field -> formLayout.getElement().appendChild(field.getElement()));

        // Append any extra validated fields registered for this operation.
        if (otherFieldswithValidor.get(operation) != null) {
            otherFieldswithValidor.get(operation)
                    .forEach(field -> formLayout.getElement().appendChild(field.getField().getElement()));
        }

        Component footerLayout =
                buildFooter(operation, domainObject, cancelButtonClickListener, operationButtonClickListener);

        VerticalLayout mainLayout = new VerticalLayout(formLayout, footerLayout);
        mainLayout.setFlexGrow(1, formLayout);
        mainLayout.setHorizontalComponentAlignment(Alignment.END, footerLayout);
        mainLayout.setMargin(false);
        mainLayout.setPadding(false);
        mainLayout.setSpacing(true);

        // Run the post-creation hook for this operation (no-op unless overridden).
        afterAutoCreateFormEvents.get(operation).perfom(formLayout, domainObject);

        return mainLayout;
    }
}
/* * The baseCode project * * Copyright (c) 2011 University of British Columbia * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package ubic.basecode.math.linearmodels; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.text.DecimalFormat; import java.text.NumberFormat; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.zip.GZIPInputStream; import org.apache.commons.math3.distribution.FDistribution; import org.junit.Test; import cern.colt.list.DoubleArrayList; import cern.colt.matrix.DoubleMatrix1D; import cern.colt.matrix.DoubleMatrix2D; import cern.colt.matrix.impl.DenseDoubleMatrix2D; import cern.colt.matrix.linalg.Algebra; import cern.jet.math.Functions; import ubic.basecode.dataStructure.matrix.DenseDoubleMatrix; import ubic.basecode.dataStructure.matrix.DenseDoubleMatrix1D; import ubic.basecode.dataStructure.matrix.DoubleMatrix; import ubic.basecode.dataStructure.matrix.ObjectMatrix; import ubic.basecode.dataStructure.matrix.ObjectMatrixImpl; import ubic.basecode.dataStructure.matrix.StringMatrix; import ubic.basecode.io.reader.DoubleMatrixReader; import ubic.basecode.io.reader.StringMatrixReader; import ubic.basecode.io.writer.MatrixWriter; import ubic.basecode.math.DescriptiveWithMissing; import ubic.basecode.math.MatrixStats; /** * @author paul */ public 
class LeastSquaresFitTest { /** * @throws Exception */ @Test public void testLSFOneContinuousWithMissing3() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/example.madata.withmissing.small.txt" ) ); ObjectMatrix<String, String, Object> design = new ObjectMatrixImpl<>( 9, 1 ); design.set( 0, 0, 0.12 ); design.set( 1, 0, 0.24 ); design.set( 2, 0, 0.48 ); design.set( 3, 0, 0.96 ); design.set( 4, 0, 0.12 ); design.set( 5, 0, 0.24 ); design.set( 6, 0, 0.48 ); design.set( 7, 0, 0.96 ); design.set( 8, 0, 0.96 ); design.setRowNames( testMatrix.getColNames() ); design.addColumnName( "Value" ); LeastSquaresFit fit = new LeastSquaresFit( design, testMatrix ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); LinearModelSummary s = sums.get( "228980_at" ); // has missing assertNotNull( s ); assertEquals( 0.1495, s.getF(), 0.01 ); assertEquals( 0.7123, s.getP(), 0.001 ); assertEquals( 10.9180, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( 0.712, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); // pvalue assertEquals( 6, s.getResidualDof().intValue() ); GenericAnovaResult a = s.getAnova(); assertEquals( 0.1495, a.getMainEffectF( "Value" ), 0.0001 ); assertEquals( 1, a.getMainEffectDof( "Value" ).intValue() ); assertEquals( 6, a.getResidualDf().intValue() ); FDistribution fd = new FDistribution( 1, 6 ); double p = 1.0 - fd.cumulativeProbability( 0.1495 ); assertEquals( 0.7123, p, 0.0001 ); assertEquals( 0.7123, a.getMainEffectP( "Value" ), 0.0001 ); s = sums.get( "1553129_at" ); assertNotNull( s ); assertEquals( 2.095, s.getF(), 0.01 ); assertEquals( 0.1911, s.getP(), 0.001 ); assertEquals( 3.78719, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( 0.191, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); // this ordering might change? 
a = s.getAnova(); assertNotNull( a ); assertEquals( 0.1911, a.getMainEffectP( "Value" ), 0.0001 ); s = fit.summarize( 14 ); assertNotNull( s ); assertEquals( "214502_at", s.getKey() );// has missing assertEquals( 1.992, s.getF(), 0.01 ); assertEquals( 0.2172, s.getP(), 0.001 ); assertEquals( 4.2871, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( 0.217, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); s = sums.get( "232018_at" ); assertNotNull( s ); assertEquals( 1.381, s.getF(), 0.01 ); assertEquals( 0.2783, s.getP(), 0.001 ); assertEquals( 6.6537, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( 0.278, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); a = s.getAnova(); assertNotNull( a ); assertEquals( 0.2783, a.getMainEffectP( "Value" ), 0.0001 ); s = sums.get( "228980_at" ); // has missing assertNotNull( s ); assertEquals( 0.1495, s.getF(), 0.01 ); assertEquals( 0.7123, s.getP(), 0.001 ); assertEquals( 10.9180, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( 0.712, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); a = s.getAnova(); assertNotNull( a ); assertEquals( 0.7123, a.getMainEffectP( "Value" ), 0.0001 ); } /** * @throws Exception */ @Test public void testLSFThreeLevelsOnecontinous2() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/example.madata.small.txt" ) ); ObjectMatrix<String, String, Object> design = new ObjectMatrixImpl<>( 9, 2 ); design.set( 0, 0, "A" ); design.set( 1, 0, "A" ); design.set( 2, 0, "A" ); design.set( 3, 0, "B" ); design.set( 4, 0, "B" ); design.set( 5, 0, "B" ); design.set( 6, 0, "C" ); design.set( 7, 0, "C" ); design.set( 8, 0, "C" ); design.set( 0, 1, 0.12 ); design.set( 1, 1, 0.24 ); design.set( 2, 1, 0.48 ); design.set( 3, 1, 0.96 ); design.set( 4, 1, 0.12 ); design.set( 5, 1, 0.24 ); design.set( 6, 1, 0.48 ); design.set( 7, 1, 0.96 ); design.set( 8, 1, 0.96 ); 
design.setRowNames( testMatrix.getColNames() ); design.addColumnName( "Factor" ); design.addColumnName( "Value" ); LeastSquaresFit fit = new LeastSquaresFit( design, testMatrix ); DoubleMatrix2D coeffs = fit.getCoefficients(); assertEquals( 3.7458080, coeffs.get( 0, 0 ), 0.0001 ); assertEquals( -0.4388889, coeffs.get( 1, 2 ), 0.0001 ); assertEquals( 0.5709091, coeffs.get( 2, 10 ), 0.0001 ); assertEquals( 0.04856061, coeffs.get( 2, 18 ), 0.0001 ); assertEquals( -1.1363636, coeffs.get( 3, 10 ), 0.0001 ); assertEquals( 0.11174242, coeffs.get( 3, 18 ), 0.0001 ); DoubleMatrix2D fitted = fit.getFitted(); assertEquals( 3.764747, fitted.get( 0, 0 ), 0.0001 ); assertEquals( 6.043990, fitted.get( 1, 3 ), 0.0001 ); assertEquals( 10.858586, fitted.get( 7, 2 ), 0.0001 ); assertEquals( 6.307879, fitted.get( 18, 8 ), 0.0001 ); List<GenericAnovaResult> anova = fit.anova(); assertEquals( 19, anova.size() ); } /** * @throws Exception */ @Test public void testLSFThreeLevelsOneContinuousWithMissing3() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/example.madata.withmissing.small.txt" ) ); ObjectMatrix<String, String, Object> design = new ObjectMatrixImpl<>( 9, 2 ); design.set( 0, 0, "A" ); design.set( 1, 0, "A" ); design.set( 2, 0, "A" ); design.set( 3, 0, "B" ); design.set( 4, 0, "B" ); design.set( 5, 0, "B" ); design.set( 6, 0, "C" ); design.set( 7, 0, "C" ); design.set( 8, 0, "C" ); design.set( 0, 1, 0.12 ); design.set( 1, 1, 0.24 ); design.set( 2, 1, 0.48 ); design.set( 3, 1, 0.96 ); design.set( 4, 1, 0.12 ); design.set( 5, 1, 0.24 ); design.set( 6, 1, 0.48 ); design.set( 7, 1, 0.96 ); design.set( 8, 1, 0.96 ); design.setRowNames( testMatrix.getColNames() ); design.addColumnName( "Factor" ); design.addColumnName( "Value" ); LeastSquaresFit fit = new LeastSquaresFit( design, new DenseDoubleMatrix2D( testMatrix.asArray() ) ); DoubleMatrix2D coeffs = 
fit.getCoefficients(); assertEquals( 3.7458080, coeffs.get( 0, 0 ), 0.0001 ); assertEquals( -0.4388889, coeffs.get( 1, 2 ), 0.0001 ); assertEquals( 0.5709091, coeffs.get( 2, 10 ), 0.0001 ); assertEquals( 0.04856061, coeffs.get( 2, 18 ), 0.0001 ); assertEquals( -1.1363636, coeffs.get( 3, 10 ), 0.0001 ); assertEquals( 0.11174242, coeffs.get( 3, 18 ), 0.0001 ); DoubleMatrix2D fitted = fit.getFitted(); assertEquals( 3.764747, fitted.get( 0, 0 ), 0.0001 ); assertEquals( 6.043990, fitted.get( 1, 3 ), 0.0001 ); assertEquals( 10.8333, fitted.get( 7, 2 ), 0.0001 ); assertEquals( 6.307879, fitted.get( 18, 8 ), 0.0001 ); List<GenericAnovaResult> anova = fit.anova(); assertEquals( 19, anova.size() ); } /** * @throws Exception */ @Test public void testLSFTwoLevels() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/example.madata.small.txt" ) ); ObjectMatrix<String, String, Object> design = new ObjectMatrixImpl<>( 9, 1 ); design.set( 0, 0, "A" ); design.set( 1, 0, "A" ); design.set( 2, 0, "A" ); design.set( 3, 0, "A" ); design.set( 4, 0, "B" ); design.set( 5, 0, "B" ); design.set( 6, 0, "B" ); design.set( 7, 0, "B" ); design.set( 8, 0, "B" ); design.setRowNames( testMatrix.getColNames() ); design.addColumnName( "Factor" ); LeastSquaresFit fit = new LeastSquaresFit( design, testMatrix ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); LinearModelSummary s = sums.get( "1553129_at" ); assertEquals( 0.182999, s.getF(), 0.0001 ); assertEquals( 0.6817, s.getP(), 0.001 ); assertEquals( 3.84250, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( 0.682, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); // pvalue. Double[] effects = s.getEffects(); assertEquals( -11.58333, effects[0], 0.0001 ); // hm. 
assertEquals( 0.04999, effects[1], 0.0001 ); //hm Double[] stdevUnscaled = s.getStdevUnscaled(); // assertEquals( 0.5, stdevUnscaled[0], 0.0001 ); assertEquals( 0.6708203932, stdevUnscaled[1], 0.0001 ); Double sigma = s.getSigma(); assertEquals( 0.11673841331, sigma, 0.0001 ); s = sums.get( "232018_at" ); assertEquals( -18.9866667, s.getEffects()[0], 0.0001 ); // hm. assertEquals( 0.1714319, s.getEffects()[1], 0.0001 ); //hm assertEquals( 0.07879, s.getF(), 0.01 ); assertEquals( 0.787, s.getP(), 0.001 ); assertEquals( 6.2650, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( 0.787, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); sigma = s.getSigma(); assertEquals( 0.61072556381, sigma, 0.0001 ); } /** * Many missing values; Two factors, two levels + interaction. * * @throws Exception */ @Test public void testLSFTwoLevels2() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/lmtest1.dat.manymissing.txt" ) ); StringMatrixReader of = new StringMatrixReader(); StringMatrix<String, String> sampleInfo = of.read( this.getClass() .getResourceAsStream( "/data/lmtest1.des.txt" ) ); DesignMatrix d = new DesignMatrix( sampleInfo, true ); d.addInteraction(); LeastSquaresFit fit = new LeastSquaresFit( d, testMatrix ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); assertEquals( 100, sums.size() ); for ( LinearModelSummary lms : sums.values() ) { GenericAnovaResult a = lms.getAnova(); assertNotNull( a ); Double interactionEffectP = a.getInteractionEffectP(); assertNotNull( interactionEffectP ); } } /** * @throws Exception */ @Test public void testLSFTwoLevels3() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/anova-test-data.txt" ) ); StringMatrixReader of = new StringMatrixReader(); StringMatrix<String, String> sampleInfo 
= of.read( this.getClass().getResourceAsStream( "/data/anova-test-des.txt" ) ); DesignMatrix d = new DesignMatrix( sampleInfo, true ); d.addInteraction( "factor1", "factor2" ); LeastSquaresFit fit = new LeastSquaresFit( d, testMatrix ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); assertEquals( 100, sums.size() ); for ( LinearModelSummary lms : sums.values() ) { GenericAnovaResult a = lms.getAnova(); assertNotNull( a ); Double interactionEffectP = a.getInteractionEffectP(); assertNotNull( interactionEffectP ); } assertEquals( 0.0048, sums.get( "probe_4" ).getMainEffectP( "factor1" ), 0.0001 ); assertEquals( 5.158e-10, sums.get( "probe_10" ).getMainEffectP( "factor1" ), 1e-12 ); assertEquals( 0.6888, sums.get( "probe_98" ).getMainEffectP( "factor2" ), 1e-4 ); assertEquals( 0.07970, sums.get( "probe_10" ).getMainEffectP( "factor2" ), 1e-4 ); } /** * @throws Exception */ @Test public void testLSFTwoLevelsOneContinuous() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/example.madata.small.txt" ) ); ObjectMatrix<String, String, Object> design = new ObjectMatrixImpl<>( 9, 2 ); design.set( 0, 0, "A" ); design.set( 1, 0, "A" ); design.set( 2, 0, "A" ); design.set( 3, 0, "A" ); design.set( 4, 0, "B" ); design.set( 5, 0, "B" ); design.set( 6, 0, "B" ); design.set( 7, 0, "B" ); design.set( 8, 0, "B" ); design.set( 0, 1, 0.12 ); design.set( 1, 1, 0.24 ); design.set( 2, 1, 0.48 ); design.set( 3, 1, 0.96 ); design.set( 4, 1, 0.12 ); design.set( 5, 1, 0.24 ); design.set( 6, 1, 0.48 ); design.set( 7, 1, 0.96 ); design.set( 8, 1, 0.96 ); design.setRowNames( testMatrix.getColNames() ); design.addColumnName( "Factor" ); design.addColumnName( "Value" ); LeastSquaresFit fit = new LeastSquaresFit( design, testMatrix ); DoubleMatrix2D coeffs = fit.getCoefficients(); assertEquals( 3.77868, coeffs.get( 0, 0 ), 0.0001 ); assertEquals( 0.24476, coeffs.get( 
1, 2 ), 0.0001 ); assertEquals( -0.680449, coeffs.get( 2, 10 ), 0.0001 ); assertEquals( 0.114084, coeffs.get( 2, 18 ), 0.0001 ); DoubleMatrix2D fitted = fit.getFitted(); assertEquals( 3.795698, fitted.get( 0, 0 ), 0.0001 ); assertEquals( 5.497165, fitted.get( 1, 3 ), 0.0001 ); assertEquals( 10.879917, fitted.get( 7, 2 ), 0.0001 ); assertEquals( 6.346546, fitted.get( 18, 8 ), 0.0001 ); List<GenericAnovaResult> anova = fit.anova(); assertEquals( 19, anova.size() ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); LinearModelSummary s = sums.get( "1553129_at" ); assertEquals( 0.9389, s.getF(), 0.01 ); assertEquals( 0.4418, s.getP(), 0.001 ); assertEquals( 3.77868, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( 0.810, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); // this ordering might change? GenericAnovaResult a = s.getAnova(); assertEquals( 0.2429, a.getMainEffectP( "Value" ), 0.0001 ); s = sums.get( "232018_at" ); assertEquals( 0.7167, s.getF(), 0.01 ); assertEquals( 0.5259, s.getP(), 0.001 ); assertEquals( 6.5712, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( 0.664, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); a = s.getAnova(); assertEquals( 0.2893, a.getMainEffectP( "Value" ), 0.0001 ); // 232018_at // based on rstudent() in R. 
DoubleMatrix2D studentizedResiduals = fit.getStudentizedResiduals(); // log.info( studentizedResiduals.viewRow( 10 ) ); double[] expectedStudentizedResiduals = new double[] { -0.34655041, 1.46251738, -0.61403124, -0.34663812, -1.51245468, 0.06875469, 1.45818880, 1.02811044, -1.31696150 }; for ( int i = 0; i < 9; i++ ) { assertEquals( expectedStudentizedResiduals[i], studentizedResiduals.viewRow( 10 ).get( i ), 0.001 ); } // assertEquals( 1.1, DescriptiveWithMissing.variance( new DoubleArrayList( expectedStudentizedResiduals ) ), // 0.1 ); assertEquals( 1.1, DescriptiveWithMissing.variance( new DoubleArrayList( studentizedResiduals.viewRow( 10 ).toArray() ) ), 0.1 ); // 1553129_at // based on rstudent() in R. studentizedResiduals = fit.getStudentizedResiduals(); expectedStudentizedResiduals = new double[] { 0.46128657, -5.49429390, 0.84157385, 1.10053286, 1.10538546, -0.01706794, -0.05318259, -0.56926585, -0.35107932 }; for ( int i = 0; i < 9; i++ ) { assertEquals( expectedStudentizedResiduals[i], studentizedResiduals.viewRow( 0 ).get( i ), 0.001 ); } } /** * Weighted least squares test for 2D matrices * * @throws Exception */ @Test public void testMatrixWeightedRegress() throws Exception { DoubleMatrix2D dat = new DenseDoubleMatrix2D( new double[][] { { 1, 2, 3, 4, 5 }, { 1, 1, 6, 3, 2 } } ); DoubleMatrix2D des = new DenseDoubleMatrix2D( new double[][] { { 1, 1, 1, 1, 1 }, { 1, 2, 2, 3, 3 }, { 2, 1, 5, 3, 4 } } ); DoubleMatrix2D w = dat.copy(); w.assign( dat ); Algebra solver = new Algebra(); des = solver.transpose( des ); LeastSquaresFit fit = new LeastSquaresFit( des, dat, w ); /* * TODO R code please */ // FIXME why is precision of these tests so low? It was 0.1! 
I changed it to 0.001 // coefficients DoubleMatrix2D actuals = solver.transpose( fit.getCoefficients() ); double[][] expected = new double[][] { { -1.7070, 1.7110, 0.3054 }, { 0.2092, -0.6642, 1.3640 } }; for ( int i = 0; i < expected.length; i++ ) { assertArrayEquals( expected[i], actuals.viewRow( i ).toArray(), 0.001 ); } // fitted actuals = fit.getFitted(); expected = new double[][] { { 0.6151, 2.0210, 3.2430, 4.3430, 4.6490 }, { 2.273, 0.245, 5.701, 2.309, 3.673 } }; for ( int i = 0; i < expected.length; i++ ) { assertArrayEquals( expected[i], actuals.viewRow( i ).toArray(), 0.001 ); } // residuals actuals = fit.getResiduals(); expected = new double[][] { { 0.38490, -0.02092, -0.24270, -0.34310, 0.35150 }, { -1.2730, 0.7550, 0.2986, 0.6910, -1.6730 } }; for ( int i = 0; i < expected.length; i++ ) { assertArrayEquals( expected[i], actuals.viewRow( i ).toArray(), 0.001 ); } } /** * Has a lot of missing values. * * @throws Exception */ @Test public void testOneWayAnova() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/anova-test-data.txt" ) ); ObjectMatrix<String, String, Object> design = new ObjectMatrixImpl<>( 8, 1 ); for ( int i = 0; i < 8; i++ ) { design.set( i, 0, "A" + i % 3 ); } design.addColumnName( "Factor1" ); DesignMatrix d = new DesignMatrix( design, true ); LeastSquaresFit fit = new LeastSquaresFit( d, testMatrix ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); assertEquals( 100, sums.size() ); for ( LinearModelSummary lms : sums.values() ) { GenericAnovaResult a = lms.getAnova(); assertNotNull( a ); Double interactionEffectP = a.getInteractionEffectP(); assertNull( interactionEffectP ); } LinearModelSummary sum4 = sums.get( "probe_4" ); assertNotNull( sum4.getContrastCoefficients() ); assertEquals( 0.6531, sum4.getP(), 0.0001 ); assertEquals( 0.2735, sum4.getF(), 0.0001 ); assertEquals( 0.2735, 
sum4.getAnova().getMainEffectF( "Factor1" ), 0.0001 ); assertEquals( 2, sum4.getAnova().getResidualDf().intValue() ); assertEquals( 1, sum4.getAnova().getMainEffectDof( "Factor1" ).intValue() ); assertEquals( 0.6531, sum4.getMainEffectP( "Factor1" ), 0.0001 ); LinearModelSummary sum21 = sums.get( "probe_21" ); assertNotNull( sum21.getContrastCoefficients() ); assertEquals( 0.6492, sum21.getP(), 0.0001 ); assertEquals( 0.4821, sum21.getF(), 0.0001 ); assertEquals( 0.4821, sum21.getAnova().getMainEffectF( "Factor1" ), 0.0001 ); assertEquals( 4, sum21.getAnova().getResidualDf().intValue() ); assertEquals( 2, sum21.getAnova().getMainEffectDof( "Factor1" ).intValue() ); assertEquals( 0.6492, sum21.getMainEffectP( "Factor1" ), 0.0001 ); LinearModelSummary sum98 = sums.get( "probe_98" ); assertNotNull( sum98.getContrastCoefficients() ); assertEquals( 0.1604, sum98.getP(), 0.0001 ); assertEquals( 2.993, sum98.getF(), 0.0001 ); assertEquals( 4, sum98.getAnova().getResidualDf().intValue() ); assertEquals( 2, sum98.getAnova().getMainEffectDof( "Factor1" ).intValue() ); assertEquals( 2.9931, sum98.getAnova().getMainEffectF( "Factor1" ).doubleValue(), 0.0001 ); assertEquals( 0.1604, sum98.getMainEffectP( "Factor1" ), 1e-4 ); LinearModelSummary sum10 = sums.get( "probe_10" ); assertNotNull( sum10.getContrastCoefficients() ); assertEquals( 0.8014, sum10.getP(), 0.0001 ); assertEquals( 0.2314, sum10.getF(), 0.0001 ); assertEquals( 5, sum10.getAnova().getResidualDf().intValue() ); assertEquals( 2, sum10.getAnova().getMainEffectDof( "Factor1" ).intValue() ); assertEquals( 0.8014, sum10.getMainEffectP( "Factor1" ), 1e-4 ); /* * Painful case follows. Not full rank due to missing values: */ LinearModelSummary sum60 = sums.get( "probe_60" ); assertNotNull( sum60.getContrastCoefficients() ); /* * See lmtests.R * * Note that using lmFit and lm give different results because (apparently) of the difference in strategy * dealing * with missing values. 
In lm, the factors are subset; in lmFit, the design matrix is subset. We do the latter. * * lmFit(dat, model.matrix(~ factor3)) * * vs * * lm(t(dat["probe_60",]) ~ factor3) */ assertEquals( 9.00145, sum60.getContrastCoefficients().get( 0, 0 ), 0.0001 ); // same as lmFit; R lm gives 8.9913; lm.fit gives tehse values. assertEquals( -0.01020, sum60.getContrastCoefficients().get( 1, 0 ), 0.0001 ); // same as lmFit; R lm gives +0.0102 assertEquals( Double.NaN, sum60.getContrastCoefficients().get( 2, 0 ), 0.0001 ); // good ... /* * This is the value of R-squared from lm(). lmFit doesn't give us this, but... */ // assertEquals( -0.4996, sum60.getAdjRSquared(), 1e-5 ); // using results of lm() // anova(object) // Analysis of Variance Table // // Response: t(dat["probe_60", ]) // Df Sum Sq Mean Sq F value Pr(>F) // factor3 1 0.00010 0.000104 5e-04 0.9846 <- not quite // Residuals 2 0.44135 0.220675 <- we get this assertEquals( 2, sum60.getResidualDof().intValue() ); assertEquals( 1, sum60.getNumeratorDof().intValue() ); assertEquals( 2, sum60.getAnova().getResidualDf().intValue() ); assertEquals( 1, sum60.getAnova().getMainEffectDof( "Factor1" ).intValue() ); // The value lm gives is 0.004715; for limma, topTableF gives NA and no pvalue. // However, for this case, this is actually the t-statistic (two groups) and the right value is 0.022 (from lm) assertEquals( 0.0004715, sum60.getF(), 1e-7 ); // on 1 and 2 dof, p-value: 0.9846 assertEquals( 0.9846482, sum60.getP(), 1e-5 ); // assertEquals( 0.9846482, sum60.getMainEffectP( "Factor1" ), 1e-5 ); // Result for topTable(eBayes(fit), coef=2) // logFC AveExpr t P.Value adj.P.Val B // probe_60 -0.01020000 8.996350 -0.02338077 9.831252e-01 0.983125229 -7.4711642 // topTableF(eBayes(o2), number = 100) // X.Intercept. 
factor3v factor3w_base AveExpr F P.Value adj.P.Val // probe_60 9.001450 -0.01020000 NA 8.996350 NA NA NA // Using lm(formula = t(dat["probe_60", ]) ~ factor3) // Call: // lm(formula = t(dat["probe_60", ]) ~ factor3) // // Residuals: // X0b.bioassay.0b. X1a.bioassay.1a. X2a.bioassay.2a. X2b.bioassay.2b. // -0.1209 -0.4539 0.1209 0.4539 // // Coefficients: // Estimate Std. Error t value Pr(>|t|) // (Intercept) 8.9913 0.3322 27.068 0.00136 ** // factor3w_base 0.0102 0.4698 0.022 0.98465 // --- // Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1 // // Residual standard error: 0.4698 on 2 degrees of freedom // (4 observations deleted due to missingness) // Multiple R-squared: 0.0002357, Adjusted R-squared: -0.4996 // F-statistic: 0.0004715 on 1 and 2 DF, p-value: 0.9846 // results of using lm.fit(X,y) /// > X // (Intercept) factor3v factor3w_base // 2 1 1 0 // 3 1 0 1 // 5 1 1 0 // 6 1 0 1 // > y // [1] 8.8704 8.5475 9.1121 9.4554 // > lm.fit(X,y) // $coefficients // (Intercept) factor3v factor3w_base // 9.00145 -0.01020 NA // // $residuals // [1] -0.12085 -0.45395 0.12085 0.45395 // // $effects // (Intercept) factor3v // -17.9927 -0.0102 0.0784 0.6597 // // $rank // [1] 2 // // $fitted.values // [1] 8.99125 9.00145 8.99125 9.00145 // // $assign // NULL // // $qr // $qr // (Intercept) factor3v factor3w_base // 2 -2.0 -1.0000000 -1.000000e+00 // 3 0.5 1.0000000 -1.000000e+00 // 5 0.5 -0.3333333 -1.110223e-16 // 6 0.5 0.6666667 0.000000e+00 // // $qraux // [1] 1.500000 1.666667 2.000000 // // $pivot // [1] 1 2 3 // // $tol // [1] 1e-07 // // $rank // [1] 2 // // attr(,"class") // [1] "qr" // // $df.residual // [1] 2 } /** * @throws Exception */ @Test public void testSingular() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f .read( this.getClass().getResourceAsStream( "/data/lmtest2.dat.txt" ) ); StringMatrixReader of = new StringMatrixReader(); StringMatrix<String, String> sampleInfo = of.read( 
this.getClass() .getResourceAsStream( "/data/lmtest2.des.txt" ) ); DesignMatrix d = new DesignMatrix( sampleInfo, true ); assertEquals( 9, d.getMatrix().columns() ); LeastSquaresFit fit = new LeastSquaresFit( d, testMatrix ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); assertEquals( 81, sums.size() ); for ( LinearModelSummary lms : sums.values() ) { GenericAnovaResult a = lms.getAnova(); assertNotNull( a ); } LinearModelSummary s = sums.get( "A01157cds_s_at" ); assertNotNull( s.getContrastCoefficients() ); assertEquals( 7.3740000, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); // assertEquals( 0.1147667, s.getCoefficients().get( 1, 3 ), 0.001 ); assertEquals( 6, s.getResidualDof().intValue() ); assertEquals( 7, s.getNumeratorDof().intValue() ); assertEquals( 0.8634, s.getF(), 0.01 ); assertEquals( 0.5795, s.getP(), 0.001 ); } /** * Originally causes failures during summarization step. There are two pivoted columns. * * @throws Exception */ @Test public void testSingular2() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/1027_GSE6189.data.test.txt" ) ); StringMatrixReader of = new StringMatrixReader(); StringMatrix<String, String> sampleInfo = of.read( this.getClass().getResourceAsStream( "/data/1027_GSE6189_expdesign.data.txt" ) ); DesignMatrix d = new DesignMatrix( sampleInfo, true ); LeastSquaresFit fit = new LeastSquaresFit( d, testMatrix.getRowRange( 0, 0 ) ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); assertEquals( 1, sums.size() ); for ( LinearModelSummary lms : sums.values() ) { GenericAnovaResult a = lms.getAnova(); assertNotNull( a ); } LinearModelSummary s = sums.get( "1367452_at" ); assertNotNull( s ); assertNotNull( s.getContrastCoefficients() ); // log.info( s.getCoefficients() ); // log.info( s.getAnova() ); // our model matrix ends up with different coefficients than R which are // 
double[] rcoef = new double[] { 14.96355, 0.14421, -0.11525, 0.24257, Double.NaN, 0.04093, 0.06660, // Double.NaN }; // here are the coefs we get in R if we use the exact model matrix we get drom our DesignMatrix double[] coef = new double[] { 15.10776244, -0.01689300, 0.09835841, -0.20163964, Double.NaN, -0.04092962, Double.NaN, 0.06660370 }; for ( int i = 0; i < s.getContrastCoefficients().rows(); i++ ) { assertEquals( coef[i], s.getContrastCoefficients().get( i, 0 ), 0.0001 ); } } @Test public void testThreeWaySingular() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/1064_GSE7863.data.test.txt" ) ); StringMatrixReader of = new StringMatrixReader(); StringMatrix<String, String> sampleInfo = of.read( this.getClass().getResourceAsStream( "/data/1064_GSE7863_expdesign.data.test.txt" ) ); DesignMatrix d = new DesignMatrix( sampleInfo, true ); assertEquals( 5, d.getMatrix().columns() ); LeastSquaresFit fit = new LeastSquaresFit( d, testMatrix ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); assertEquals( 416, sums.size() ); for ( LinearModelSummary lms : sums.values() ) { GenericAnovaResult a = lms.getAnova(); assertNotNull( a ); } LinearModelSummary s = sums.get( "1415696_at" ); assertNotNull( s.getContrastCoefficients() ); assertEquals( 0.000794, s.getContrastCoefficients().get( 2, 3 ), 0.001 ); assertEquals( 11, s.getResidualDof().intValue() ); assertEquals( 4, s.getNumeratorDof().intValue() ); assertEquals( 24.38, s.getF(), 0.01 ); assertEquals( 2.025e-05, s.getP(), 0.001 ); GenericAnovaResult anova = s.getAnova(); assertEquals( 29.0386, anova.getMainEffectF( "Treatment" ), 0.0001 ); s = sums.get( "1415837_at" ); assertNotNull( s.getContrastCoefficients() ); assertEquals( 11, s.getResidualDof().intValue() ); assertEquals( 4, s.getNumeratorDof().intValue() ); assertEquals( 22.72, s.getF(), 0.01 ); assertEquals( 2.847e-05, 
s.getP(), 0.001 ); anova = s.getAnova(); assertEquals( 6.5977, anova.getMainEffectF( "Treatment" ), 0.0001 ); s = sums.get( "1416179_a_at" ); assertNotNull( s.getContrastCoefficients() ); assertEquals( 11, s.getResidualDof().intValue() ); assertEquals( 4, s.getNumeratorDof().intValue() ); assertEquals( 25.14, s.getF(), 0.01 ); assertEquals( 1.743e-05, s.getP(), 0.001 ); anova = s.getAnova(); assertEquals( 38.411, anova.getMainEffectF( "Treatment" ), 0.001 ); s = sums.get( "1456759_at" ); assertNotNull( s.getContrastCoefficients() ); assertEquals( 11, s.getResidualDof().intValue() ); assertEquals( 4, s.getNumeratorDof().intValue() ); assertEquals( 7.903, s.getF(), 0.01 ); assertEquals( 0.002960, s.getP(), 0.001 ); anova = s.getAnova(); assertEquals( 10.3792, anova.getMainEffectF( "Treatment" ), 0.001 ); assertEquals( 2.6253, anova.getMainEffectF( "Genotype" ), 0.001 ); } /** * Sanity check. * * @throws Exception */ @Test public void testTwoWayAnovaUnfittable() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass() .getResourceAsStream( "/data/lmtest10.dat.txt" ) ); StringMatrixReader of = new StringMatrixReader(); StringMatrix<String, String> sampleInfo = of.read( this.getClass().getResourceAsStream( "/data/lmtest10.des.txt" ) ); DesignMatrix d = new DesignMatrix( sampleInfo, true ); LeastSquaresFit fit = new LeastSquaresFit( d, testMatrix ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); assertEquals( 1, sums.size() ); for ( LinearModelSummary lms : sums.values() ) { GenericAnovaResult a = lms.getAnova(); assertNotNull( a ); assertEquals( Double.NaN, a.getMainEffectP( "CellType" ), 0.0001 ); assertEquals( Double.NaN, a.getMainEffectP( "SamplingTimePoint" ), 0.0001 ); } DoubleMatrix2D coefficients = fit.getCoefficients(); DoubleMatrix2D residuals = fit.getResiduals(); assertEquals( 2.238, coefficients.get( 0, 0 ), 0.0001 ); // mean. 
assertEquals( 0.0, coefficients.get( 1, 0 ), 0.0001 ); for ( int i = 0; i < residuals.rows(); i++ ) { assertEquals( 0.0, residuals.get( 0, i ), 0.00001 ); } } /** * Check for problem reported by TF -- Gemma gives slightly different result. Problem is not at this level. * * @throws Exception */ @Test public void testTwoWayAnovaWithInteractions() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/GSE8441_expmat_8probes.txt" ) ); StringMatrixReader of = new StringMatrixReader(); StringMatrix<String, String> sampleInfo = of.read( this.getClass().getResourceAsStream( "/data/606_GSE8441_expdesign.data.txt" ) ); DesignMatrix d = new DesignMatrix( sampleInfo, true ); d.addInteraction(); assertEquals( 4, d.getMatrix().columns() ); assertEquals( 22, testMatrix.columns() ); LeastSquaresFit fit = new LeastSquaresFit( d, testMatrix ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); assertEquals( 8, sums.size() ); for ( LinearModelSummary lms : sums.values() ) { GenericAnovaResult a = lms.getAnova(); assertNotNull( a ); } LinearModelSummary s = sums.get( "217757_at" ); GenericAnovaResult anova = s.getAnova(); assertNotNull( s.getContrastCoefficients() ); assertEquals( 0.763, s.getContrastCoefficients().get( 2, 3 ), 0.001 ); assertEquals( 18, s.getResidualDof().intValue() ); assertEquals( 3, s.getNumeratorDof().intValue() ); assertEquals( 0.299, s.getF(), 0.01 ); assertEquals( 0.8257, s.getP(), 0.001 ); assertEquals( 0.5876, anova.getMainEffectF( "Treatment" ), 0.0001 ); assertEquals( 0.5925, anova.getInteractionEffectP(), 0.001 ); s = sums.get( "202851_at" ); anova = s.getAnova(); assertNotNull( s.getContrastCoefficients() ); assertEquals( 0.787, s.getContrastCoefficients().get( 2, 3 ), 0.001 ); assertEquals( 18, s.getResidualDof().intValue() ); assertEquals( 3, s.getNumeratorDof().intValue() ); assertEquals( 0.1773, s.getF(), 0.01 ); assertEquals( 
0.9104, s.getP(), 0.001 ); assertEquals( 0.3777, anova.getMainEffectF( "Treatment" ), 0.0001 ); assertEquals( 0.9956, anova.getInteractionEffectP(), 0.001 ); } /** * No missing values; Two-way ANOVA with interaction, PLUS a continuous covariate. * * @throws Exception */ @Test public void testTwoWayTwoLevelsOneContinousInteractionC() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/example.madata.small.txt" ) ); ObjectMatrix<String, String, Object> design = new ObjectMatrixImpl<>( 9, 3 ); design.set( 0, 0, "A" ); design.set( 1, 0, "A" ); design.set( 2, 0, "A" ); design.set( 3, 0, "A" ); design.set( 4, 0, "B" ); design.set( 5, 0, "B" ); design.set( 6, 0, "B" ); design.set( 7, 0, "B" ); design.set( 8, 0, "B" ); design.set( 0, 1, 0.12 ); design.set( 1, 1, 0.24 ); design.set( 2, 1, 0.48 ); design.set( 3, 1, 0.96 ); design.set( 4, 1, 0.12 ); design.set( 5, 1, 0.24 ); design.set( 6, 1, 0.48 ); design.set( 7, 1, 0.96 ); design.set( 8, 1, 0.96 ); design.set( 0, 2, "C" ); design.set( 1, 2, "C" ); design.set( 2, 2, "D" ); design.set( 3, 2, "D" ); design.set( 4, 2, "C" ); design.set( 5, 2, "C" ); design.set( 6, 2, "D" ); design.set( 7, 2, "D" ); design.set( 8, 2, "D" ); design.addColumnName( "Treat" ); design.addColumnName( "Value" ); design.addColumnName( "Geno" ); DesignMatrix designMatrix = new DesignMatrix( design, true ); designMatrix.addInteraction( "Treat", "Geno" ); LeastSquaresFit fit = new LeastSquaresFit( designMatrix, testMatrix ); Map<String, LinearModelSummary> sums = fit.summarizeByKeys( true ); LinearModelSummary s = sums.get( "1553129_at" ); assertEquals( 1.791, s.getF(), 0.01 ); assertEquals( 0.2930, s.getP(), 0.001 ); assertEquals( 3.71542, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( 0.184, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); // this ordering might change? 
assertEquals( 0.137, s.getContrastCoefficients().get( 4, 3 ), 0.001 ); // this ordering might change? GenericAnovaResult a = s.getAnova(); assertEquals( 0.137, a.getInteractionEffectP(), 0.001 ); s = sums.get( "232018_at" ); assertEquals( 0.7167, s.getF(), 0.01 ); assertEquals( 0.6235, s.getP(), 0.001 ); assertEquals( 6.8873, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( 0.587932, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); a = s.getAnova(); assertEquals( 0.2904, a.getInteractionEffectP(), 0.001 ); for ( LinearModelSummary lms : sums.values() ) { GenericAnovaResult anova = lms.getAnova(); assertNotNull( anova ); Double interactionEffectP = anova.getInteractionEffectP(); assertNotNull( interactionEffectP ); assertTrue( !Double.isNaN( interactionEffectP ) ); } } @Test public void testVectorRegress() { DoubleMatrix1D vectorA = new DenseDoubleMatrix1D( new double[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 } ); DoubleMatrix1D vectorB = new DenseDoubleMatrix1D( new double[] { 1, 2, 2, 3, 3, 4, 4, 5, 5, 6 } ); LeastSquaresFit fit = new LeastSquaresFit( vectorA, vectorB ); DoubleMatrix2D coefficients = fit.getCoefficients(); DoubleMatrix2D residuals = fit.getResiduals(); assertEquals( 0.666666, coefficients.get( 0, 0 ), 0.0001 ); assertEquals( 0.5152, coefficients.get( 1, 0 ), 0.0001 ); double[] expectedResiduals = new double[] { -0.1818182, 0.3030303, -0.2121212, 0.2727273, -0.2424242, 0.2424242, -0.2727273, 0.2121212, -0.3030303, 0.1818182 }; for ( int i = 0; i < expectedResiduals.length; i++ ) { assertEquals( expectedResiduals[i], residuals.get( 0, i ), 0.00001 ); } } /** * @throws Exception */ @Test public void testVectorWeightedRegress() throws Exception { // a<-c( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ) // b<-c(1, 2, 2, 3, 3, 4, 4, 5, 5, 6) // w<-1/a DoubleMatrix1D vectorA = new DenseDoubleMatrix1D( new double[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 } ); DoubleMatrix1D vectorB = new DenseDoubleMatrix1D( new double[] { 1, 2, 2, 3, 3, 4, 4, 5, 5, 6 } ); DoubleMatrix1D 
w = vectorA.copy().assign( Functions.inv ); LeastSquaresFit fit = new LeastSquaresFit( vectorA, vectorB, w ); DoubleMatrix2D coefficients = fit.getCoefficients(); DoubleMatrix2D residuals = fit.getResiduals(); //lm(b~a, weights=w) //lm.wfit(model.matrix(~a, factor(a)), b, w) assertEquals( 0.60469, coefficients.get( 0, 0 ), 0.0001 ); assertEquals( 0.52642, coefficients.get( 1, 0 ), 0.0001 ); double[] expectedResiduals = new double[] { -0.1311097, 0.3424702, -0.1839499, 0.2896301, -0.2367900, 0.2367900, -0.2896301, 0.1839499, -0.3424702, 0.1311097 }; for ( int i = 0; i < expectedResiduals.length; i++ ) { assertEquals( expectedResiduals[i], residuals.get( 0, i ), 0.00001 ); } double[] expectedFitted = new double[] { 1.13111, 1.65753, 2.18395, 2.71037, 3.23679, 3.76321, 4.28963, 4.81605, 5.34247, 5.86889 }; for ( int i = 0; i < expectedFitted.length; i++ ) { assertEquals( expectedFitted[i], fit.getFitted().get( 0, i ), 0.00001 ); } } /** * @throws Exception */ @Test public void testVectorWeightedRegressWithMissing() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( this.getClass().getResourceAsStream( "/data/example.madata.withmissing.small.txt" ) ); DoubleMatrix1D libSize = MatrixStats.colSums( testMatrix ); testMatrix = MatrixStats.convertToLog2Cpm( testMatrix, libSize ); StringMatrixReader of = new StringMatrixReader(); StringMatrix<String, String> sampleInfo = of.read( this.getClass().getResourceAsStream( "/data/example.metadata.small.txt" ) ); DesignMatrix designMatrix = new DesignMatrix( sampleInfo ); DoubleMatrix2D weights = new DenseDoubleMatrix2D( testMatrix.asArray() ); weights.assign( Functions.inv ); LeastSquaresFit fit = new LeastSquaresFit( designMatrix, testMatrix, weights ); assertTrue( fit.isHasMissing() ); DoubleMatrix2D coefficients = fit.getCoefficients(); DoubleMatrix2D residuals = fit.getResiduals(); assertEquals( 15.339801, coefficients.get( 0, 0 ), 0.0001 ); assertEquals( 
-0.024058, coefficients.get( 1, 1 ), 0.0001 ); assertEquals( -0.059586, coefficients.get( 2, 18 ), 0.0001 ); assertEquals( -0.073732, residuals.get( 0, 0 ), 0.0001 ); assertEquals( -0.064656, residuals.get( 1, 1 ), 0.0001 ); assertEquals( -0.085214, residuals.get( 18, 8 ), 0.0001 ); assertTrue( Double.isNaN( residuals.get( 4, 2 ) ) ); } /** * Tests limma-like functionality * * Multiple levels per factor, unbalanced design * * @throws Exception */ @Test public void testNHBE() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( new GZIPInputStream( this.getClass().getResourceAsStream( "/data/NHBE_transcriptome_data.txt.gz" ) ) ); StringMatrixReader of = new StringMatrixReader(); StringMatrix<String, String> sampleInfo = of.read( this.getClass().getResourceAsStream( "/data/NHBE_design.txt" ) ); DesignMatrix designMatrix = new DesignMatrix( sampleInfo ); designMatrix.addInteraction(); designMatrix.setBaseline( "time", "1_h" ); designMatrix.setBaseline( "Treatment", "control" ); LeastSquaresFit fit = new LeastSquaresFit( designMatrix, testMatrix ); // System.err.println( designMatrix ); // List<LinearModelSummary> sums = fit.summarize( true ); ModeratedTstat.ebayes( fit ); ///////////// // System.err.println( "------- After ebayes ------" ); List<LinearModelSummary> sums = fit.summarize( true ); // fit3$sigma[1] assertEquals( 0.34927, sums.get( 0 ).getSigma(), 0.0001 ); assertEquals( 1.3859, sums.get( 0 ).getPriorDof(), 0.01 ); /* * TODO: add more tests here. */ } /** * Tests limma-like functionality. 
Balanced 2x2 design * * @throws Exception */ @Test public void testEstrogen() throws Exception { DoubleMatrixReader f = new DoubleMatrixReader(); DoubleMatrix<String, String> testMatrix = f.read( new GZIPInputStream( this.getClass().getResourceAsStream( "/data/estrogen.data.txt.gz" ) ) ); StringMatrixReader of = new StringMatrixReader(); StringMatrix<String, String> sampleInfo = of.read( this.getClass().getResourceAsStream( "/data/estrogen.meta.txt" ) ); DesignMatrix designMatrix = new DesignMatrix( sampleInfo ); designMatrix.addInteraction(); LeastSquaresFit fit = new LeastSquaresFit( designMatrix, testMatrix ); // System.err.println( designMatrix ); List<LinearModelSummary> sums = fit.summarize( true ); // System.err.println( fit.getCoefficients().viewColumn( 0 ) ); LinearModelSummary s = sums.get( 0 ); assertEquals( 3.8976, s.getF(), 0.01 ); assertEquals( 0.11092, s.getP(), 0.001 ); assertEquals( 9.69220, s.getContrastCoefficients().get( 0, 0 ), 0.001 ); assertEquals( -1.4517, s.getContrastCoefficients().get( 1, 2 ), 0.001 ); // tstat assertEquals( 0.220, s.getContrastCoefficients().get( 1, 3 ), 0.001 ); // pvalue assertEquals( 4, s.getResidualDof().intValue() ); GenericAnovaResult a = s.getAnova(); assertEquals( 0.24143, a.getMainEffectF( "time" ), 0.0001 ); assertEquals( 1, a.getMainEffectDof( "time" ).intValue() ); assertEquals( 4, a.getResidualDf().intValue() ); assertEquals( 2.43873, a.getInteractionEffectF(), 0.001 ); assertEquals( 0.19340, a.getInteractionEffectP(), 0.001 ); ///////////// ModeratedTstat.ebayes( fit ); sums = fit.summarize( true ); LinearModelSummary x = sums.get( 0 ); assertEquals( 0.0765, x.getSigma(), 0.0001 ); assertEquals( 4.48, x.getPriorDof(), 0.01 ); // we get 4.479999 or something. 
assertEquals( 3.8976, x.getF(), 0.01 ); assertEquals( 0.11092, x.getP(), 0.001 ); // topTable(fit3, coef=2,n=dim(fit3)[1], sort.by="none" )["100_g_at",] assertEquals( 9.692196, x.getContrastCoefficients().get( 0, 0 ), 0.0001 ); assertEquals( -0.92608, x.getContrastCoefficients().get( 1, 2 ), 0.001 ); // tstat assertEquals( 0.38, x.getContrastCoefficients().get( 1, 3 ), 0.001 ); // pvalue assertEquals( 4, x.getResidualDof().intValue() ); // topTable(fit3, coef=4,n=dim(fit3)[1], sort.by="none" )["100_g_at",] assertEquals( 0.34671, x.getContrastCoefficients().get( 3, 3 ), 0.0001 ); // interaction pvalue GenericAnovaResult ax = x.getAnova(); // classifyTestsF(fit3$t[,c(2)], df=fit3$df.residual, cor.matrix=cov2cor(fit3$cov.coefficients[2,2]), fstat.only = T)[1] assertEquals( 0.098252, ax.getMainEffectF( "time" ), 0.0001 ); assertEquals( 1, ax.getMainEffectDof( "time" ).intValue() ); assertEquals( 8.48, ax.getResidualDf(), 0.001 ); assertEquals( 3.6678, ax.getMainEffectF( "dose" ), 0.0001 ); assertEquals( 1, ax.getMainEffectDof( "dose" ).intValue() ); assertEquals( 8.48, ax.getResidualDf(), 0.001 ); // 4 + 4.48 // sum(f4^2)/1/sqig^2 // pf(0.99247, 1, 8.48, lower.tail=F) assertEquals( 0.99247, ax.getInteractionEffectF(), 0.0001 ); assertEquals( 0.34671, ax.getInteractionEffectP(), 0.0001 ); } }
package com.zhukai.feign.feign.impl;

import com.zhukai.feign.entity.User;
import com.zhukai.feign.feign.FeignProviderClient;
import org.springframework.stereotype.Component;

import java.util.Collections;
import java.util.List;

/**
 * Fallback implementation of {@link FeignProviderClient}, invoked by Feign's
 * fault tolerance when the remote provider service is unreachable.
 *
 * <p>Each fallback prints a maintenance notice and returns a harmless default.
 *
 * @author zhukai
 * @date 2021/4/11
 */
@Component
public class FeignError implements FeignProviderClient {

    // Runtime message kept byte-identical to the original ("server under maintenance...").
    private static final String MAINTENANCE_MESSAGE = "服务器维护中...";

    /**
     * Fallback for {@link FeignProviderClient#findAllUser()}.
     *
     * @return an empty, immutable list instead of {@code null}, so callers can
     *         iterate over the result without a null check
     */
    @Override
    public List<User> findAllUser() {
        System.out.println(MAINTENANCE_MESSAGE);
        // Returning an empty collection rather than null avoids NPEs in callers
        // that iterate the result. NOTE(review): if any caller relied on a null
        // return to detect the fallback path, verify before merging.
        return Collections.emptyList();
    }

    /**
     * Fallback for {@link FeignProviderClient#findUserById(String)}.
     *
     * @param id the requested user id (unused in the fallback)
     * @return {@code null}, signalling that no user could be fetched
     */
    @Override
    public User findUserById(String id) {
        System.out.println(MAINTENANCE_MESSAGE);
        return null;
    }

    /** Fallback for {@link FeignProviderClient#addUser(User)}: no-op besides the notice. */
    @Override
    public void addUser(User user) {
        System.out.println(MAINTENANCE_MESSAGE);
    }

    /** Fallback for {@link FeignProviderClient#updateUser(User)}: no-op besides the notice. */
    @Override
    public void updateUser(User user) {
        System.out.println(MAINTENANCE_MESSAGE);
    }

    /** Fallback for {@link FeignProviderClient#deleteUserById(String)}: no-op besides the notice. */
    @Override
    public void deleteUserById(String id) {
        System.out.println(MAINTENANCE_MESSAGE);
    }
}
package org.openforis.collect.earth.app.service;

import java.io.File;

import org.apache.commons.lang3.SystemUtils;

/**
 * Locates the Firefox executable by probing the usual per-OS installation
 * folders under the user's local application-data directory.
 */
public class FirefoxLocatorFixed {

	private FirefoxLocatorFixed() {
		// Utility class: not instantiable.
	}

	/**
	 * Attempts to find the Firefox binary for the current operating system.
	 *
	 * @return the path of the first candidate binary that exists on disk, or
	 *         {@code null} when none is found or the OS is neither Windows nor Mac
	 */
	public static String tryToFindFolder() {
		if (SystemUtils.IS_OS_WINDOWS) {
			return findInUsualWindowsLocations();
		} else if (SystemUtils.IS_OS_MAC) {
			return findInUsualMacLocations();
		}
		return null;
	}

	/**
	 * Mac candidates. Dynamic because the directory version number keeps changing.
	 */
	private static String findInUsualMacLocations() {
		return firstExistingPath(
				FolderFinder.getLocalAppDataFolder() + "/Applications/Firefox.app/Contents/MacOS/firefox-bin",
				FolderFinder.getLocalAppDataFolder() + "/Applications/Mozilla Firefox.app/Contents/MacOS/firefox-bin");
	}

	/** Windows candidates, covering legacy and current install folder names. */
	private static String findInUsualWindowsLocations() {
		return firstExistingPath(
				FolderFinder.getLocalAppDataFolder() + "\\Firefox-3\\firefox.exe",
				FolderFinder.getLocalAppDataFolder() + "\\Mozilla Firefox\\firefox.exe",
				FolderFinder.getLocalAppDataFolder() + "\\Firefox\\firefox.exe");
	}

	/**
	 * Shared probe loop (previously duplicated in both per-OS methods).
	 *
	 * @param candidates paths to test, in priority order
	 * @return the first path that exists on disk, or {@code null} when none does
	 */
	private static String firstExistingPath(String... candidates) {
		for (String candidate : candidates) {
			if (new File(candidate).exists()) {
				return candidate;
			}
		}
		return null;
	}
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.workspaces.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for disassociating a connection alias from a directory.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/workspaces-2015-04-08/DisassociateConnectionAlias"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DisassociateConnectionAliasRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The identifier of the connection alias to disassociate.
     * </p>
     */
    private String aliasId;

    /**
     * <p>
     * The identifier of the connection alias to disassociate.
     * </p>
     *
     * @param aliasId
     *        The identifier of the connection alias to disassociate.
     */
    public void setAliasId(String aliasId) {
        this.aliasId = aliasId;
    }

    /**
     * <p>
     * The identifier of the connection alias to disassociate.
     * </p>
     *
     * @return The identifier of the connection alias to disassociate.
     */
    public String getAliasId() {
        return this.aliasId;
    }

    /**
     * <p>
     * The identifier of the connection alias to disassociate.
     * </p>
     *
     * @param aliasId
     *        The identifier of the connection alias to disassociate.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DisassociateConnectionAliasRequest withAliasId(String aliasId) {
        // Delegates to the setter (not a direct field write) so subclasses
        // overriding setAliasId keep working, as in the generated original.
        setAliasId(aliasId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Produces exactly "{AliasId: <value>}" when set, "{}" otherwise.
        StringBuilder sb = new StringBuilder("{");
        if (getAliasId() != null) {
            sb.append("AliasId: ").append(getAliasId());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DisassociateConnectionAliasRequest)) {
            return false;
        }
        DisassociateConnectionAliasRequest that = (DisassociateConnectionAliasRequest) obj;
        // Null-safe comparison, equivalent to the generated XOR/equals chain.
        return getAliasId() == null ? that.getAliasId() == null : getAliasId().equals(that.getAliasId());
    }

    @Override
    public int hashCode() {
        // Same value as the generated 31 * 1 + (aliasId == null ? 0 : aliasId.hashCode()).
        return 31 + (getAliasId() == null ? 0 : getAliasId().hashCode());
    }

    @Override
    public DisassociateConnectionAliasRequest clone() {
        return (DisassociateConnectionAliasRequest) super.clone();
    }

}
package mezz.jei.api.gui;

import mezz.jei.api.IGuiHelper;

/**
 * A timer to help render things that normally depend on ticks.
 * Get an instance from {@link IGuiHelper#createTickTimer(int, int, boolean)}.
 * These are used in the internal implementation of {@link IDrawableAnimated}.
 */
public interface ITickTimer {
	/**
	 * @return the timer's current value. Presumably advances with game ticks up
	 *         to {@link #getMaxValue()} per the factory's parameters — see
	 *         {@link IGuiHelper#createTickTimer(int, int, boolean)} for the contract.
	 */
	int getValue();

	/**
	 * @return the maximum value this timer counts to, as configured when the
	 *         timer was created.
	 */
	int getMaxValue();
}
package com._4point.aem.docservices.rest_services.server;

import static org.junit.jupiter.api.Assertions.*;

import org.junit.jupiter.api.Test;

/**
 * Verifies that {@link ByteArrayString#toString()} renders only the requested
 * prefix of the wrapped bytes, as both raw byte values and ASCII digits.
 */
class ByteArrayStringTest {

	ByteArrayString underTest;

	@Test
	void testToString() {
		// "12345" is the bytes 49..53; with a length of 4 only the first four
		// bytes (49-52, i.e. the digits 1-4) should appear in the output.
		final String expected = "ByteArrayString [data=[49, 50, 51, 52], ascii=[1, 2, 3, 4]]";
		underTest = new ByteArrayString("12345".getBytes(), 4);
		assertEquals(expected, underTest.toString());
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.falcon.entity; import org.apache.falcon.FalconException; import org.apache.falcon.Pair; import org.apache.falcon.catalog.AbstractCatalogService; import org.apache.falcon.catalog.CatalogPartition; import org.apache.falcon.catalog.CatalogServiceFactory; import org.apache.falcon.entity.common.FeedDataPath; import org.apache.falcon.entity.v0.AccessControlList; import org.apache.falcon.entity.v0.cluster.Cluster; import org.apache.falcon.entity.v0.cluster.Interfacetype; import org.apache.falcon.entity.v0.feed.CatalogTable; import org.apache.falcon.entity.v0.feed.Feed; import org.apache.falcon.entity.v0.feed.LocationType; import org.apache.falcon.expression.ExpressionHelper; import org.apache.falcon.hadoop.HadoopClientFactory; import org.apache.falcon.retention.EvictedInstanceSerDe; import org.apache.falcon.retention.EvictionHelper; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.servlet.jsp.el.ELException; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Date; 
import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; /** * A catalog registry implementation of a feed storage. */ public class CatalogStorage extends Configured implements Storage { private static final Logger LOG = LoggerFactory.getLogger(EvictionHelper.class); // constants to be used while preparing HCatalog partition filter query private static final String FILTER_ST_BRACKET = "("; private static final String FILTER_END_BRACKET = ")"; private static final String FILTER_QUOTE = "'"; private static final String FILTER_AND = " and "; private static final String FILTER_OR = " or "; private static final String FILTER_LESS_THAN = " < "; private static final String FILTER_EQUALS = " = "; private final StringBuffer instancePaths = new StringBuffer(); private final StringBuilder instanceDates = new StringBuilder(); public static final String PARTITION_SEPARATOR = ";"; public static final String PARTITION_KEYVAL_SEPARATOR = "="; public static final String INPUT_PATH_SEPARATOR = ":"; public static final String OUTPUT_PATH_SEPARATOR = "/"; public static final String PARTITION_VALUE_QUOTE = "'"; public static final String CATALOG_URL = "${hcatNode}"; private final String catalogUrl; private String database; private String table; private Map<String, String> partitions; protected CatalogStorage(Feed feed) throws URISyntaxException { this(CATALOG_URL, feed.getTable()); } public CatalogStorage(Cluster cluster, CatalogTable table) throws URISyntaxException { this(ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY).getEndpoint(), table); } protected CatalogStorage(String catalogUrl, CatalogTable table) throws URISyntaxException { this(catalogUrl, table.getUri()); } protected CatalogStorage(String catalogUrl, String tableUri) throws URISyntaxException { if (catalogUrl == null || catalogUrl.length() == 0) { throw new IllegalArgumentException("Catalog Registry URL cannot be null or empty"); } this.catalogUrl = 
catalogUrl; parseFeedUri(tableUri); } /** * Validate URI to conform to catalog:$database:$table#$partitions. * scheme=catalog:database=$database:table=$table#$partitions * partitions=key=value;key=value * * @param catalogTableUri table URI to parse and validate * @throws URISyntaxException */ private void parseFeedUri(String catalogTableUri) throws URISyntaxException { final String processed = catalogTableUri.replaceAll(DOLLAR_EXPR_START_REGEX, DOLLAR_EXPR_START_NORMALIZED) .replaceAll("}", EXPR_CLOSE_NORMALIZED); URI tableUri = new URI(processed); if (!"catalog".equals(tableUri.getScheme())) { throw new URISyntaxException(tableUri.toString(), "catalog scheme is missing"); } final String schemeSpecificPart = tableUri.getSchemeSpecificPart(); if (schemeSpecificPart == null) { throw new URISyntaxException(tableUri.toString(), "Database and Table are missing"); } String[] paths = schemeSpecificPart.split(INPUT_PATH_SEPARATOR); if (paths.length != 2) { throw new URISyntaxException(tableUri.toString(), "URI path is not in expected format: database:table"); } database = paths[0]; table = paths[1]; if (database == null || database.length() == 0) { throw new URISyntaxException(tableUri.toString(), "DB name is missing"); } if (table == null || table.length() == 0) { throw new URISyntaxException(tableUri.toString(), "Table name is missing"); } String partRaw = tableUri.getFragment(); if (partRaw == null || partRaw.length() == 0) { throw new URISyntaxException(tableUri.toString(), "Partition details are missing"); } final String rawPartition = partRaw.replaceAll(DOLLAR_EXPR_START_NORMALIZED, DOLLAR_EXPR_START_REGEX) .replaceAll(EXPR_CLOSE_NORMALIZED, EXPR_CLOSE_REGEX); partitions = new LinkedHashMap<String, String>(); // preserve insertion order String[] parts = rawPartition.split(PARTITION_SEPARATOR); for (String part : parts) { if (part == null || part.length() == 0) { continue; } String[] keyVal = part.split(PARTITION_KEYVAL_SEPARATOR); if (keyVal.length != 2) { throw new 
URISyntaxException(tableUri.toString(), "Partition key value pair is not specified properly in (" + part + ")"); } partitions.put(keyVal[0], keyVal[1]); } } /** * Create an instance from the URI Template that was generated using * the getUriTemplate() method. * * @param uriTemplate the uri template from org.apache.falcon.entity.CatalogStorage#getUriTemplate * @throws URISyntaxException */ protected CatalogStorage(String uriTemplate) throws URISyntaxException { if (uriTemplate == null || uriTemplate.length() == 0) { throw new IllegalArgumentException("URI template cannot be null or empty"); } final String processed = uriTemplate.replaceAll(DOLLAR_EXPR_START_REGEX, DOLLAR_EXPR_START_NORMALIZED) .replaceAll("}", EXPR_CLOSE_NORMALIZED); URI uri = new URI(processed); this.catalogUrl = uri.getScheme() + "://" + uri.getAuthority(); parseUriTemplate(uri); } protected CatalogStorage(String uriTemplate, Configuration conf) throws URISyntaxException { this(uriTemplate); setConf(conf); } private void parseUriTemplate(URI uriTemplate) throws URISyntaxException { String path = uriTemplate.getPath(); String[] paths = path.split(OUTPUT_PATH_SEPARATOR); if (paths.length != 4) { throw new URISyntaxException(uriTemplate.toString(), "URI path is not in expected format: database:table"); } database = paths[1]; table = paths[2]; String partRaw = paths[3]; if (database == null || database.length() == 0) { throw new URISyntaxException(uriTemplate.toString(), "DB name is missing"); } if (table == null || table.length() == 0) { throw new URISyntaxException(uriTemplate.toString(), "Table name is missing"); } if (partRaw == null || partRaw.length() == 0) { throw new URISyntaxException(uriTemplate.toString(), "Partition details are missing"); } String rawPartition = partRaw.replaceAll(DOLLAR_EXPR_START_NORMALIZED, DOLLAR_EXPR_START_REGEX) .replaceAll(EXPR_CLOSE_NORMALIZED, EXPR_CLOSE_REGEX); partitions = new LinkedHashMap<String, String>(); String[] parts = 
rawPartition.split(PARTITION_SEPARATOR); for (String part : parts) { if (part == null || part.length() == 0) { continue; } String[] keyVal = part.split(PARTITION_KEYVAL_SEPARATOR); if (keyVal.length != 2) { throw new URISyntaxException(uriTemplate.toString(), "Partition key value pair is not specified properly in (" + part + ")"); } partitions.put(keyVal[0], keyVal[1]); } } public String getCatalogUrl() { return catalogUrl; } public String getDatabase() { return database; } public String getTable() { return table; } public Map<String, String> getPartitions() { return partitions; } /** * @param key partition key * @return partition value */ public String getPartitionValue(String key) { return partitions.get(key); } /** * @param key partition key * @return if partitions map includes the key or not */ public boolean hasPartition(String key) { return partitions.containsKey(key); } public List<String> getDatedPartitionKeys() { List<String> keys = new ArrayList<String>(); for (Map.Entry<String, String> entry : getPartitions().entrySet()) { Matcher matcher = FeedDataPath.PATTERN.matcher(entry.getValue()); if (matcher.find()) { keys.add(entry.getKey()); } } return keys; } /** * Convert the partition map to filter string. * Each key value pair is separated by ';'. * * @return filter string */ public String toPartitionFilter() { StringBuilder filter = new StringBuilder(); filter.append("("); for (Map.Entry<String, String> entry : partitions.entrySet()) { if (filter.length() > 1) { filter.append(PARTITION_SEPARATOR); } filter.append(entry.getKey()); filter.append(PARTITION_KEYVAL_SEPARATOR); filter.append(PARTITION_VALUE_QUOTE); filter.append(entry.getValue()); filter.append(PARTITION_VALUE_QUOTE); } filter.append(")"); return filter.toString(); } /** * Convert the partition map to path string. * Each key value pair is separated by '/'. 
* * @return path string */ public String toPartitionAsPath() { StringBuilder partitionFilter = new StringBuilder(); for (Map.Entry<String, String> entry : getPartitions().entrySet()) { partitionFilter.append(entry.getKey()) .append(PARTITION_KEYVAL_SEPARATOR) .append(entry.getValue()) .append(OUTPUT_PATH_SEPARATOR); } partitionFilter.setLength(partitionFilter.length() - 1); return partitionFilter.toString(); } @Override public TYPE getType() { return TYPE.TABLE; } /** * LocationType does NOT matter here. */ @Override public String getUriTemplate() { return getUriTemplate(LocationType.DATA); } /** * LocationType does NOT matter here. */ @Override public String getUriTemplate(LocationType locationType) { StringBuilder uriTemplate = new StringBuilder(); uriTemplate.append(catalogUrl); uriTemplate.append(OUTPUT_PATH_SEPARATOR); uriTemplate.append(database); uriTemplate.append(OUTPUT_PATH_SEPARATOR); uriTemplate.append(table); uriTemplate.append(OUTPUT_PATH_SEPARATOR); for (Map.Entry<String, String> entry : partitions.entrySet()) { uriTemplate.append(entry.getKey()); uriTemplate.append(PARTITION_KEYVAL_SEPARATOR); uriTemplate.append(entry.getValue()); uriTemplate.append(PARTITION_SEPARATOR); } uriTemplate.setLength(uriTemplate.length() - 1); return uriTemplate.toString(); } @Override public boolean isIdentical(Storage toCompareAgainst) throws FalconException { if (!(toCompareAgainst instanceof CatalogStorage)) { return false; } CatalogStorage catalogStorage = (CatalogStorage) toCompareAgainst; return !(getCatalogUrl() != null && !getCatalogUrl().equals(catalogStorage.getCatalogUrl())) && getDatabase().equals(catalogStorage.getDatabase()) && getTable().equals(catalogStorage.getTable()) && getPartitions().equals(catalogStorage.getPartitions()); } @Override public void validateACL(AccessControlList acl) throws FalconException { // This is not supported in Hive today as authorization is not enforced on table and // partition listing } @Override public 
List<FeedInstanceStatus> getListing(Feed feed, String cluster, LocationType locationType, Date start, Date end) throws FalconException { throw new UnsupportedOperationException("getListing"); } @Override public StringBuilder evict(String retentionLimit, String timeZone, Path logFilePath) throws FalconException { LOG.info("Applying retention on {}, Limit: {}, timezone: {}", getTable(), retentionLimit, timeZone); List<CatalogPartition> toBeDeleted; try { // get sorted date partition keys and values toBeDeleted = discoverPartitionsToDelete(retentionLimit, timeZone); } catch (ELException e) { throw new FalconException("Couldn't find partitions to be deleted", e); } if (toBeDeleted.isEmpty()) { LOG.info("No partitions to delete."); } else { final boolean isTableExternal = CatalogServiceFactory.getCatalogService().isTableExternal( getConf(), getCatalogUrl(), getDatabase(), getTable()); try { dropPartitions(toBeDeleted, isTableExternal); } catch (IOException e) { throw new FalconException("Couldn't drop partitions", e); } } try { EvictedInstanceSerDe.serializeEvictedInstancePaths( HadoopClientFactory.get().createProxiedFileSystem(logFilePath.toUri(), new Configuration()), logFilePath, instancePaths); } catch (IOException e) { throw new FalconException("Couldn't record dropped partitions", e); } return instanceDates; } private List<CatalogPartition> discoverPartitionsToDelete(String retentionLimit, String timezone) throws FalconException, ELException { Pair<Date, Date> range = EvictionHelper.getDateRange(retentionLimit); ExpressionHelper.setReferenceDate(range.first); Map<String, String> partitionsToDelete = new LinkedHashMap<String, String>(); ExpressionHelper expressionHelper = ExpressionHelper.get(); for (Map.Entry<String, String> entry : getPartitions().entrySet()) { if (FeedDataPath.PATTERN.matcher(entry.getValue()).find()) { partitionsToDelete.put(entry.getKey(), expressionHelper.evaluateFullExpression(entry.getValue(), String.class)); } } final String filter = 
createFilter(partitionsToDelete); return CatalogServiceFactory.getCatalogService().listPartitionsByFilter( getConf(), getCatalogUrl(), getDatabase(), getTable(), filter); } /** * Creates hive partition filter from inputs partition map. * @param partitionsMap - ordered map of partition keys and values * @return partition filter * @throws ELException */ private String createFilter(Map<String, String> partitionsMap) throws ELException { /* Construct filter query string. As an example, suppose the dated partition keys * are: [year, month, day, hour] and dated partition values are [2014, 02, 24, 10]. * Then the filter query generated is of the format: * "(year < '2014') or (year = '2014' and month < '02') or * (year = '2014' and month = '02' and day < '24') or * or (year = '2014' and month = '02' and day = '24' and hour < '10')" */ StringBuilder filterBuffer = new StringBuilder(); List<String> keys = new ArrayList<String>(partitionsMap.keySet()); for (int curr = 0; curr < partitionsMap.size(); curr++) { if (curr > 0) { filterBuffer.append(FILTER_OR); } filterBuffer.append(FILTER_ST_BRACKET); for (int prev = 0; prev < curr; prev++) { String key = keys.get(prev); filterBuffer.append(key) .append(FILTER_EQUALS) .append(FILTER_QUOTE) .append(partitionsMap.get(key)) .append(FILTER_QUOTE) .append(FILTER_AND); } String key = keys.get(curr); filterBuffer.append(key) .append(FILTER_LESS_THAN) .append(FILTER_QUOTE) .append(partitionsMap.get(key)) .append(FILTER_QUOTE) .append(FILTER_END_BRACKET); } return filterBuffer.toString(); } private void dropPartitions(List<CatalogPartition> partitionsToDelete, boolean isTableExternal) throws FalconException, IOException { AbstractCatalogService catalogService = CatalogServiceFactory.getCatalogService(); for (CatalogPartition partition : partitionsToDelete) { boolean deleted = catalogService.dropPartition(getConf(), getCatalogUrl(), getDatabase(), getTable(), partition.getValues(), true); if (!deleted) { return; } if (isTableExternal) { // 
nuke the dirs if an external table final Path path = new Path(partition.getLocation()); if (!HadoopClientFactory.get().createProxiedFileSystem(path.toUri()).delete(path, true)) { throw new FalconException("Failed to delete location " + path + " for partition " + partition.getValues()); } } // replace ',' with ';' since message producer splits instancePaths string by ',' String partitionInfo = partition.getValues().toString().replace(",", ";"); LOG.info("Deleted partition: " + partitionInfo); instanceDates.append(partitionInfo).append(','); instancePaths.append(partition.getLocation()).append(EvictedInstanceSerDe.INSTANCEPATH_SEPARATOR); } } @Override public String toString() { return "CatalogStorage{" + "catalogUrl='" + catalogUrl + '\'' + ", database='" + database + '\'' + ", table='" + table + '\'' + ", partitions=" + partitions + '}'; } }
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved. * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common Development * and Distribution License("CDDL") (collectively, the "License"). You * may not use this file except in compliance with the License. You can * obtain a copy of the License at * https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html * or packager/legal/LICENSE.txt. See the License for the specific * language governing permissions and limitations under the License. * * When distributing the software, include this License Header Notice in each * file and include the License file at packager/legal/LICENSE.txt. * * GPL Classpath Exception: * Oracle designates this particular file as subject to the "Classpath" * exception as provided by Oracle in the GPL Version 2 section of the License * file that accompanied this code. * * Modifications: * If applicable, add the following below the License Header, with the fields * enclosed by brackets [] replaced by your own identifying information: * "Portions Copyright [year] [name of copyright owner]" * * Contributor(s): * If you wish your version of this file to be governed by only the CDDL or * only the GPL Version 2, indicate your decision by adding "[Contributor] * elects to include this software in this distribution under the [CDDL or GPL * Version 2] license." If you don't indicate a single choice of license, a * recipient has the option to distribute your version of this file under * either the CDDL, the GPL Version 2 or to extend the choice of license to * its licensees as provided above. However, if you add GPL Version 2 code * and therefore, elected the GPL Version 2 license, then the option applies * only if the new code is made subject to such option by the copyright * holder. 
*/ package org.glassfish.admingui.devtests; import org.junit.Test; import org.openqa.selenium.By; import org.openqa.selenium.support.ui.Select; import static org.junit.Assert.assertEquals; /** * * @author Jeremy Lv * */ public class VirtualServerTest extends BaseSeleniumTestClass { @Test public void testAddVirtualServer() { gotoDasPage(); final String serverName = "vs" + generateRandomString(); clickAndWait("treeForm:tree:configurations:server-config:virtualServers:virtualServers_link"); clickAndWait("propertyForm:configs:topActionsGroup1:newButton"); setFieldValue("propertyForm:propertySheet:propertSectionTextField:IdTextProp:IdText", serverName); setFieldValue("propertyForm:propertySheet:propertSectionTextField:hostsProp:Hosts", "localhost"); setFieldValue("propertyForm:propertySheet:propertSectionTextField:logFileProp:LogFile", "logfile.txt"); setFieldValue("propertyForm:propertySheet:propertSectionTextField:docroot:docroot", "/tmp"); Select select = new Select(driver.findElement(By.id("propertyForm:propertySheet:propertSectionTextField:nwProps:nw"))); select.selectByVisibleText("http-listener-1"); int count = addTableRow("propertyForm:basicTable", "propertyForm:basicTable:topActionsGroup1:addSharedTableButton"); setFieldValue("propertyForm:basicTable:rowGroup1:0:col2:col1St", "property"); setFieldValue("propertyForm:basicTable:rowGroup1:0:col3:col1St", "value"); setFieldValue("propertyForm:basicTable:rowGroup1:0:col4:col1St", "description"); clickAndWait("propertyForm:propertyContentPage:topButtons:newButton"); String prefix = getTableRowByValue("propertyForm:configs", serverName, "col1"); assertEquals(serverName, getText(prefix + "col1:link")); String clickId = prefix + "col1:link"; clickByIdAction(clickId); assertTableRowCount("propertyForm:basicTable", count); clickAndWait("propertyForm:propertyContentPage:topButtons:cancelButton"); deleteRow("propertyForm:configs:topActionsGroup1:button1", "propertyForm:configs", serverName); } }
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2021 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.addon.automation.jobs; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.lang.StringUtils; import org.parosproxy.paros.Constant; import org.parosproxy.paros.control.Control; import org.parosproxy.paros.core.scanner.Plugin; import org.parosproxy.paros.core.scanner.Plugin.AlertThreshold; import org.parosproxy.paros.core.scanner.Plugin.AttackStrength; import org.parosproxy.paros.core.scanner.PluginFactory; import org.zaproxy.addon.automation.AutomationData; import org.zaproxy.addon.automation.AutomationEnvironment; import org.zaproxy.addon.automation.AutomationJob; import org.zaproxy.addon.automation.AutomationProgress; import org.zaproxy.addon.automation.ContextWrapper; import org.zaproxy.addon.automation.JobResultData; import org.zaproxy.addon.automation.gui.ActiveScanJobDialog; import org.zaproxy.zap.extension.ascan.ActiveScan; import org.zaproxy.zap.extension.ascan.ExtensionActiveScan; import org.zaproxy.zap.extension.ascan.ScanPolicy; import org.zaproxy.zap.model.Target; import org.zaproxy.zap.users.User; public class 
ActiveScanJob extends AutomationJob { public static final String JOB_NAME = "activeScan"; private static final String OPTIONS_METHOD_NAME = "getScannerParam"; private static final String PARAM_CONTEXT = "context"; private static final String PARAM_POLICY = "policy"; private static final String RULES_ELEMENT_NAME = "rules"; private ExtensionActiveScan extAScan; private Parameters parameters = new Parameters(); private PolicyDefinition policyDefinition = new PolicyDefinition(); private Data data; public ActiveScanJob() { data = new Data(this, this.parameters, this.policyDefinition); } private ExtensionActiveScan getExtAScan() { if (extAScan == null) { extAScan = Control.getSingleton() .getExtensionLoader() .getExtension(ExtensionActiveScan.class); } return extAScan; } @Override public void verifyParameters(AutomationProgress progress) { Map<?, ?> jobData = this.getJobData(); if (jobData == null) { return; } LinkedHashMap<?, ?> params = (LinkedHashMap<?, ?>) jobData.get("parameters"); JobUtils.applyParamsToObject(params, this.parameters, this.getName(), null, progress); this.verifyUser(this.getParameters().getUser(), progress); // Parse the policy defn Object policyDefn = this.getJobData().get("policyDefinition"); if (policyDefn instanceof LinkedHashMap<?, ?>) { LinkedHashMap<?, ?> policyDefnData = (LinkedHashMap<?, ?>) policyDefn; JobUtils.applyParamsToObject( policyDefnData, this.policyDefinition, this.getName(), new String[] {RULES_ELEMENT_NAME}, progress); ScanPolicy scanPolicy = new ScanPolicy(); PluginFactory pluginFactory = scanPolicy.getPluginFactory(); Object o = policyDefnData.get(RULES_ELEMENT_NAME); if (o instanceof ArrayList<?>) { ArrayList<?> ruleData = (ArrayList<?>) o; for (Object ruleObj : ruleData) { if (ruleObj instanceof LinkedHashMap<?, ?>) { LinkedHashMap<?, ?> ruleMap = (LinkedHashMap<?, ?>) ruleObj; Integer id = (Integer) ruleMap.get("id"); Plugin plugin = pluginFactory.getPlugin(id); if (plugin != null) { AttackStrength strength = 
JobUtils.parseAttackStrength( ruleMap.get("strength"), this.getName(), progress); AlertThreshold threshold = JobUtils.parseAlertThreshold( ruleMap.get("threshold"), this.getName(), progress); Rule rule = new Rule(); rule.setId(id); rule.setName(plugin.getName()); if (threshold != null) { rule.setThreshold(threshold.name().toLowerCase()); } if (strength != null) { rule.setStrength(strength.name().toLowerCase()); } this.getData().getPolicyDefinition().addRule(rule); } else { progress.warn( Constant.messages.getString( "automation.error.ascan.rule.unknown", this.getName(), id)); } } } } else if (o != null) { progress.warn( Constant.messages.getString( "automation.error.options.badlist", this.getName(), RULES_ELEMENT_NAME, o)); } } else if (policyDefn != null) { progress.warn( Constant.messages.getString( "automation.error.options.badlist", this.getName(), "policyDefinition", policyDefn)); } } @Override public void applyParameters(AutomationProgress progress) { JobUtils.applyObjectToObject( this.parameters, JobUtils.getJobOptions(this, progress), this.getName(), new String[] {PARAM_POLICY, PARAM_CONTEXT}, progress, this.getPlan().getEnv()); } @Override public Map<String, String> getCustomConfigParameters() { Map<String, String> map = super.getCustomConfigParameters(); map.put(PARAM_CONTEXT, ""); return map; } @Override public void runJob(AutomationEnvironment env, AutomationProgress progress) { ContextWrapper context; if (this.getParameters().getContext() != null) { context = env.getContextWrapper(this.getParameters().getContext()); if (context == null) { progress.error( Constant.messages.getString( "automation.error.context.unknown", this.getParameters().getContext())); return; } } else { context = env.getDefaultContextWrapper(); } Target target = new Target(context.getContext()); target.setRecurse(true); List<Object> contextSpecificObjects = new ArrayList<>(); User user = this.getUser(this.getParameters().getUser(), progress); ScanPolicy scanPolicy = null; if 
(!StringUtils.isEmpty(this.getParameters().getPolicy())) { try { scanPolicy = this.getExtAScan() .getPolicyManager() .getPolicy(this.getParameters().getPolicy()); } catch (ConfigurationException e) { // Error already raised above } } else { scanPolicy = this.getScanPolicy(progress); } if (scanPolicy != null) { contextSpecificObjects.add(scanPolicy); } int scanId = this.getExtAScan().startScan(target, user, contextSpecificObjects.toArray()); long endTime = Long.MAX_VALUE; if (JobUtils.unBox(this.getParameters().getMaxScanDurationInMins()) > 0) { // The active scan should stop, if it doesnt we will stop it (after a few seconds // leeway) endTime = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis( this.getParameters().getMaxScanDurationInMins()) + TimeUnit.SECONDS.toMillis(5); } // Wait for the active scan to finish ActiveScan scan; while (true) { try { Thread.sleep(500); } catch (InterruptedException e) { // Ignore } scan = this.getExtAScan().getScan(scanId); if (scan.isStopped()) { break; } if (System.currentTimeMillis() > endTime) { // It should have stopped but didn't (happens occasionally) this.getExtAScan().stopScan(scanId); break; } } progress.addJobResultData(createJobResultData(scanId)); } @Override public List<JobResultData> getJobResultData() { ActiveScan lastScan = this.getExtAScan().getLastScan(); if (lastScan != null) { return createJobResultData(lastScan.getId()); } return new ArrayList<>(); } private List<JobResultData> createJobResultData(int scanId) { List<JobResultData> list = new ArrayList<>(); list.add(new ActiveScanJobResultData(this.getName(), this.getExtAScan().getScan(scanId))); return list; } protected ScanPolicy getScanPolicy(AutomationProgress progress) { ScanPolicy scanPolicy = new ScanPolicy(); // Set default strength AttackStrength st = JobUtils.parseAttackStrength( this.getData().getPolicyDefinition().getDefaultStrength(), this.getName(), progress); if (st != null) { scanPolicy.setDefaultStrength(st); progress.info( 
Constant.messages.getString( "automation.info.ascan.setdefstrength", this.getName(), st.name())); } // Set default threshold PluginFactory pluginFactory = scanPolicy.getPluginFactory(); AlertThreshold th = JobUtils.parseAlertThreshold( this.getData().getPolicyDefinition().getDefaultThreshold(), this.getName(), progress); if (th != null) { scanPolicy.setDefaultThreshold(th); if (th == AlertThreshold.OFF) { for (Plugin plugin : pluginFactory.getAllPlugin()) { plugin.setEnabled(false); } } else { scanPolicy.setDefaultThreshold(th); } progress.info( Constant.messages.getString( "automation.info.ascan.setdefthreshold", this.getName(), th.name())); } // Configure any rules for (Rule rule : this.getData().getPolicyDefinition().getRules()) { Plugin plugin = pluginFactory.getPlugin(rule.getId()); if (plugin == null) { // Will have already warned about this continue; } AttackStrength pluginSt = JobUtils.parseAttackStrength(rule.getStrength(), this.getName(), progress); if (pluginSt != null) { plugin.setAttackStrength(pluginSt); plugin.setEnabled(true); progress.info( Constant.messages.getString( "automation.info.ascan.rule.setstrength", this.getName(), rule.getId(), pluginSt.name())); } AlertThreshold pluginTh = JobUtils.parseAlertThreshold(rule.getThreshold(), this.getName(), progress); if (pluginTh != null) { plugin.setAlertThreshold(pluginTh); plugin.setEnabled(!AlertThreshold.OFF.equals(pluginTh)); progress.info( Constant.messages.getString( "automation.info.ascan.rule.setthreshold", this.getName(), rule.getId(), pluginTh.name())); } } return scanPolicy; } @Override public boolean isExcludeParam(String param) { switch (param) { case "allowAttackOnStart": case "attackPolicy": case "hostPerScan": case "maxChartTimeInMins": case "maxResultsToList": case "maxScansInUI": case "promptInAttackMode": case "promptToClearFinishedScans": case "rescanInAttackMode": case "showAdvancedDialog": case "targetParamsInjectable": case "targetParamsEnabledRPC": return true; default: return 
false; } } @Override public String getSummary() { String context = this.getParameters().getContext(); if (StringUtils.isEmpty(context)) { context = Constant.messages.getString("automation.dialog.default"); } return Constant.messages.getString("automation.dialog.ascan.summary", context); } @Override public Data getData() { return data; } @Override public Parameters getParameters() { return parameters; } @Override public String getType() { return JOB_NAME; } @Override public Order getOrder() { return Order.ATTACK; } @Override public Object getParamMethodObject() { return this.getExtAScan(); } @Override public String getParamMethodName() { return OPTIONS_METHOD_NAME; } @Override public void showDialog() { new ActiveScanJobDialog(this).setVisible(true); } public static class Rule extends AutomationData { private int id; private String name; private String threshold; private String strength; public Rule() {} public Rule(int id, String name, String threshold, String strength) { this.id = id; this.name = name; this.threshold = threshold; this.strength = strength; } public Rule copy() { return new Rule(id, name, threshold, strength); } public int getId() { return id; } public void setId(int id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getThreshold() { return threshold; } public void setThreshold(String threshold) { this.threshold = threshold; } public String getStrength() { return strength; } public void setStrength(String strength) { this.strength = strength; } } public static class Data extends JobData { private Parameters parameters; private PolicyDefinition policyDefinition; public Data(AutomationJob job, Parameters parameters, PolicyDefinition policyDefinition) { super(job); this.parameters = parameters; this.policyDefinition = policyDefinition; } public Parameters getParameters() { return parameters; } public PolicyDefinition getPolicyDefinition() { return policyDefinition; } } 
    /** Scan-policy definition: default strength/threshold plus the per-rule overrides. */
    public static class PolicyDefinition extends AutomationData {
        private String defaultStrength;
        private String defaultThreshold;
        private List<Rule> rules = new ArrayList<>();

        public String getDefaultStrength() {
            return defaultStrength;
        }

        public void setDefaultStrength(String defaultStrength) {
            this.defaultStrength = defaultStrength;
        }

        public String getDefaultThreshold() {
            return defaultThreshold;
        }

        public void setDefaultThreshold(String defaultThreshold) {
            this.defaultThreshold = defaultThreshold;
        }

        /**
         * Returns a defensive deep copy of the rules; mutating the returned list (or the
         * rules in it) does not affect this policy definition.
         */
        public List<Rule> getRules() {
            return rules.stream().map(Rule::copy).collect(Collectors.toList());
        }

        public void addRule(Rule rule) {
            this.rules.add(rule);
        }

        public void removeRule(Rule rule) {
            this.rules.remove(rule);
        }

        public void setRules(List<Rule> rules) {
            this.rules = rules;
        }
    }

    /**
     * User-configurable job parameters. Wrapper types ({@code Integer}/{@code Boolean}) are
     * used so that {@code null} can represent "not set" and the scanner default applies.
     */
    public static class Parameters extends AutomationData {
        private String context;
        private String user;
        private String policy;
        private Integer maxRuleDurationInMins;
        private Integer maxScanDurationInMins;
        private Boolean addQueryParam;
        private String defaultPolicy;
        private Integer delayInMs;
        private Boolean handleAntiCSRFTokens;
        private Boolean injectPluginIdInHeader;
        private Boolean scanHeadersAllRequests;
        private Integer threadPerHost;

        public Parameters() {}

        public String getContext() {
            return context;
        }

        public void setContext(String context) {
            this.context = context;
        }

        public String getUser() {
            return user;
        }

        public void setUser(String user) {
            this.user = user;
        }

        public String getPolicy() {
            return policy;
        }

        public void setPolicy(String policy) {
            this.policy = policy;
        }

        public Integer getMaxRuleDurationInMins() {
            return maxRuleDurationInMins;
        }

        public void setMaxRuleDurationInMins(Integer maxRuleDurationInMins) {
            this.maxRuleDurationInMins = maxRuleDurationInMins;
        }

        public Integer getMaxScanDurationInMins() {
            return maxScanDurationInMins;
        }

        public void setMaxScanDurationInMins(Integer maxScanDurationInMins) {
            this.maxScanDurationInMins = maxScanDurationInMins;
        }

        public Boolean getAddQueryParam() {
            return addQueryParam;
        }

        public void setAddQueryParam(Boolean addQueryParam) {
            this.addQueryParam = addQueryParam;
        }

        public String getDefaultPolicy() {
            return defaultPolicy;
        }

        public void setDefaultPolicy(String defaultPolicy) {
            this.defaultPolicy = defaultPolicy;
        }

        public Integer getDelayInMs() {
            return delayInMs;
        }

        public void setDelayInMs(Integer delayInMs) {
            this.delayInMs = delayInMs;
        }

        public Boolean getHandleAntiCSRFTokens() {
            return handleAntiCSRFTokens;
        }

        public void setHandleAntiCSRFTokens(Boolean handleAntiCSRFTokens) {
            this.handleAntiCSRFTokens = handleAntiCSRFTokens;
        }

        public Boolean getInjectPluginIdInHeader() {
            return injectPluginIdInHeader;
        }

        public void setInjectPluginIdInHeader(Boolean injectPluginIdInHeader) {
            this.injectPluginIdInHeader = injectPluginIdInHeader;
        }

        public Boolean getScanHeadersAllRequests() {
            return scanHeadersAllRequests;
        }

        public void setScanHeadersAllRequests(Boolean scanHeadersAllRequests) {
            this.scanHeadersAllRequests = scanHeadersAllRequests;
        }

        public Integer getThreadPerHost() {
            return threadPerHost;
        }

        public void setThreadPerHost(Integer threadPerHost) {
            this.threadPerHost = threadPerHost;
        }
    }
}
package org.inmogr.sample.images.downloader;

import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;

import com.android.volley.Request;

import org.inmogr.sample.images.downloader.sample.download.library.Connector;
import org.inmogr.sample.images.downloader.sample.download.library.InputStreamRequest;
import org.inmogr.sample.images.downloader.sample.download.library.handler.InputStreamResponseHandler;

import java.net.MalformedURLException;
import java.net.URL;

/**
 * Demo activity that downloads a file via GET or POST through the download
 * library and saves the received bytes into the public Downloads directory.
 */
public class RequestFileActivity extends AppCompatActivity {

    // The in-flight request, if any; cleared again when the user cancels.
    private Request request;

    private Connector connector = new Connector() {
        @Override
        public void completedSuccessfully(int invokedBy, Object response) {
            if (response instanceof byte[]) {
                InputStreamResponseHandler handler =
                        new InputStreamResponseHandler(getApplication());
                String targetDir = Environment
                        .getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS)
                        .getPath();
                boolean written = handler.saveFile(
                        (InputStreamRequest) request, (byte[]) response, targetDir);
                showResponse(written
                        ? getString(R.string.saved_in_downloads)
                        : getString(R.string.failed_to_save_file));
            }
            hideFloatingButton();
        }

        @Override
        public void completedWithError(String error) {
            showResponse(error);
            hideFloatingButton();
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_request_file);
        connector.init(getCacheDir(), getWindow().getDecorView().getRootView());
    }

    /** Writes the given text into the response TextView. */
    private void showResponse(String text) {
        ((TextView) findViewById(R.id.responseFileRequest)).setText(text);
    }

    private void showFloatingButton() {
        View cancel = findViewById(R.id.cancelFileRequest);
        cancel.setVisibility(View.VISIBLE);
    }

    private void hideFloatingButton() {
        View cancel = findViewById(R.id.cancelFileRequest);
        cancel.setVisibility(View.GONE);
    }

    /**
     * Reads the URL from the input field and validates it.
     *
     * @return the URL text, or {@code null} (with an inline error shown) when it is malformed
     */
    private String getUrl() {
        EditText input = findViewById(R.id.urlFileRequest);
        String candidate = input.getText().toString();
        try {
            new URL(candidate); // validation only; throws on malformed input
        } catch (MalformedURLException e) {
            input.setError(getString(R.string.invalid_url));
            return null;
        }
        return candidate;
    }

    /** onClick handler: issues a GET download for the entered URL. */
    public void get(View view) {
        String url = getUrl();
        if (url != null) {
            request = connector.addGetRequestOfFile(url, null);
            showFloatingButton();
        }
    }

    /** onClick handler: issues a POST download for the entered URL. */
    public void post(View view) {
        String url = getUrl();
        if (url != null) {
            request = connector.addPostRequestOfFile(url, null);
            showFloatingButton();
        }
    }

    /** onClick handler: cancels the in-flight request, if any. */
    public void cancelRequest(View view) {
        if (request == null) {
            return;
        }
        request.cancel();
        request = null;
        hideFloatingButton();
    }
}
/*
 * Copyright (c) 2013, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software;Designed and Developed mainly by many Chinese
 * opensource volunteers. you can redistribute it and/or modify it under the
 * terms of the GNU General Public License version 2 only, as published by the
 * Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Any questions about this component can be directed to it's project Web address
 * https://code.google.com/p/opencloudb/.
 *
 */
package org.opencloudb.performance;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Benchmark worker that inserts synthetic rows into the {@code goods} table
 * in batches of {@code batchSize}, covering the id range
 * {@code [startId, startId + totalRecords)}.
 *
 * <p>Fixes applied to the original version:
 * <ul>
 *   <li>{@code endId} is now an exclusive bound; previously it was
 *       {@code startId + totalRecords - 1} while the generation loop used
 *       {@code i < end}, so each job inserted {@code totalRecords - 1} rows
 *       (an off-by-one).</li>
 *   <li>The {@link PreparedStatement} in {@code insert} is now closed
 *       (previously leaked on every batch).</li>
 * </ul>
 */
public class GoodsInsertJob implements Runnable {

    /** Exclusive upper bound of the id range this job generates. */
    private final long endId;
    /** Next id to generate (advances as batches are produced). */
    private long finished;
    private final int batchSize;
    /** Shared counter of rows successfully inserted across all jobs. */
    private final AtomicLong finishedCount;
    /** Shared counter of rows whose insertion failed. */
    private final AtomicLong failedCount;
    // Kept package-private with their original (misspelled) names to avoid
    // breaking same-package callers; one instance per job, so the non-thread-safe
    // SimpleDateFormat is not shared.
    Calendar date = Calendar.getInstance();
    DateFormat datafomat = new SimpleDateFormat("yyyy-MM-dd");
    private final SimpleConPool conPool;

    /**
     * @param conPool       connection pool to borrow from / return to
     * @param totalRecords  number of rows this job should insert
     * @param batchSize     rows per JDBC batch
     * @param startId       first (inclusive) id to insert
     * @param finishedCount shared success counter
     * @param failedCount   shared failure counter
     */
    public GoodsInsertJob(SimpleConPool conPool, long totalRecords, int batchSize,
            long startId, AtomicLong finishedCount, AtomicLong failedCount) {
        super();
        this.conPool = conPool;
        // Exclusive bound: ids startId .. startId + totalRecords - 1 are inserted.
        this.endId = startId + totalRecords;
        this.batchSize = batchSize;
        this.finished = startId;
        this.finishedCount = finishedCount;
        this.failedCount = failedCount;
    }

    /**
     * Inserts one batch via a JDBC batch statement.
     *
     * @return the number of rows submitted
     * @throws SQLException if preparing or executing the batch fails
     */
    private int insert(Connection con, List<Map<String, String>> list) throws SQLException {
        String sql = "insert into goods (id,name ,good_type,good_img_url,good_created ,good_desc, price ) values(?,? ,?,?,? ,?, ?)";
        PreparedStatement ps = con.prepareStatement(sql);
        try {
            for (Map<String, String> map : list) {
                ps.setLong(1, Long.parseLong(map.get("id")));
                ps.setString(2, map.get("name"));
                ps.setShort(3, Short.parseShort(map.get("good_type")));
                ps.setString(4, map.get("good_img_url"));
                ps.setString(5, map.get("good_created"));
                ps.setString(6, map.get("good_desc"));
                ps.setDouble(7, Double.parseDouble(map.get("price")));
                ps.addBatch();
            }
            ps.executeBatch();
        } finally {
            // Previously the statement was never closed, leaking a statement per batch.
            ps.close();
        }
        return list.size();
    }

    /**
     * Produces the next batch of synthetic rows, or an empty list when the id
     * range is exhausted.
     */
    private List<Map<String, String>> getNextBatch() {
        if (finished >= endId) {
            return Collections.emptyList();
        }
        long end = (finished + batchSize) < this.endId ? (finished + batchSize) : endId;
        // Absorb a trailing partial batch into this one so the final batch is
        // never smaller than batchSize (same behaviour as the original).
        if (end + batchSize > this.endId) {
            end = this.endId;
        }
        List<Map<String, String>> list =
                new ArrayList<Map<String, String>>((int) (end - finished));
        for (long i = finished; i < end; i++) {
            Map<String, String> m = new HashMap<String, String>();
            m.put("id", i + "");
            m.put("name", "googs " + i);
            m.put("good_type", i % 100 + "");
            m.put("good_img_url", "http://openclouddb.org/" + i);
            m.put("good_created", getRandomDay(i));
            m.put("good_desc", "best goods " + i);
            m.put("price", (i + 0.0) % 1000 + "");
            list.add(m);
        }
        finished += list.size();
        return list;
    }

    /**
     * Derives a pseudo-random date from {@code i}.
     *
     * <p>NOTE(review): {@link Calendar#MONTH} is zero-based, so
     * {@code i % 11 + 1} yields months February–December (January is never
     * produced). Kept as-is since this only generates synthetic test data.
     */
    private String getRandomDay(long i) {
        int month = Long.valueOf(i % 11 + 1).intValue();
        int day = Long.valueOf(i % 27 + 1).intValue();
        date.set(Calendar.MONTH, month);
        date.set(Calendar.DAY_OF_MONTH, day);
        return datafomat.format(date.getTime());
    }

    /**
     * Inserts batches until the id range is exhausted. A failed batch is
     * counted and skipped; the connection is lazily (re)acquired and returned
     * to the pool when the job ends.
     */
    @Override
    public void run() {
        Connection con = null;
        try {
            List<Map<String, String>> batch = getNextBatch();
            while (!batch.isEmpty()) {
                try {
                    // Lazily acquire (or re-acquire after failure) a pooled connection.
                    if (con == null || con.isClosed()) {
                        con = conPool.getConnection();
                        con.setAutoCommit(true);
                    }
                    insert(con, batch);
                    finishedCount.addAndGet(batch.size());
                } catch (Exception e) {
                    // Best-effort benchmark: count the failure and continue with
                    // the next batch rather than aborting the whole job.
                    failedCount.addAndGet(batch.size());
                    e.printStackTrace();
                }
                batch = getNextBatch();
            }
        } finally {
            if (con != null) {
                this.conPool.returnCon(con);
            }
        }
    }
}
/**
 * Kinetic Smart API
 * Kinetic Smart Swagger Implementation
 *
 * OpenAPI spec version: 1.0.0
 * Contact: dev@citypay.com
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */
package com.citypay.pos.kinetic.model;

import io.swagger.annotations.*;
import com.google.gson.annotations.SerializedName;

import java.util.Objects;

/**
 * Model describing the terminal's battery state: whether the query succeeded,
 * whether the device is charging, and the charge percentage.
 */
@ApiModel(description = "")
public class BatteryResponse {

    @SerializedName("success")
    private Boolean success = null;

    @SerializedName("charging")
    private Boolean charging = null;

    @SerializedName("percentage")
    private Integer percentage = null;

    /**
     **/
    @ApiModelProperty(value = "")
    public Boolean getSuccess() {
        return success;
    }

    public void setSuccess(Boolean success) {
        this.success = success;
    }

    /**
     **/
    @ApiModelProperty(value = "")
    public Boolean getCharging() {
        return charging;
    }

    public void setCharging(Boolean charging) {
        this.charging = charging;
    }

    /**
     **/
    @ApiModelProperty(value = "")
    public Integer getPercentage() {
        return percentage;
    }

    public void setPercentage(Integer percentage) {
        this.percentage = percentage;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        BatteryResponse batteryResponse = (BatteryResponse) o;
        // Objects.equals replaces the hand-rolled null-safe comparisons.
        return Objects.equals(this.success, batteryResponse.success)
                && Objects.equals(this.charging, batteryResponse.charging)
                && Objects.equals(this.percentage, batteryResponse.percentage);
    }

    @Override
    public int hashCode() {
        // Null-safe and consistent with equals; exact hash values are not part
        // of the class's contract.
        return Objects.hash(success, charging, percentage);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class BatteryResponse {\n");
        sb.append("  success: ").append(success).append("\n");
        sb.append("  charging: ").append(charging).append("\n");
        sb.append("  percentage: ").append(percentage).append("\n");
        sb.append("}\n");
        return sb.toString();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.llap.cache; import com.google.common.base.Function; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.io.Allocator; import org.apache.hadoop.hive.common.io.DataCache.BooleanRef; import org.apache.hadoop.hive.common.io.DataCache.DiskRangeListFactory; import org.apache.hadoop.hive.common.io.encoded.MemoryBuffer; import org.apache.hadoop.hive.common.io.CacheTag; import org.apache.hadoop.hive.llap.DebugUtils; import org.apache.hadoop.hive.llap.cache.LowLevelCache.Priority; import org.apache.hadoop.hive.llap.io.api.impl.LlapIoImpl; import org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheMetrics; import org.apache.hadoop.hive.llap.metrics.LlapMetricsSystem; import 
org.apache.hadoop.hive.llap.metrics.ReadWriteLockMetrics; import org.apache.hadoop.metrics2.MetricsSource; import org.apache.hadoop.metrics2.MetricsSystem; import org.apache.hive.common.util.Ref; import org.apache.orc.OrcProto; import org.apache.orc.OrcProto.ColumnEncoding; public class SerDeLowLevelCacheImpl implements BufferUsageManager, LlapIoDebugDump, Configurable { private static final int DEFAULT_CLEANUP_INTERVAL = 600; private Configuration conf; private final Allocator allocator; private final AtomicInteger newEvictions = new AtomicInteger(0); private Thread cleanupThread = null; private final ConcurrentHashMap<Object, FileCache<FileData>> cache = new ConcurrentHashMap<>(); private final LowLevelCachePolicy cachePolicy; private final long cleanupInterval; private final LlapDaemonCacheMetrics metrics; /// Shared singleton MetricsSource instance for all FileData locks private static final MetricsSource LOCK_METRICS; static { // create and register the MetricsSource for lock metrics MetricsSystem ms = LlapMetricsSystem.instance(); ms.register("FileDataLockMetrics", "Lock metrics for R/W locks around FileData instances", LOCK_METRICS = ReadWriteLockMetrics.createLockMetricsSource("FileData")); } public static final class LlapSerDeDataBuffer extends LlapAllocatorBuffer { public boolean isCached = false; private CacheTag tag; @Override public void notifyEvicted(EvictionDispatcher evictionDispatcher) { evictionDispatcher.notifyEvicted(this); } public void setTag(CacheTag tag) { this.tag = tag; } @Override public CacheTag getTag() { return tag; } } private static final class StripeInfoComparator implements Comparator<StripeData> { @Override public int compare(StripeData o1, StripeData o2) { int starts = Long.compare(o1.knownTornStart, o2.knownTornStart); if (starts != 0) return starts; starts = Long.compare(o1.firstStart, o2.firstStart); if (starts != 0) return starts; assert (o1.lastStart == o2.lastStart) == (o1.lastEnd == o2.lastEnd); return 
Long.compare(o1.lastStart, o2.lastStart); } } public static class FileData { /** * RW lock ensures we have a consistent view of the file data, which is important given that * we generate "stripe" boundaries arbitrarily. Reading buffer data itself doesn't require * that this lock is held; however, everything else in stripes list does. * TODO: make more granular? We only care that each one reader sees consistent boundaries. * So, we could shallow-copy the stripes list, then have individual locks inside each. */ private final ReadWriteLock rwLock; private final Object fileKey; private final int colCount; private ArrayList<StripeData> stripes; public FileData(Configuration conf, Object fileKey, int colCount) { this.fileKey = fileKey; this.colCount = colCount; rwLock = ReadWriteLockMetrics.wrap(conf, new ReentrantReadWriteLock(), LOCK_METRICS); } public void toString(StringBuilder sb) { sb.append("File data for ").append(fileKey).append(" with ").append(colCount) .append(" columns: ").append(stripes); } public int getColCount() { return colCount; } public ArrayList<StripeData> getData() { return stripes; } public void addStripe(StripeData sd) { if (stripes == null) { stripes = new ArrayList<>(); } stripes.add(sd); } @Override public String toString() { return "[fileKey=" + fileKey + ", colCount=" + colCount + ", stripes=" + stripes + "]"; } } public static final class StripeData { // In LRR case, if we just store 2 boundaries (which could be split boundaries or reader // positions), we wouldn't be able to account for torn rows correctly because the semantics of // our "exact" reader positions, and inexact split boundaries, are different. We cannot even // tell LRR to use exact boundaries, as there can be a mismatch in an original mid-file split // wrt first row when caching - we may produce incorrect result if we adjust the split // boundary, and also if we don't adjust it, depending where it falls. 
At best, we'd end up // with spurious disk reads if we cache on row boundaries but splits include torn rows. // This structure implies that when reading a split, we skip the first torn row but fully // read the last torn row (as LineRecordReader does). If we want to support a different scheme, // we'd need to store more offsets and make logic account for that. private long knownTornStart; // This can change based on new splits. private final long firstStart, lastStart, lastEnd; // TODO: we can actually consider storing ALL the delta encoded row offsets - not a lot of // overhead compared to the data itself, and with row offsets, we could use columnar // blocks for inconsistent splits. We are not optimizing for inconsistent splits for now. private final long rowCount; private final OrcProto.ColumnEncoding[] encodings; private LlapSerDeDataBuffer[][][] data; // column index, stream type, buffers public StripeData(long knownTornStart, long firstStart, long lastStart, long lastEnd, long rowCount, ColumnEncoding[] encodings) { this.knownTornStart = knownTornStart; this.firstStart = firstStart; this.lastStart = lastStart; this.lastEnd = lastEnd; this.encodings = encodings; this.rowCount = rowCount; this.data = encodings == null ? 
null : new LlapSerDeDataBuffer[encodings.length][][]; } @Override public String toString() { return toCoordinateString() + " with encodings [" + Arrays.toString(encodings) .replace('\n', ' ') + "] and data " + SerDeLowLevelCacheImpl.toString(data); } public long getKnownTornStart() { return knownTornStart; } public long getFirstStart() { return firstStart; } public long getLastStart() { return lastStart; } public long getLastEnd() { return lastEnd; } public long getRowCount() { return rowCount; } public OrcProto.ColumnEncoding[] getEncodings() { return encodings; } public LlapSerDeDataBuffer[][][] getData() { return data; } public String toCoordinateString() { return "stripe kts " + knownTornStart + " from " + firstStart + " to [" + lastStart + ", " + lastEnd + ")"; } public static StripeData duplicateStructure(StripeData s) { return new StripeData(s.knownTornStart, s.firstStart, s.lastStart, s.lastEnd, s.rowCount, new OrcProto.ColumnEncoding[s.encodings.length]); } public void setKnownTornStart(long value) { knownTornStart = value; } } public static String toString(LlapSerDeDataBuffer[][][] data) { if (data == null) return "null"; StringBuilder sb = new StringBuilder("["); for (int i = 0; i < data.length; ++i) { LlapSerDeDataBuffer[][] colData = data[i]; if (colData == null) { sb.append("null, "); continue; } sb.append("colData ["); for (int j = 0; j < colData.length; ++j) { LlapSerDeDataBuffer[] streamData = colData[j]; if (streamData == null) { sb.append("null, "); continue; } sb.append("buffers ["); for (int k = 0; k < streamData.length; ++k) { sb.append(streamData[k]); } sb.append("], "); } sb.append("], "); } sb.append("]"); return sb.toString(); } public static String toString(LlapSerDeDataBuffer[][] data) { if (data == null) return "null"; StringBuilder sb = new StringBuilder("["); for (int j = 0; j < data.length; ++j) { LlapSerDeDataBuffer[] streamData = data[j]; if (streamData == null) { sb.append("null, "); continue; } sb.append("["); for (int k = 0; k < 
streamData.length; ++k) { sb.append(streamData[k]); } sb.append("], "); } sb.append("]"); return sb.toString(); } public SerDeLowLevelCacheImpl( LlapDaemonCacheMetrics metrics, LowLevelCachePolicy cachePolicy, Allocator allocator) { this.cachePolicy = cachePolicy; this.allocator = allocator; this.cleanupInterval = DEFAULT_CLEANUP_INTERVAL; this.metrics = metrics; LlapIoImpl.LOG.info("SerDe low-level level cache; cleanup interval {} sec", cleanupInterval); } public void startThreads() { if (cleanupInterval < 0) return; cleanupThread = new CleanupThread(cache, newEvictions, cleanupInterval); cleanupThread.start(); } public FileData getFileData(Object fileKey, long start, long end, boolean[] includes, DiskRangeListFactory factory, LowLevelCacheCounters qfCounters, BooleanRef gotAllData) throws IOException { FileCache<FileData> subCache = cache.get(fileKey); if (subCache == null || !subCache.incRef()) { if (LlapIoImpl.CACHE_LOGGER.isTraceEnabled()) { LlapIoImpl.CACHE_LOGGER.trace("Cannot find cache for " + fileKey + " in " + cache); } markAllAsMissed(start, end, qfCounters, gotAllData); return null; } try { FileData cached = subCache.getCache(); cached.rwLock.readLock().lock(); if (LlapIoImpl.CACHE_LOGGER.isTraceEnabled()) { LlapIoImpl.CACHE_LOGGER.trace("Cache for " + fileKey + " is " + subCache.getCache()); } try { if (cached.stripes == null) { LlapIoImpl.CACHE_LOGGER.debug("Cannot find any stripes for " + fileKey); markAllAsMissed(start, end, qfCounters, gotAllData); return null; } if (includes.length > cached.colCount) { throw new IOException("Includes " + DebugUtils.toString(includes) + " for " + cached.colCount + " columns"); } FileData result = new FileData(conf, cached.fileKey, cached.colCount); if (gotAllData != null) { gotAllData.value = true; } // We will adjust start and end so that we could record the metrics; save the originals. long origStart = start, origEnd = end; // startIx is inclusive, endIx is exclusive. 
int startIx = Integer.MIN_VALUE, endIx = Integer.MIN_VALUE; LlapIoImpl.CACHE_LOGGER.debug("Looking for data between " + start + " and " + end); for (int i = 0; i < cached.stripes.size() && endIx == Integer.MIN_VALUE; ++i) { StripeData si = cached.stripes.get(i); if (LlapIoImpl.CACHE_LOGGER.isTraceEnabled()) { LlapIoImpl.CACHE_LOGGER.trace("Looking at " + si.toCoordinateString()); } if (startIx == i) { // The start of the split was in the middle of the previous slice. start = si.knownTornStart; } else if (startIx == Integer.MIN_VALUE) { // Determine if we need to read this slice for the split. if (si.lastEnd <= start) continue; // Slice before the start of the split. // Start of the split falls somewhere within or before this slice. // Note the ">=" - LineRecordReader will skip the first row even if we start // directly at its start, because it cannot know if it's the start or not. // Unless it's 0; note that we DO give 0 special treatment here, unlike the EOF below, // because zero is zero. Need to mention it in Javadoc. if (start == 0 && si.firstStart == 0) { startIx = i; } else if (start >= si.firstStart) { // If the start of the split points into the middle of the cached slice, we cannot // use the cached block - it's encoded and columnar, so we cannot map the file // offset to some "offset" in "middle" of the slice (but see TODO for firstStart). startIx = i + 1; // continue; } else { // Start of the split is before this slice. startIx = i; // Simple case - we will read cache from the split start offset. start = si.knownTornStart; } } // Determine if this (or previous) is the last slice we need to read for this split. if (startIx != Integer.MIN_VALUE && endIx == Integer.MIN_VALUE) { if (si.lastEnd <= end) { // The entire current slice is part of the split. Note that if split end EQUALS // lastEnd, the split would also read the next row, so we do need to look at the // next slice, if any (although we'd probably find we cannot use it). 
// Note also that we DO NOT treat end-of-file differently here, cause we do not know // of any such thing. The caller must handle lastEnd vs end of split vs end of file // match correctly in terms of how LRR handles them. See above for start-of-file. if (i + 1 != cached.stripes.size()) continue; endIx = i + 1; end = si.lastEnd; } else if (si.lastStart <= end) { // The split ends within (and would read) the last row of this slice. Exact match. endIx = i + 1; end = si.lastEnd; } else { // Either the slice comes entirely after the end of split (following a gap in cached // data); or the split ends in the middle of the slice, so it's the same as in the // startIx logic w.r.t. the partial match; so, we either don't want to, or cannot, // use this. There's no need to distinguish these two cases for now. endIx = i; end = (endIx > 0) ? cached.stripes.get(endIx - 1).lastEnd : start; } } } LlapIoImpl.CACHE_LOGGER.debug("Determined stripe indexes " + startIx + ", " + endIx); if (endIx <= startIx) { if (gotAllData != null) { gotAllData.value = false; } return null; // No data for the split, or it fits in the middle of one or two slices. 
} if (start > origStart || end < origEnd) { if (gotAllData != null) { gotAllData.value = false; } long totalMiss = Math.max(0, origEnd - end) + Math.max(0, start - origStart); metrics.incrCacheRequestedBytes(totalMiss); if (qfCounters != null) { qfCounters.recordCacheMiss(totalMiss); } } result.stripes = new ArrayList<>(endIx - startIx); for (int stripeIx = startIx; stripeIx < endIx; ++stripeIx) { getCacheDataForOneSlice(stripeIx, cached, result, gotAllData, includes, qfCounters); } return result; } finally { cached.rwLock.readLock().unlock(); } } finally { subCache.decRef(); } } private void getCacheDataForOneSlice(int stripeIx, FileData cached, FileData result, BooleanRef gotAllData, boolean[] includes, LowLevelCacheCounters qfCounters) { StripeData cStripe = cached.stripes.get(stripeIx); if (LlapIoImpl.CACHE_LOGGER.isTraceEnabled()) { LlapIoImpl.CACHE_LOGGER.trace("Got stripe in cache " + cStripe); } StripeData stripe = StripeData.duplicateStructure(cStripe); result.stripes.add(stripe); boolean isMissed = false; for (int colIx = 0; colIx < cached.colCount; ++colIx) { if (!includes[colIx]) continue; if (cStripe.encodings[colIx] == null || cStripe.data[colIx] == null) { if (cStripe.data[colIx] != null) { throw new AssertionError(cStripe); // No encoding => must have no data. } isMissed = true; if (gotAllData != null) { gotAllData.value = false; } continue; } stripe.encodings[colIx] = cStripe.encodings[colIx]; LlapSerDeDataBuffer[][] cColData = cStripe.data[colIx]; assert cColData != null; for (int streamIx = 0; cColData != null && streamIx < cColData.length; ++streamIx) { LlapSerDeDataBuffer[] streamData = cColData[streamIx]; // Note: this relies on the fact that we always evict the entire column, so if // we have the column data, we assume we have all the streams we need. if (streamData == null) continue; for (int i = 0; i < streamData.length; ++i) { // Finally, we are going to use "i"! 
          // NOTE(review): this is the tail of a method that begins earlier in the file
          // (per-column/per-stream buffer locking for cached data); only its closing
          // part is visible in this chunk.
          if (!lockBuffer(streamData[i], true)) {
            // Lock failed (buffer no longer usable): abandon the partial column data
            // and record this slice as a cache miss.
            LlapIoImpl.CACHE_LOGGER.info("Couldn't lock data for stripe at " + stripeIx
                + ", colIx " + colIx + ", stream type " + streamIx);
            handleRemovedColumnData(cColData);
            cColData = null;
            isMissed = true;
            if (gotAllData != null) {
              gotAllData.value = false;
            }
            break;
          }
        }
      }
      // At this point, we have arrived at the level where we need all the data, and the
      // arrays never change. So we will just do a shallow assignment here instead of copy.
      stripe.data[colIx] = cColData;
      if (cColData == null) {
        // No usable data for this column - drop its encoding as well.
        stripe.encodings[colIx] = null;
      }
    }
    doMetricsStuffForOneSlice(qfCounters, stripe, isMissed);
  }

  /**
   * Updates cache hit/miss metrics for a single slice (stripe) after a read attempt.
   *
   * @param qfCounters optional per-query counters; may be null.
   * @param stripe the slice whose byte range is being accounted for.
   * @param isMissed whether any data for this slice could not be served from cache.
   */
  private void doMetricsStuffForOneSlice(
      LowLevelCacheCounters qfCounters, StripeData stripe, boolean isMissed) {
    // Slice boundaries may not match split boundaries due to torn rows in either direction,
    // so this counter may not be consistent with splits. This is also why we increment
    // requested bytes here, instead of based on the split - we don't want the metrics to be
    // inconsistent with each other. No matter what we determine here, at least we'll account
    // for both in the same manner.
    long bytes = stripe.lastEnd - stripe.knownTornStart;
    metrics.incrCacheRequestedBytes(bytes);
    if (!isMissed) {
      metrics.incrCacheHitBytes(bytes);
    }
    if (qfCounters != null) {
      if (isMissed) {
        qfCounters.recordCacheMiss(bytes);
      } else {
        qfCounters.recordCacheHit(bytes);
      }
    }
  }

  /**
   * Records the entire [from, to) range as a cache miss.
   *
   * <p>NOTE(review): the global requested-bytes metric is only incremented when qfCounters is
   * non-null here, unlike in doMetricsStuffForOneSlice where it is unconditional - confirm
   * whether that asymmetry is intentional.
   */
  private void markAllAsMissed(long from, long to,
      LowLevelCacheCounters qfCounters, BooleanRef gotAllData) {
    if (qfCounters != null) {
      metrics.incrCacheRequestedBytes(to - from);
      qfCounters.recordCacheMiss(to - from);
    }
    if (gotAllData != null) {
      gotAllData.value = false;
    }
  }

  /**
   * Attempts to take a refcount on the buffer.
   *
   * @param buffer the buffer to lock.
   * @param doNotifyPolicy whether to notify the cache policy on a 0->1 lock transition;
   *        callers pass false when the buffer is not yet known to the policy.
   * @return true iff the refcount was successfully incremented (buffer not evicted).
   */
  private boolean lockBuffer(LlapSerDeDataBuffer buffer, boolean doNotifyPolicy) {
    int rc = buffer.incRef();
    if (rc > 0) {
      metrics.incrCacheNumLockedBuffers();
    }
    if (doNotifyPolicy && rc == 1) {
      // We have just locked a buffer that wasn't previously locked.
      cachePolicy.notifyLock(buffer);
    }
    return rc > 0;
  }

  /**
   * Adds file data to the cache. If data for the same file is already cached, the incoming
   * stripes are merged with the cached ones; on exact stripe-boundary collisions the per-column
   * contents are merged, on partial overlaps the incoming (new) stripe is dropped in favor of
   * the already-cached one.
   *
   * @param data the new file data to cache; all its buffers are locked and tagged.
   * @param priority cache policy priority for the newly cached buffers.
   * @param qfCounters optional per-query counters (currently unused here).
   * @param tag cache tag to attach to each buffer.
   */
  public void putFileData(final FileData data, Priority priority,
      LowLevelCacheCounters qfCounters, CacheTag tag) {
    // TODO: buffers are accounted for at allocation time, but ideally we should report the memory
    // overhead from the java objects to memory manager and remove it when discarding file.
    if (data.stripes == null || data.stripes.isEmpty()) {
      LlapIoImpl.LOG.warn("Trying to cache FileData with no data for " + data.fileKey);
      return;
    }
    FileCache<FileData> subCache = null;
    FileData cached = null;
    // Lock the incoming data first; if it becomes the cached entry (no pre-existing entry for
    // this file), the lock is retained and released in the inner finally below.
    data.rwLock.writeLock().lock();
    try {
      subCache = FileCache.getOrAddFileSubCache(
          cache, data.fileKey, new Function<Void, FileData>() {
            @Override
            public FileData apply(Void input) {
              return data; // If we don't have a file cache, we will add this one as is.
            }
          });
      cached = subCache.getCache();
    } finally {
      if (data != cached) {
        // Someone else's data won; we only need to hold the lock on the cached entry.
        data.rwLock.writeLock().unlock();
      }
    }
    try {
      if (data != cached) {
        cached.rwLock.writeLock().lock();
      }
      try {
        // Lock and register all incoming buffers with the policy regardless of merge outcome.
        for (StripeData si : data.stripes) {
          lockAllBuffersForPut(si, priority, tag);
        }
        if (data == cached) {
          if (LlapIoImpl.CACHE_LOGGER.isTraceEnabled()) {
            LlapIoImpl.CACHE_LOGGER.trace("Cached new data " + data);
          }
          return;
        }
        if (LlapIoImpl.CACHE_LOGGER.isTraceEnabled()) {
          LlapIoImpl.CACHE_LOGGER.trace("Merging old " + cached + " and new " + data);
        }
        // Merge: sort old+new stripes together, then resolve overlaps pairwise.
        ArrayList<StripeData> combined = new ArrayList<>(
            cached.stripes.size() + data.stripes.size());
        combined.addAll(cached.stripes);
        combined.addAll(data.stripes);
        Collections.sort(combined, new StripeInfoComparator());
        int lastIx = combined.size() - 1;
        for (int ix = 0; ix < lastIx; ++ix) {
          StripeData cur = combined.get(ix), next = combined.get(ix + 1);
          if (cur.lastEnd <= next.firstStart) continue; // All good.
          if (cur.firstStart == next.firstStart && cur.lastEnd == next.lastEnd) {
            // Exact boundary match: merge per-column data/encodings into 'cur'.
            mergeStripeInfos(cur, next);
            combined.remove(ix + 1);
            --lastIx;
            // Don't recheck with next, only 2 lists each w/o collisions.
            continue;
          }
          // The original lists do not contain collisions, so only one is 'old'.
          boolean isCurOriginal = cached.stripes.contains(cur);
          handleRemovedStripeInfo(combined.remove(isCurOriginal ? ix : ix + 1));
          --ix;
          --lastIx;
        }
        cached.stripes = combined;
        if (LlapIoImpl.CACHE_LOGGER.isTraceEnabled()) {
          LlapIoImpl.CACHE_LOGGER.trace("New cache data is " + combined);
        }
      } finally {
        // Releases data.rwLock when data == cached, cached.rwLock otherwise.
        cached.rwLock.writeLock().unlock();
      }
    } finally {
      subCache.decRef();
    }
  }

  /**
   * Locks every buffer of the stripe (asserting success - these buffers are new, not yet
   * evictable), tags them, and hands them to the cache policy.
   */
  private void lockAllBuffersForPut(StripeData si, Priority priority, CacheTag tag) {
    for (int i = 0; i < si.data.length; ++i) {
      LlapSerDeDataBuffer[][] colData = si.data[i];
      if (colData == null) continue;
      for (int j = 0; j < colData.length; ++j) {
        LlapSerDeDataBuffer[] streamData = colData[j];
        if (streamData == null) continue;
        for (int k = 0; k < streamData.length; ++k) {
          boolean canLock = lockBuffer(streamData[k], false); // false - not in cache yet
          assert canLock;
          streamData[k].setTag(tag);
          cachePolicy.cache(streamData[k], priority);
          streamData[k].isCached = true;
        }
      }
    }
  }

  /** Handles a stripe dropped during merge by handling each of its column arrays. */
  private void handleRemovedStripeInfo(StripeData removed) {
    for (LlapSerDeDataBuffer[][] colData : removed.data) {
      handleRemovedColumnData(colData);
    }
  }

  /** Intentional no-op: removed column buffers are abandoned to normal eviction. */
  private void handleRemovedColumnData(LlapSerDeDataBuffer[][] removed) {
    // TODO: could we tell the policy that we don't care about these and have them evicted? or we
    // could just deallocate them when unlocked, and free memory + handle that in eviction.
    // For now, just abandon the blocks - eventually, they'll get evicted.
  }

  /**
   * Merges per-column encodings and data from one stripe into another with identical
   * boundaries. Encodings must match where both sides have them; otherwise this throws.
   *
   * @param to the surviving (cached) stripe; mutated in place.
   * @param from the incoming stripe whose data is absorbed.
   */
  private void mergeStripeInfos(StripeData to, StripeData from) {
    if (LlapIoImpl.CACHE_LOGGER.isTraceEnabled()) {
      LlapIoImpl.CACHE_LOGGER.trace("Merging slices data: old " + to + " and new " + from);
    }
    to.knownTornStart = Math.min(to.knownTornStart, from.knownTornStart);
    if (from.encodings.length != to.encodings.length) {
      throw new RuntimeException("Different encodings " + from + "; " + to);
    }
    for (int colIx = 0; colIx < from.encodings.length; ++colIx) {
      if (to.encodings[colIx] == null) {
        to.encodings[colIx] = from.encodings[colIx];
      } else if (from.encodings[colIx] != null
          && !to.encodings[colIx].equals(from.encodings[colIx])) {
        throw new RuntimeException("Different encodings at " + colIx + ": " + from + "; " + to);
      }
      LlapSerDeDataBuffer[][] fromColData = from.data[colIx];
      if (fromColData != null) {
        if (to.data[colIx] != null) {
          // Note: we assume here that the data that was returned to the caller from cache will not
          // be passed back in via put. Right now it's safe since we don't do anything. But if we
          // evict proactively, we will have to compare objects all the way down.
          handleRemovedColumnData(to.data[colIx]);
        }
        to.data[colIx] = fromColData;
      }
    }
  }

  @Override
  public void decRefBuffer(MemoryBuffer buffer) {
    unlockBuffer((LlapSerDeDataBuffer)buffer, true);
  }

  @Override
  public void decRefBuffers(List<MemoryBuffer> cacheBuffers) {
    for (MemoryBuffer b : cacheBuffers) {
      unlockBuffer((LlapSerDeDataBuffer)b, true);
    }
  }

  /**
   * Drops one refcount on the buffer. On the last decref, either notifies the policy (cached
   * buffers) or deallocates (buffers that never made it into the cache).
   */
  private void unlockBuffer(LlapSerDeDataBuffer buffer, boolean handleLastDecRef) {
    boolean isLastDecref = (buffer.decRef() == 0);
    if (handleLastDecRef && isLastDecref) {
      if (buffer.isCached) {
        cachePolicy.notifyUnlock(buffer);
      } else {
        if (LlapIoImpl.CACHE_LOGGER.isTraceEnabled()) {
          LlapIoImpl.CACHE_LOGGER.trace("Deallocating {} that was not cached", buffer);
        }
        allocator.deallocate(buffer);
      }
    }
    metrics.decrCacheNumLockedBuffers();
  }

  /** Counts an eviction and wakes the cleanup thread waiting on the counter. */
  public final void notifyEvicted(MemoryBuffer buffer) {
    newEvictions.incrementAndGet();

    // FileCacheCleanupThread might be waiting for eviction increment
    synchronized(newEvictions) {
      newEvictions.notifyAll();
    }
  }

  /**
   * Background thread that scans file caches and prunes column data whose buffers have all
   * been evicted.
   */
  private final class CleanupThread extends FileCacheCleanupThread<FileData> {

    public CleanupThread(ConcurrentHashMap<Object, FileCache<FileData>> fileMap,
        AtomicInteger newEvictions, long cleanupInterval) {
      super("Llap serde low level cache cleanup thread", fileMap, newEvictions, cleanupInterval);
    }

    @Override
    protected int getCacheSize(FileCache<FileData> fc) {
      return 1; // Each iteration cleans the file cache as a single unit (unlike the ORC cache).
    }

    /**
     * Probes every buffer of every stripe under the file's write lock; a column whose buffers
     * cannot all be locked (i.e. some were evicted) is dropped from the stripe.
     */
    @Override
    public int cleanUpOneFileCache(FileCache<FileData> fc, int leftToCheck, long endTime,
        Ref<Boolean> isPastEndTime) throws InterruptedException {
      FileData fd = fc.getCache();
      fd.rwLock.writeLock().lock();
      try {
        for (StripeData sd : fd.stripes) {
          for (int colIx = 0; colIx < sd.data.length; ++colIx) {
            LlapSerDeDataBuffer[][] colData = sd.data[colIx];
            if (colData == null) continue;
            boolean hasAllData = true;
            for (int j = 0; (j < colData.length) && hasAllData; ++j) {
              LlapSerDeDataBuffer[] streamData = colData[j];
              if (streamData == null) continue;
              for (int k = 0; k < streamData.length; ++k) {
                LlapSerDeDataBuffer buf = streamData[k];
                // Lock to check liveness, then immediately unlock.
                hasAllData = hasAllData && lockBuffer(buf, false);
                if (!hasAllData) break;
                unlockBuffer(buf, true);
              }
            }
            if (!hasAllData) {
              handleRemovedColumnData(colData);
              sd.data[colIx] = null;
            }
          }
        }
      } finally {
        fd.rwLock.writeLock().unlock();
      }
      return leftToCheck - 1;
    }
  }

  @Override
  public boolean incRefBuffer(MemoryBuffer buffer) {
    // notifyReused implies that buffer is already locked; it's also called once for new
    // buffers that are not cached yet. Don't notify cache policy.
    return lockBuffer(((LlapSerDeDataBuffer)buffer), false);
  }

  @Override
  public Allocator getAllocator() {
    return allocator;
  }

  @Override
  public void setConf(Configuration newConf) {
    this.conf = newConf;
  }

  @Override
  public Configuration getConf() {
    return conf;
  }

  /**
   * Appends a human-readable summary of cache state (locked/unlocked/evicted/moving buffer
   * counts per file and overall) to the builder. Takes transient refcounts on each buffer to
   * classify it, releasing them immediately.
   */
  @Override
  public void debugDumpShort(StringBuilder sb) {
    sb.append("\nSerDe cache state ");
    int allLocked = 0, allUnlocked = 0, allEvicted = 0, allMoving = 0;
    for (Map.Entry<Object, FileCache<FileData>> e : cache.entrySet()) {
      if (!e.getValue().incRef()) continue;
      try {
        FileData fd = e.getValue().getCache();
        int fileLocked = 0, fileUnlocked = 0, fileEvicted = 0, fileMoving = 0;
        sb.append(fd.colCount).append(" columns, ").append(fd.stripes.size()).append(" stripes; ");
        for (StripeData stripe : fd.stripes) {
          if (stripe.data == null) continue;
          for (int i = 0; i < stripe.data.length; ++i) {
            LlapSerDeDataBuffer[][] colData = stripe.data[i];
            if (colData == null) continue;
            for (int j = 0; j < colData.length; ++j) {
              LlapSerDeDataBuffer[] streamData = colData[j];
              if (streamData == null) continue;
              for (int k = 0; k < streamData.length; ++k) {
                int newRc = streamData[k].incRef();
                if (newRc < 0) {
                  // Negative refcount result encodes why the lock failed.
                  if (newRc == LlapAllocatorBuffer.INCREF_EVICTED) {
                    ++fileEvicted;
                  } else if (newRc == LlapAllocatorBuffer.INCREF_FAILED) {
                    ++fileMoving;
                  }
                  continue;
                }
                try {
                  if (newRc > 1) { // We hold one refcount.
                    ++fileLocked;
                  } else {
                    ++fileUnlocked;
                  }
                } finally {
                  streamData[k].decRef();
                }
              }
            }
          }
        }
        allLocked += fileLocked;
        allUnlocked += fileUnlocked;
        allEvicted += fileEvicted;
        allMoving += fileMoving;
        sb.append("\n file " + e.getKey() + ": " + fileLocked + " locked, "
            + fileUnlocked + " unlocked, " + fileEvicted + " evicted, "
            + fileMoving + " being moved");
      } finally {
        e.getValue().decRef();
      }
    }
    sb.append("\nSerDe cache summary: " + allLocked + " locked, " + allUnlocked + " unlocked, "
        + allEvicted + " evicted, " + allMoving + " being moved");
  }
}
/* * Copyright 2019-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Primary-backup server interface implementations. */ package io.atomix.protocols.backup.impl;
package io.virtualan.sv.mock.to;

import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModelProperty;

/**
 * Profile
 */
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.SpringCodegen", date = "2021-04-11T00:52:21.637-05:00[America/Chicago]")
public class Profile {
  @JsonProperty("first_name")
  private String firstName;

  @JsonProperty("last_name")
  private String lastName;

  @JsonProperty("email")
  private String email;

  @JsonProperty("picture")
  private String picture;

  @JsonProperty("promo_code")
  private String promoCode;

  /** Fluent setter for the first name; returns this instance for chaining. */
  public Profile firstName(String firstName) {
    setFirstName(firstName);
    return this;
  }

  /**
   * First name of the Uber user.
   * @return firstName
  */
  @ApiModelProperty(value = "First name of the Uber user.")
  public String getFirstName() {
    return firstName;
  }

  public void setFirstName(String firstName) {
    this.firstName = firstName;
  }

  /** Fluent setter for the last name; returns this instance for chaining. */
  public Profile lastName(String lastName) {
    setLastName(lastName);
    return this;
  }

  /**
   * Last name of the Uber user.
   * @return lastName
  */
  @ApiModelProperty(value = "Last name of the Uber user.")
  public String getLastName() {
    return lastName;
  }

  public void setLastName(String lastName) {
    this.lastName = lastName;
  }

  /** Fluent setter for the email address; returns this instance for chaining. */
  public Profile email(String email) {
    setEmail(email);
    return this;
  }

  /**
   * Email address of the Uber user
   * @return email
  */
  @ApiModelProperty(value = "Email address of the Uber user")
  public String getEmail() {
    return email;
  }

  public void setEmail(String email) {
    this.email = email;
  }

  /** Fluent setter for the picture URL; returns this instance for chaining. */
  public Profile picture(String picture) {
    setPicture(picture);
    return this;
  }

  /**
   * Image URL of the Uber user.
   * @return picture
  */
  @ApiModelProperty(value = "Image URL of the Uber user.")
  public String getPicture() {
    return picture;
  }

  public void setPicture(String picture) {
    this.picture = picture;
  }

  /** Fluent setter for the promo code; returns this instance for chaining. */
  public Profile promoCode(String promoCode) {
    setPromoCode(promoCode);
    return this;
  }

  /**
   * Promo code of the Uber user.
   * @return promoCode
  */
  @ApiModelProperty(value = "Promo code of the Uber user.")
  public String getPromoCode() {
    return promoCode;
  }

  public void setPromoCode(String promoCode) {
    this.promoCode = promoCode;
  }

  /** Two profiles are equal iff they are of the exact same class and all fields match. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    Profile other = (Profile) o;
    return Objects.equals(firstName, other.firstName)
        && Objects.equals(lastName, other.lastName)
        && Objects.equals(email, other.email)
        && Objects.equals(picture, other.picture)
        && Objects.equals(promoCode, other.promoCode);
  }

  @Override
  public int hashCode() {
    return Objects.hash(firstName, lastName, email, picture, promoCode);
  }

  /** Renders the profile one field per line, matching the generated-code format. */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("class Profile {\n");
    appendField(sb, "firstName", firstName);
    appendField(sb, "lastName", lastName);
    appendField(sb, "email", email);
    appendField(sb, "picture", picture);
    appendField(sb, "promoCode", promoCode);
    return sb.append("}").toString();
  }

  /** Appends a single "name: value" line to the builder, indenting nested lines. */
  private void appendField(StringBuilder sb, String name, Object value) {
    sb.append(" ").append(name).append(": ").append(toIndentedString(value)).append("\n");
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(Object o) {
    return (o == null) ? "null" : o.toString().replace("\n", "\n ");
  }
}
/*
 * Copyright 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.template.soy.data.restricted;

import com.google.common.collect.Lists;
import com.google.template.soy.data.SoyData;
import com.google.template.soy.data.SoyDataException;
import com.google.template.soy.data.SoyListData;
import com.google.template.soy.data.SoyMapData;

import java.util.List;


/**
 * Abstract superclass for a node in a Soy data tree that represents a collection of data (i.e. an
 * internal node).
 *
 * <p> Important: This class may only be used in implementing plugins (e.g. functions, directives).
 *
 * @author Kai Huang
 */
public abstract class CollectionData extends SoyData {


  // ------------ put() ------------


  /**
   * Convenience function to put multiple mappings in one call.
   * @param data The mappings to put, as alternating keys/values. Indices 0, 2, 4, ... must be valid
   *     key strings. Indices 1, 3, 5, ... must be valid Soy data values.
   * @throws SoyDataException When attempting to add an invalid varargs list or a mapping containing
   *     an invalid key.
   */
  public void put(Object... data) {

    // TODO: Perhaps change to only convert varargs to Map, and do put(Map) elsewhere.
    if (data.length % 2 != 0) {
      throw new SoyDataException(
          "Varargs to put(...) must have an even number of arguments (key-value pairs).");
    }
    for (int i = 0; i < data.length; i += 2) {
      try {
        put((String) data[i], SoyData.createFromExistingData(data[i + 1]));
      } catch (ClassCastException cce) {
        // The only cast above is the key cast; report its actual type.
        throw new SoyDataException(
            "Attempting to add a mapping containing a non-string key (key type " +
            data[i].getClass().getName() + ").");
      }
    }
  }


  /**
   * Puts data into this data tree at the specified key string, creating intermediate
   * SoyMapData/SoyListData nodes as needed (a list is created when the next key part starts
   * with a digit).
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @param value The data to put at the specified location.
   * @throws SoyDataException If an intermediate key resolves to a non-collection value.
   */
  public void put(String keyStr, SoyData value) {

    List<String> keys = split(keyStr, '.');
    int numKeys = keys.size();

    CollectionData collectionData = this;
    // Walk (or build) the tree down to the parent of the final key part.
    for (int i = 0; i <= numKeys - 2; ++i) {

      SoyData nextSoyData = collectionData.getSingle(keys.get(i));
      if (nextSoyData != null && !(nextSoyData instanceof CollectionData)) {
        throw new SoyDataException(
            "Failed to evaluate key string \"" + keyStr + "\" for put().");
      }
      CollectionData nextCollectionData = (CollectionData) nextSoyData;

      if (nextCollectionData == null) {
        // Create the SoyData object that will be bound to keys.get(i). We need to check the first
        // part of keys[i+1] to know whether to create a SoyMapData or SoyListData (checking the
        // first char is sufficient).
        nextCollectionData =
            (Character.isDigit(keys.get(i + 1).charAt(0))) ? new SoyListData() : new SoyMapData();
        collectionData.putSingle(keys.get(i), nextCollectionData);
      }
      collectionData = nextCollectionData;
    }

    collectionData.putSingle(keys.get(numKeys - 1), ensureValidValue(value));
  }


  /**
   * Puts data into this data tree at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @param value The data to put at the specified location.
   */
  public void put(String keyStr, boolean value) {
    put(keyStr, BooleanData.forValue(value));
  }

  /**
   * Puts data into this data tree at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @param value The data to put at the specified location.
   */
  public void put(String keyStr, int value) {
    put(keyStr, IntegerData.forValue(value));
  }

  /**
   * Puts data into this data tree at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @param value The data to put at the specified location.
   */
  public void put(String keyStr, double value) {
    put(keyStr, FloatData.forValue(value));
  }

  /**
   * Puts data into this data tree at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @param value The data to put at the specified location.
   */
  public void put(String keyStr, String value) {
    put(keyStr, StringData.forValue(value));
  }


  // ------------ remove() ------------


  /**
   * Removes the data at the specified key string. A no-op if any part of the path is missing
   * or resolves to a non-collection value.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   */
  public void remove(String keyStr) {

    List<String> keys = split(keyStr, '.');
    int numKeys = keys.size();

    CollectionData collectionData = this;
    for (int i = 0; i <= numKeys - 2; ++i) {
      SoyData soyData = collectionData.getSingle(keys.get(i));
      // instanceof is false for null, so this also covers the missing-key case.
      if (!(soyData instanceof CollectionData)) {
        return;
      }
      collectionData = (CollectionData) soyData;
    }

    collectionData.removeSingle(keys.get(numKeys - 1));
  }


  // ------------ get*() ------------


  /**
   * Gets the data at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The data at the specified key string, or null if there's no data at the location.
   */
  public SoyData get(String keyStr) {

    List<String> keys = split(keyStr, '.');
    int numKeys = keys.size();

    CollectionData collectionData = this;
    for (int i = 0; i <= numKeys - 2; ++i) {
      SoyData soyData = collectionData.getSingle(keys.get(i));
      // instanceof is false for null, so this also covers the missing-key case.
      if (!(soyData instanceof CollectionData)) {
        return null;
      }
      collectionData = (CollectionData) soyData;
    }

    return collectionData.getSingle(keys.get(numKeys - 1));
  }


  /**
   * Precondition: The specified key string is the path to a SoyMapData object.
   * Gets the SoyMapData at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The SoyMapData at the specified key string, or null if no data is stored there.
   */
  public SoyMapData getMapData(String keyStr) {
    return (SoyMapData) get(keyStr);
  }

  /**
   * Precondition: The specified key string is the path to a SoyListData object.
   * Gets the SoyListData at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The SoyListData at the specified key string, or null if no data is stored there.
   */
  public SoyListData getListData(String keyStr) {
    return (SoyListData) get(keyStr);
  }

  /**
   * Precondition: The specified key string is the path to a boolean.
   * Gets the boolean at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The boolean at the specified key string.
   * @throws IllegalArgumentException If no data is stored at the specified key.
   */
  public boolean getBoolean(String keyStr) {
    return getRequired(keyStr).booleanValue();
  }

  /**
   * Precondition: The specified key string is the path to an integer.
   * Gets the integer at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The integer at the specified key string.
   * @throws IllegalArgumentException If no data is stored at the specified key.
   */
  public int getInteger(String keyStr) {
    return getRequired(keyStr).integerValue();
  }

  /**
   * Precondition: The specified key string is the path to a float.
   * Gets the float at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The float at the specified key string.
   * @throws IllegalArgumentException If no data is stored at the specified key.
   */
  public double getFloat(String keyStr) {
    return getRequired(keyStr).floatValue();
  }

  /**
   * Precondition: The specified key string is the path to a string.
   * Gets the string at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The string at the specified key string.
   * @throws IllegalArgumentException If no data is stored at the specified key.
   */
  public String getString(String keyStr) {
    return getRequired(keyStr).stringValue();
  }


  /**
   * Gets the data at the specified key string, throwing if absent. Shared by the typed getters.
   * @param keyStr The key string to look up.
   * @return The non-null data at the specified key string.
   * @throws IllegalArgumentException If no data is stored at the specified key.
   */
  private SoyData getRequired(String keyStr) {
    SoyData valueData = get(keyStr);
    if (valueData == null) {
      throw new IllegalArgumentException("Missing key: " + keyStr);
    }
    return valueData;
  }


  // -----------------------------------------------------------------------------------------------
  // Superpackage-private methods.


  /**
   * Important: Do not use outside of Soy code (treat as superpackage-private).
   *
   * Puts data into this data object at the specified key.
   * @param key An individual key.
   * @param value The data to put at the specified key.
   */
  public abstract void putSingle(String key, SoyData value);

  /**
   * Important: Do not use outside of Soy code (treat as superpackage-private).
   *
   * Removes the data at the specified key.
   * @param key An individual key.
   */
  public abstract void removeSingle(String key);

  /**
   * Important: Do not use outside of Soy code (treat as superpackage-private).
   *
   * Gets the data at the specified key.
   * @param key An individual key.
   * @return The data at the specified key, or null if the key is not defined.
   */
  public abstract SoyData getSingle(String key);


  // -----------------------------------------------------------------------------------------------
  // Protected/private helpers.


  /**
   * Ensures that the given value is valid for insertion into a Soy data tree. If the value is not
   * null, then simply returns it, else return NullData.
   * @param value The value to ensure validity for.
   * @return The given value if it's not null, or NullData if it is null.
   */
  protected static SoyData ensureValidValue(SoyData value) {
    return (value != null) ? value : NullData.INSTANCE;
  }


  /**
   * Splits a string into tokens at the specified delimiter. Unlike String.split, trailing empty
   * tokens are kept and the delimiter is a literal character, not a regex.
   * @param str The string to split. Must not be null.
   * @param delim The delimiter character.
   * @return A list of tokens. Will not return null.
   */
  private static List<String> split(String str, char delim) {

    List<String> result = Lists.newArrayList();

    int currPartStart = 0;
    while (true) {
      int currPartEnd = str.indexOf(delim, currPartStart);
      if (currPartEnd == -1) {
        result.add(str.substring(currPartStart));
        break;
      } else {
        result.add(str.substring(currPartStart, currPartEnd));
        currPartStart = currPartEnd + 1;
      }
    }

    return result;
  }

}